blob: 1524e1e0114644210de1f7536e8a891ddcc4551c [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
26#include "thread.h"
27#include "utils/mips64/assembler_mips64.h"
28#include "utils/mips64/constants_mips64.h"
29
30namespace art {
31
32namespace mips64 {
33
34IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
35 : arena_(codegen->GetGraph()->GetArena()) {
36}
37
38Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
39 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
40}
41
42ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
43 return codegen_->GetGraph()->GetArena();
44}
45
Chris Larsen9701c2e2015-09-04 17:22:47 -070046#define __ codegen->GetAssembler()->
47
48static void MoveFromReturnRegister(Location trg,
49 Primitive::Type type,
50 CodeGeneratorMIPS64* codegen) {
51 if (!trg.IsValid()) {
52 DCHECK_EQ(type, Primitive::kPrimVoid);
53 return;
54 }
55
56 DCHECK_NE(type, Primitive::kPrimVoid);
57
58 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
59 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
60 if (trg_reg != V0) {
61 __ Move(V0, trg_reg);
62 }
63 } else {
64 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
65 if (trg_reg != F0) {
66 if (type == Primitive::kPrimFloat) {
67 __ MovS(F0, trg_reg);
68 } else {
69 __ MovD(F0, trg_reg);
70 }
71 }
72 }
73}
74
75static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
76 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
77 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
78}
79
80// Slow-path for fallback (calling the managed code to handle the
81// intrinsic) in an intrinsified call. This will copy the arguments
82// into the positions for a regular call.
83//
84// Note: The actual parameters are required to be in the locations
85// given by the invoke's location summary. If an intrinsic
86// modifies those locations before a slowpath call, they must be
87// restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  // Emits the out-of-line sequence: save live registers, marshal the
  // arguments per the regular calling convention, call the managed
  // implementation of the method, copy the result back, and branch to
  // the exit label to rejoin the fast path.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Registers live across the call must survive it.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    // The callee method pointer is passed in A0 for both dispatch kinds.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0));
    }
    // Map the call's PC back to the invoke's dex PC for stack maps/deopt.
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
130
131#undef __
132
Chris Larsen3039e382015-08-26 07:54:08 -0700133bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
134 Dispatch(invoke);
135 LocationSummary* res = invoke->GetLocations();
136 return res != nullptr && res->Intrinsified();
137}
138
139#define __ assembler->
140
141static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
142 LocationSummary* locations = new (arena) LocationSummary(invoke,
143 LocationSummary::kNoCall,
144 kIntrinsified);
145 locations->SetInAt(0, Location::RequiresFpuRegister());
146 locations->SetOut(Location::RequiresRegister());
147}
148
149static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
150 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
151 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
152
153 if (is64bit) {
154 __ Dmfc1(out, in);
155 } else {
156 __ Mfc1(out, in);
157 }
158}
159
160// long java.lang.Double.doubleToRawLongBits(double)
161void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
162 CreateFPToIntLocations(arena_, invoke);
163}
164
165void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000166 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700167}
168
169// int java.lang.Float.floatToRawIntBits(float)
170void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
171 CreateFPToIntLocations(arena_, invoke);
172}
173
174void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000175 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700176}
177
178static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
179 LocationSummary* locations = new (arena) LocationSummary(invoke,
180 LocationSummary::kNoCall,
181 kIntrinsified);
182 locations->SetInAt(0, Location::RequiresRegister());
183 locations->SetOut(Location::RequiresFpuRegister());
184}
185
186static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
187 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
188 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
189
190 if (is64bit) {
191 __ Dmtc1(in, out);
192 } else {
193 __ Mtc1(in, out);
194 }
195}
196
197// double java.lang.Double.longBitsToDouble(long)
198void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
199 CreateIntToFPLocations(arena_, invoke);
200}
201
202void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000203 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700204}
205
206// float java.lang.Float.intBitsToFloat(int)
207void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
208 CreateIntToFPLocations(arena_, invoke);
209}
210
211void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000212 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700213}
214
215static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
216 LocationSummary* locations = new (arena) LocationSummary(invoke,
217 LocationSummary::kNoCall,
218 kIntrinsified);
219 locations->SetInAt(0, Location::RequiresRegister());
220 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
221}
222
223static void GenReverseBytes(LocationSummary* locations,
224 Primitive::Type type,
225 Mips64Assembler* assembler) {
226 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
227 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
228
229 switch (type) {
230 case Primitive::kPrimShort:
231 __ Dsbh(out, in);
232 __ Seh(out, out);
233 break;
234 case Primitive::kPrimInt:
235 __ Rotr(out, in, 16);
236 __ Wsbh(out, out);
237 break;
238 case Primitive::kPrimLong:
239 __ Dsbh(out, in);
240 __ Dshd(out, out);
241 break;
242 default:
243 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
244 UNREACHABLE();
245 }
246}
247
248// int java.lang.Integer.reverseBytes(int)
249void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
250 CreateIntToIntLocations(arena_, invoke);
251}
252
253void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
254 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
255}
256
257// long java.lang.Long.reverseBytes(long)
258void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
259 CreateIntToIntLocations(arena_, invoke);
260}
261
262void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
263 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
264}
265
266// short java.lang.Short.reverseBytes(short)
267void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
268 CreateIntToIntLocations(arena_, invoke);
269}
270
271void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
272 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
273}
274
Chris Larsen81284372015-10-21 15:28:53 -0700275static void GenNumberOfLeadingZeroes(LocationSummary* locations,
276 bool is64bit,
277 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700278 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
279 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
280
281 if (is64bit) {
282 __ Dclz(out, in);
283 } else {
284 __ Clz(out, in);
285 }
286}
287
288// int java.lang.Integer.numberOfLeadingZeros(int i)
289void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
290 CreateIntToIntLocations(arena_, invoke);
291}
292
293void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000294 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700295}
296
297// int java.lang.Long.numberOfLeadingZeros(long i)
298void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
299 CreateIntToIntLocations(arena_, invoke);
300}
301
302void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000303 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700304}
305
Chris Larsen81284372015-10-21 15:28:53 -0700306static void GenNumberOfTrailingZeroes(LocationSummary* locations,
307 bool is64bit,
308 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700309 Location in = locations->InAt(0);
310 Location out = locations->Out();
311
312 if (is64bit) {
313 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
314 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
315 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
316 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
317 } else {
318 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
319 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
322 }
323}
324
325// int java.lang.Integer.numberOfTrailingZeros(int i)
326void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
327 CreateIntToIntLocations(arena_, invoke);
328}
329
330void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000331 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700332}
333
334// int java.lang.Long.numberOfTrailingZeros(long i)
335void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
336 CreateIntToIntLocations(arena_, invoke);
337}
338
339void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000340 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700341}
342
343static void GenReverse(LocationSummary* locations,
344 Primitive::Type type,
345 Mips64Assembler* assembler) {
346 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
347
348 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
349 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
350
351 if (type == Primitive::kPrimInt) {
352 __ Rotr(out, in, 16);
353 __ Wsbh(out, out);
354 __ Bitswap(out, out);
355 } else {
356 __ Dsbh(out, in);
357 __ Dshd(out, out);
358 __ Dbitswap(out, out);
359 }
360}
361
362// int java.lang.Integer.reverse(int)
363void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
364 CreateIntToIntLocations(arena_, invoke);
365}
366
367void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
368 GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
369}
370
371// long java.lang.Long.reverse(long)
372void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
373 CreateIntToIntLocations(arena_, invoke);
374}
375
376void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
377 GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
378}
379
Chris Larsen0b7ac982015-09-04 12:54:28 -0700380static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
381 LocationSummary* locations = new (arena) LocationSummary(invoke,
382 LocationSummary::kNoCall,
383 kIntrinsified);
384 locations->SetInAt(0, Location::RequiresFpuRegister());
385 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
386}
387
// Emits a branchless population count (Integer/Long.bitCount) for |type|
// (kPrimInt or kPrimLong), using AT and TMP as scratch registers.
static void GenBitCount(LocationSummary* locations,
                        const Primitive::Type type,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.

  if (type == Primitive::kPrimInt) {
    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    __ LoadConst32(TMP, 0x01010101);
    __ MulR6(out, out, TMP);
    __ Srl(out, out, 24);
  } else if (type == Primitive::kPrimLong) {
    __ Dsrl(TMP, in, 1);
    __ LoadConst64(AT, 0x5555555555555555L);
    __ And(TMP, TMP, AT);
    __ Dsubu(TMP, in, TMP);
    __ LoadConst64(AT, 0x3333333333333333L);
    __ And(out, TMP, AT);
    __ Dsrl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Daddu(TMP, out, TMP);
    __ Dsrl(out, TMP, 4);
    __ Daddu(out, out, TMP);
    __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
    __ And(out, out, AT);
    __ LoadConst64(TMP, 0x0101010101010101L);
    __ Dmul(out, out, TMP);
    __ Dsrl32(out, out, 24);
  }
}
455
456// int java.lang.Integer.bitCount(int)
457void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
458 CreateIntToIntLocations(arena_, invoke);
459}
460
461void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
462 GenBitCount(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
463}
464
465// int java.lang.Long.bitCount(long)
466void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
467 CreateIntToIntLocations(arena_, invoke);
468}
469
470void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
471 GenBitCount(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
472}
473
Chris Larsen0b7ac982015-09-04 12:54:28 -0700474static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
475 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
476 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
477
478 if (is64bit) {
479 __ AbsD(out, in);
480 } else {
481 __ AbsS(out, in);
482 }
483}
484
485// double java.lang.Math.abs(double)
486void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
487 CreateFPToFPLocations(arena_, invoke);
488}
489
490void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000491 MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700492}
493
494// float java.lang.Math.abs(float)
495void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
496 CreateFPToFPLocations(arena_, invoke);
497}
498
499void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000500 MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700501}
502
503static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
504 LocationSummary* locations = new (arena) LocationSummary(invoke,
505 LocationSummary::kNoCall,
506 kIntrinsified);
507 locations->SetInAt(0, Location::RequiresRegister());
508 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
509}
510
511static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
512 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
513 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
514
515 if (is64bit) {
516 __ Dsra32(AT, in, 31);
517 __ Xor(out, in, AT);
518 __ Dsubu(out, out, AT);
519 } else {
520 __ Sra(AT, in, 31);
521 __ Xor(out, in, AT);
522 __ Subu(out, out, AT);
523 }
524}
525
526// int java.lang.Math.abs(int)
527void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
528 CreateIntToInt(arena_, invoke);
529}
530
531void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000532 GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700533}
534
535// long java.lang.Math.abs(long)
536void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
537 CreateIntToInt(arena_, invoke);
538}
539
540void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000541 GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700542}
543
// Emits floating-point min/max with Java semantics, selected by |is_min| and
// |type| (kPrimFloat or kPrimDouble). Uses FTMP as an FP scratch register.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Use |out| as the scratch for the NaN-select unless it aliases an input,
  // in which case fall back to FTMP so the inputs stay intact.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == Primitive::kPrimDouble) {
    // CMP.UN sets FTMP if a and b are unordered, i.e. either is a NaN.
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimFloat);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
612
613static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
614 LocationSummary* locations = new (arena) LocationSummary(invoke,
615 LocationSummary::kNoCall,
616 kIntrinsified);
617 locations->SetInAt(0, Location::RequiresFpuRegister());
618 locations->SetInAt(1, Location::RequiresFpuRegister());
619 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
620}
621
622// double java.lang.Math.min(double, double)
623void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
624 CreateFPFPToFPLocations(arena_, invoke);
625}
626
627void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800628 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimDouble, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700629}
630
631// float java.lang.Math.min(float, float)
632void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
633 CreateFPFPToFPLocations(arena_, invoke);
634}
635
636void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800637 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimFloat, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700638}
639
640// double java.lang.Math.max(double, double)
641void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
642 CreateFPFPToFPLocations(arena_, invoke);
643}
644
645void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800646 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimDouble, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700647}
648
649// float java.lang.Math.max(float, float)
650void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
651 CreateFPFPToFPLocations(arena_, invoke);
652}
653
654void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800655 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimFloat, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700656}
657
// Emits branchless integer min/max (used for both the int and long variants)
// with SLT + SELEQZ/SELNEZ, using AT as a scratch register. The aliasing of
// |out| with either input determines which operand ordering is emitted.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x; just copy if needed.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of out/AT is zero here; OR merges the selected value.
    __ Or(out, out, AT);
  }
}
715
716static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
717 LocationSummary* locations = new (arena) LocationSummary(invoke,
718 LocationSummary::kNoCall,
719 kIntrinsified);
720 locations->SetInAt(0, Location::RequiresRegister());
721 locations->SetInAt(1, Location::RequiresRegister());
722 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
723}
724
725// int java.lang.Math.min(int, int)
726void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
727 CreateIntIntToIntLocations(arena_, invoke);
728}
729
730void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000731 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700732}
733
734// long java.lang.Math.min(long, long)
735void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
736 CreateIntIntToIntLocations(arena_, invoke);
737}
738
739void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000740 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700741}
742
743// int java.lang.Math.max(int, int)
744void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
745 CreateIntIntToIntLocations(arena_, invoke);
746}
747
748void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000749 GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700750}
751
752// long java.lang.Math.max(long, long)
753void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
754 CreateIntIntToIntLocations(arena_, invoke);
755}
756
757void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000758 GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700759}
760
761// double java.lang.Math.sqrt(double)
762void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
763 CreateFPToFPLocations(arena_, invoke);
764}
765
766void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
767 LocationSummary* locations = invoke->GetLocations();
768 Mips64Assembler* assembler = GetAssembler();
769 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
770 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
771
772 __ SqrtD(out, in);
773}
774
Chris Larsen81284372015-10-21 15:28:53 -0700775static void CreateFPToFP(ArenaAllocator* arena,
776 HInvoke* invoke,
777 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700778 LocationSummary* locations = new (arena) LocationSummary(invoke,
779 LocationSummary::kNoCall,
780 kIntrinsified);
781 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700782 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700783}
784
785// double java.lang.Math.rint(double)
786void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
Chris Larsen81284372015-10-21 15:28:53 -0700787 CreateFPToFP(arena_, invoke, Location::kNoOutputOverlap);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700788}
789
790void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
791 LocationSummary* locations = invoke->GetLocations();
792 Mips64Assembler* assembler = GetAssembler();
793 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
794 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
795
796 __ RintD(out, in);
797}
798
// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Uses the default kOutputOverlap so the allocator gives out != in;
  // GenRoundingMode reads `in` after writing `out` (and DCHECKs this).
  CreateFPToFP(arena_, invoke);
}
803
Chris Larsen14500822015-10-01 11:35:18 -0700804const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
805 kPositiveInfinity |
806 kNegativeZero |
807 kNegativeInfinity |
808 kQuietNaN |
809 kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700810
// Rounding direction selector for GenRoundingMode below.
enum FloatRoundingMode {
  kFloor,  // round toward negative infinity
  kCeil,   // round toward positive infinity
};
815
// Shared codegen for Math.floor/Math.ceil on doubles.  Implements:
//
//   double floor/ceil(double in) {
//     if (in.isNaN || in.isInfinite || in.isZero) return in;
//     long outLong = floor/ceil(in);
//     if (outLong == Long.MAX_VALUE) return in;  // result out of long range
//     return (double) outLong;
//   }
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // The sequence below reads `in` after writing `out`.
  DCHECK_NE(in, out);

  Mips64Label done;

  // if in.isNaN || in.isInfinite || in.isZero {
  //   return in;
  // }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  // Long outLong = floor/ceil(in);
  // if outLong == Long.MAX_VALUE {
  //   // floor()/ceil() has almost certainly returned a value
  //   // which can't be successfully represented as a signed
  //   // 64-bit number.  Java expects that the input value will
  //   // be returned in these cases.
  //   // There is also a small probability that floor(in)/ceil(in)
  //   // correctly truncates/rounds up the input value to
  //   // Long.MAX_VALUE.  In that case, this exception handling
  //   // code still does the correct thing.
  //   return in;
  // }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ LoadConst64(TMP, kPrimLongMax);
  __ Beqc(AT, TMP, &done);

  // double out = outLong;
  // return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
865
void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Round toward negative infinity.
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}
869
// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires out != in.
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Round toward positive infinity.
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
878
Chris Larsen70fb1f42015-09-04 10:15:27 -0700879// byte libcore.io.Memory.peekByte(long address)
880void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
881 CreateIntToIntLocations(arena_, invoke);
882}
883
884void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
885 Mips64Assembler* assembler = GetAssembler();
886 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
887 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
888
889 __ Lb(out, adr, 0);
890}
891
892// short libcore.io.Memory.peekShort(long address)
893void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
894 CreateIntToIntLocations(arena_, invoke);
895}
896
897void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
898 Mips64Assembler* assembler = GetAssembler();
899 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
900 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
901
902 __ Lh(out, adr, 0);
903}
904
905// int libcore.io.Memory.peekInt(long address)
906void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
907 CreateIntToIntLocations(arena_, invoke);
908}
909
910void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
911 Mips64Assembler* assembler = GetAssembler();
912 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
913 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
914
915 __ Lw(out, adr, 0);
916}
917
918// long libcore.io.Memory.peekLong(long address)
919void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
920 CreateIntToIntLocations(arena_, invoke);
921}
922
923void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
924 Mips64Assembler* assembler = GetAssembler();
925 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
926 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
927
928 __ Ld(out, adr, 0);
929}
930
931static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
932 LocationSummary* locations = new (arena) LocationSummary(invoke,
933 LocationSummary::kNoCall,
934 kIntrinsified);
935 locations->SetInAt(0, Location::RequiresRegister());
936 locations->SetInAt(1, Location::RequiresRegister());
937}
938
939// void libcore.io.Memory.pokeByte(long address, byte value)
940void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
941 CreateIntIntToVoidLocations(arena_, invoke);
942}
943
944void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
945 Mips64Assembler* assembler = GetAssembler();
946 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
947 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
948
949 __ Sb(val, adr, 0);
950}
951
952// void libcore.io.Memory.pokeShort(long address, short value)
953void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
954 CreateIntIntToVoidLocations(arena_, invoke);
955}
956
957void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
958 Mips64Assembler* assembler = GetAssembler();
959 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
960 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
961
962 __ Sh(val, adr, 0);
963}
964
965// void libcore.io.Memory.pokeInt(long address, int value)
966void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
967 CreateIntIntToVoidLocations(arena_, invoke);
968}
969
970void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
971 Mips64Assembler* assembler = GetAssembler();
972 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
973 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
974
975 __ Sw(val, adr, 00);
976}
977
978// void libcore.io.Memory.pokeLong(long address, long value)
979void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
980 CreateIntIntToVoidLocations(arena_, invoke);
981}
982
983void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
984 Mips64Assembler* assembler = GetAssembler();
985 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
986 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
987
988 __ Sd(val, adr, 0);
989}
990
Chris Larsen49e55392015-09-04 16:04:03 -0700991// Thread java.lang.Thread.currentThread()
992void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
993 LocationSummary* locations = new (arena_) LocationSummary(invoke,
994 LocationSummary::kNoCall,
995 kIntrinsified);
996 locations->SetOut(Location::RequiresRegister());
997}
998
999void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
1000 Mips64Assembler* assembler = GetAssembler();
1001 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
1002
1003 __ LoadFromOffset(kLoadUnsignedWord,
1004 out,
1005 TR,
1006 Thread::PeerOffset<kMips64PointerSize>().Int32Value());
1007}
1008
Chris Larsen1360ada2015-09-04 23:38:16 -07001009static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
1010 LocationSummary* locations = new (arena) LocationSummary(invoke,
1011 LocationSummary::kNoCall,
1012 kIntrinsified);
1013 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1014 locations->SetInAt(1, Location::RequiresRegister());
1015 locations->SetInAt(2, Location::RequiresRegister());
1016 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1017}
1018
// Shared codegen for the sun.misc.Unsafe get{Int,Long,Object}[Volatile]
// intrinsics: loads a value of `type` from (base + offset).  A volatile get
// issues a full memory barrier (sync 0) before the load.
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  // Only int, long and object gets are routed here.
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister trg = locations->Out().AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
      __ Lw(trg, TMP, 0);
      break;

    case Primitive::kPrimNot:
      // References are 32 bits wide and zero-extended.
      __ Lwu(trg, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Ld(trg, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
1056
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Plain (non-volatile) 32-bit load.
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
1065
// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit load (barrier before the load).
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
1074
// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Plain (non-volatile) 64-bit load.
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
1083
// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit load (barrier before the load).
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
1092
// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Plain (non-volatile) reference load.
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
1101
// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load (barrier before the load).
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
1110
1111static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
1112 LocationSummary* locations = new (arena) LocationSummary(invoke,
1113 LocationSummary::kNoCall,
1114 kIntrinsified);
1115 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1116 locations->SetInAt(1, Location::RequiresRegister());
1117 locations->SetInAt(2, Location::RequiresRegister());
1118 locations->SetInAt(3, Location::RequiresRegister());
1119}
1120
// Shared codegen for the sun.misc.Unsafe put{Int,Long,Object}[Volatile|Ordered]
// intrinsics: stores `value` of `type` at (base + offset).  Ordered and
// volatile puts emit a barrier (sync 0) before the store; a volatile put
// emits a second barrier after it.  Reference puts mark the GC card.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  // Only int, long and object puts are routed here.
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      // References are 32 bits wide, so int and reference stores are both `sw`.
      __ Sw(value, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1163
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain int store: no memory barriers.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1176
// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered int store: barrier before the store only.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1189
// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Volatile int store: barriers before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1202
// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Plain reference store: no barriers; GC card marked by GenUnsafePut.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1215
// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Ordered reference store: barrier before the store only.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1228
// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Volatile reference store: barriers before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1241
// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // Plain long store: no memory barriers.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1254
// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Ordered long store: barrier before the store only.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1267
// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // Volatile long store: barriers before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1280
Chris Larsen36427492015-10-23 02:19:38 -07001281static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
1282 LocationSummary* locations = new (arena) LocationSummary(invoke,
1283 LocationSummary::kNoCall,
1284 kIntrinsified);
1285 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1286 locations->SetInAt(1, Location::RequiresRegister());
1287 locations->SetInAt(2, Location::RequiresRegister());
1288 locations->SetInAt(3, Location::RequiresRegister());
1289 locations->SetInAt(4, Location::RequiresRegister());
1290
1291 locations->SetOut(Location::RequiresRegister());
1292}
1293
// Shared codegen for the sun.misc.Unsafe compareAndSwap{Int,Long,Object}
// intrinsics: an LL/SC retry loop bracketed by full barriers (sync 0).
// Sets `out` to 1 if [base + offset] contained `expected` and was replaced
// by `value`, 0 otherwise.
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorMIPS64* codegen) {
  Mips64Assembler* assembler = codegen->GetAssembler();
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // `out` is clobbered inside the loop while the inputs are still live.
  DCHECK_NE(base, out);
  DCHECK_NE(offset, out);
  DCHECK_NE(expected, out);

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  Mips64Label loop_head, exit_loop;
  __ Daddu(TMP, base, offset);
  __ Sync(0);
  __ Bind(&loop_head);
  if (type == Primitive::kPrimLong) {
    __ Lld(out, TMP);
  } else {
    // Note: We will need a read barrier here, when read barrier
    // support is added to the MIPS64 back end.
    __ Ll(out, TMP);
  }
  __ Dsubu(out, out, expected);  // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);         // value, set 'out' to false, and
  __ Beqzc(out, &exit_loop);     // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails.  Whether the
                        // store succeeds, or fails, it will load the
                        // correct boolean value into the 'out' register.
  if (type == Primitive::kPrimLong) {
    __ Scd(out, TMP);
  } else {
    __ Sc(out, TMP);
  }
  __ Beqzc(out, &loop_head);  // If we couldn't do the read-modify-write
                              // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);
}
1346
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // 32-bit LL/SC compare-and-swap.
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
1355
// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // 64-bit LL/SC (lld/scd) compare-and-swap.
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
1364
// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // 32-bit LL/SC compare-and-swap on a (32-bit) reference; marks the GC card.
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}
1373
// char java.lang.String.charAt(int index)
void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
  // kCallOnSlowPath: out-of-range indices fall back to the runtime.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
1383
void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count (the string length, in chars).
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TODO: Maybe we can support range check elimination. Overall,
  //       though, I think it's not worth the cost.
  // TODO: For simplicity, the index parameter is requested in a
  //       register, so different from Quick we will not optimize the
  //       code for constants (which would save a register).

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load the string size.
  __ Lw(TMP, obj, count_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Revert to slow path if idx is too large, or negative: the unsigned
  // compare treats a negative index as a huge value, so one branch covers
  // both bounds.
  __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());

  // out = obj[2*idx].  Chars are 16 bits, hence the shift by 1 and the
  // zero-extending halfword load.
  __ Sll(TMP, idx, 1);              // idx * 2
  __ Daddu(TMP, TMP, obj);          // Address of char at location idx
  __ Lhu(out, TMP, value_offset);   // Load char at location idx

  __ Bind(slow_path->GetExitLabel());
}
1419
// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  // kCall: this intrinsic is implemented as a runtime call, so the inputs
  // live in the runtime calling-convention registers.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}
1431
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // A null argument is handed to the slow path (which raises the exception).
  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  // Call the pStringCompareTo runtime entry point through T9 (the standard
  // MIPS call register), then a nop after the call.
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, pStringCompareTo).Int32Value());
  __ Jalr(T9);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}
1452
// boolean java.lang.String.equals(Object anObject)
void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
1467
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqzc(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bnec(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers: iterate via TMP and temp3 copies.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings 4 characters (one 8-byte doubleword) at a time,
  // starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  __ Addiu(temp1, temp1, -4);   // temp1 counts remaining chars.
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1554
// Common code for String.indexOf(int ch) and String.indexOf(int ch, int fromIndex).
// Emits a call to the pIndexOf quick entrypoint; code points that do not fit in
// an unsigned 16-bit value are routed to the intrinsic slow path instead.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  // When start_at_zero, the temp register doubles as the start-index argument
  // (checked to be A2 below); otherwise TMP is only scratch for the range check.
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we
  // don't know statically, or directly dispatch if we have a constant.
  SlowPathCodeMIPS64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    // Code point only known at runtime: compare it against 0xFFFF and take the
    // slow path when it is larger (unsigned compare).
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());  // UTF-16 required
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  // Load the entrypoint address from the thread register (TR) into T9 and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, pIndexOf).Int32Value());
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ Jalr(T9);
  __ Nop();  // Fill the JALR delay slot.

  // Rejoin from the slow path, if one was emitted.
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
1606
1607// int java.lang.String.indexOf(int ch)
1608void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
1609 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1610 LocationSummary::kCall,
1611 kIntrinsified);
1612 // We have a hand-crafted assembly stub that follows the runtime
1613 // calling convention. So it's best to align the inputs accordingly.
1614 InvokeRuntimeCallingConvention calling_convention;
1615 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1616 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1617 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1618 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1619
1620 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1621 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1622}
1623
1624void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001625 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001626}
1627
1628// int java.lang.String.indexOf(int ch, int fromIndex)
1629void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
1630 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1631 LocationSummary::kCall,
1632 kIntrinsified);
1633 // We have a hand-crafted assembly stub that follows the runtime
1634 // calling convention. So it's best to align the inputs accordingly.
1635 InvokeRuntimeCallingConvention calling_convention;
1636 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1637 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1638 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1639 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1640 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1641}
1642
1643void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001644 GenerateStringIndexOf(
1645 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001646}
1647
Roland Levillaincc3839c2016-02-29 16:23:48 +00001648// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001649void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
1650 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1651 LocationSummary::kCall,
1652 kIntrinsified);
1653 InvokeRuntimeCallingConvention calling_convention;
1654 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1655 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1656 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1657 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1658 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1659 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1660}
1661
// Emits a call to the pAllocStringFromBytes quick entrypoint; a null byte
// array is diverted to the intrinsic slow path.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  // NOTE(review): presumably consumed by the '__' assembler macro in this
  // region of the file — otherwise unused.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  // Branch to the intrinsic slow path when the byte array is null.
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  // Load the entrypoint address from the thread register (TR) into T9 and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Jalr(T9);
  __ Nop();  // Fill the JALR delay slot.
  // Associate this runtime call site with the invoke's dex PC.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}
1682
Roland Levillaincc3839c2016-02-29 16:23:48 +00001683// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001684void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
1685 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1686 LocationSummary::kCall,
1687 kIntrinsified);
1688 InvokeRuntimeCallingConvention calling_convention;
1689 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1690 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1691 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1692 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1693 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1694}
1695
// Emits a call to the pAllocStringFromChars quick entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // NOTE(review): presumably consumed by the '__' assembler macro in this
  // region of the file — otherwise unused.
  Mips64Assembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  // Load the entrypoint address from the thread register (TR) into T9 and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ Jalr(T9);
  __ Nop();  // Fill the JALR delay slot.
  // Associate this runtime call site with the invoke's dex PC.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1715
Roland Levillainf969a202016-03-09 16:14:00 +00001716// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001717void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
1718 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1719 LocationSummary::kCall,
1720 kIntrinsified);
1721 InvokeRuntimeCallingConvention calling_convention;
1722 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Chris Larsen9701c2e2015-09-04 17:22:47 -07001723 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1724 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1725}
1726
// Emits a call to the pAllocStringFromString quick entrypoint; a null source
// string is diverted to the intrinsic slow path.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  // NOTE(review): presumably consumed by the '__' assembler macro in this
  // region of the file — otherwise unused.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  // Branch to the intrinsic slow path when the source string is null.
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  // Load the entrypoint address from the thread register (TR) into T9 and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Jalr(T9);
  __ Nop();  // Fill the JALR delay slot.
  // Associate this runtime call site with the invoke's dex PC.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}
1747
// Shared code for Float.isInfinite and Double.isInfinite: classify the input
// with the MIPS64 CLASS.fmt instruction and test its infinity bits.
static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // Write the classification bit mask of the operand into FTMP
  // (CLASS.D for doubles, CLASS.S for floats).
  if (is64bit) {
    __ ClassD(FTMP, in);
  } else {
    __ ClassS(FTMP, in);
  }
  // Move the mask to a GPR and keep only the +infinity/-infinity bits.
  __ Mfc1(out, FTMP);
  __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
  // Booleanize: out = (out != 0) ? 1 : 0.
  __ Sltu(out, ZERO, out);
}
1763
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // FP register in, integer (boolean) register out.
  CreateFPToIntLocations(arena_, invoke);
}
1768
// boolean java.lang.Float.isInfinite(float)
void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
1772
// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // FP register in, integer (boolean) register out.
  CreateFPToIntLocations(arena_, invoke);
}
1777
// boolean java.lang.Double.isInfinite(double)
void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1781
// Intrinsics below are recognized but have no MIPS64-specific implementation
// yet (see the UNIMPLEMENTED_INTRINSIC macro definition for the fallback).
UNIMPLEMENTED_INTRINSIC(MIPS64, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathRoundFloat)

UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)

// java.lang.Math transcendental functions.
UNIMPLEMENTED_INTRINSIC(MIPS64, MathCos)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathSin)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAcos)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAsin)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAtan)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAtan2)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathCbrt)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathCosh)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathExp)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathExpm1)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathHypot)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathLog)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathLog10)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathSinh)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathTan)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathTanh)

// Integer/Long bit-manipulation helpers.
UNIMPLEMENTED_INTRINSIC(MIPS64, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(MIPS64, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(MIPS64, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(MIPS64, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07001821
1822#undef __
1823
1824} // namespace mips64
1825} // namespace art