blob: 9a9ae714bc64849ddf9d5e36152e1430dfe36179 [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
37IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
Vladimir Markoca6fff82017-10-03 14:49:14 +010038 : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
Chris Larsen3039e382015-08-26 07:54:08 -070039}
40
41Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
42 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
43}
44
45ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
Vladimir Markoca6fff82017-10-03 14:49:14 +010046 return codegen_->GetGraph()->GetAllocator();
Chris Larsen3039e382015-08-26 07:54:08 -070047}
48
Lena Djokic0d2cab52018-03-06 15:20:45 +010049inline bool IntrinsicCodeGeneratorMIPS64::HasMsa() const {
50 return codegen_->GetInstructionSetFeatures().HasMsa();
51}
52
#define __ codegen->GetAssembler()->

// Moves an intrinsic's result between the runtime return register (V0 for
// integral/reference results, F0 for floating-point results) and `trg`, the
// location the intrinsified code expects. Used by the intrinsic slow path
// after the call to the managed fallback implementation.
static void MoveFromReturnRegister(Location trg,
                                   DataType::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  // An invalid target location means the intrinsic returns void.
  if (!trg.IsValid()) {
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      // NOTE(review): assuming Mips64Assembler::Move(rd, rs) writes rd, this
      // emits V0 <- trg_reg, which is the opposite of what the function name
      // suggests ("move FROM the return register"). It is only reached when
      // the out register differs from V0 — confirm the operand order against
      // assembler_mips64.h and the other back ends' MoveFromReturnRegister.
      __ Move(V0, trg_reg);
    }
  } else {
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      // Same operand-order caveat as the integral case above.
      if (type == DataType::Type::kFloat32) {
        __ MovS(F0, trg_reg);
      } else {
        __ MovD(F0, trg_reg);
      }
    }
  }
}

// Moves the invoke's actual arguments into the positions required by the
// regular MIPS64 dex calling convention, so the managed fallback can be
// invoked like an ordinary call.
static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
86
// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve all live registers across the call to the managed code.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Shuffle the intrinsic's operands into calling-convention positions.
    MoveArguments(invoke_, codegen);

    // Call the managed implementation, passing A0 as the method-load location.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Branch back to the code following the intrinsic's fast path.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};

#undef __
139
Chris Larsen3039e382015-08-26 07:54:08 -0700140bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
141 Dispatch(invoke);
142 LocationSummary* res = invoke->GetLocations();
143 return res != nullptr && res->Intrinsified();
144}
145
146#define __ assembler->
147
Vladimir Markoca6fff82017-10-03 14:49:14 +0100148static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
149 LocationSummary* locations =
150 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700151 locations->SetInAt(0, Location::RequiresFpuRegister());
152 locations->SetOut(Location::RequiresRegister());
153}
154
155static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
156 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
157 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
158
159 if (is64bit) {
160 __ Dmfc1(out, in);
161 } else {
162 __ Mfc1(out, in);
163 }
164}
165
166// long java.lang.Double.doubleToRawLongBits(double)
167void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100168 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700169}
170
171void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000172 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700173}
174
175// int java.lang.Float.floatToRawIntBits(float)
176void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100177 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700178}
179
180void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000181 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700182}
183
Vladimir Markoca6fff82017-10-03 14:49:14 +0100184static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
185 LocationSummary* locations =
186 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700187 locations->SetInAt(0, Location::RequiresRegister());
188 locations->SetOut(Location::RequiresFpuRegister());
189}
190
191static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
192 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
193 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
194
195 if (is64bit) {
196 __ Dmtc1(in, out);
197 } else {
198 __ Mtc1(in, out);
199 }
200}
201
202// double java.lang.Double.longBitsToDouble(long)
203void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100204 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700205}
206
207void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000208 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700209}
210
211// float java.lang.Float.intBitsToFloat(int)
212void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100213 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700214}
215
216void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000217 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700218}
219
Vladimir Markoca6fff82017-10-03 14:49:14 +0100220static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
221 LocationSummary* locations =
222 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700223 locations->SetInAt(0, Location::RequiresRegister());
224 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
225}
226
227static void GenReverseBytes(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100228 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700229 Mips64Assembler* assembler) {
230 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
231 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
232
233 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100234 case DataType::Type::kInt16:
Chris Larsen3039e382015-08-26 07:54:08 -0700235 __ Dsbh(out, in);
236 __ Seh(out, out);
237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100238 case DataType::Type::kInt32:
Chris Larsen3039e382015-08-26 07:54:08 -0700239 __ Rotr(out, in, 16);
240 __ Wsbh(out, out);
241 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100242 case DataType::Type::kInt64:
Chris Larsen3039e382015-08-26 07:54:08 -0700243 __ Dsbh(out, in);
244 __ Dshd(out, out);
245 break;
246 default:
247 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
248 UNREACHABLE();
249 }
250}
251
252// int java.lang.Integer.reverseBytes(int)
253void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100254 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700255}
256
257void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100258 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700259}
260
261// long java.lang.Long.reverseBytes(long)
262void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100263 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700264}
265
266void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100267 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700268}
269
270// short java.lang.Short.reverseBytes(short)
271void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100272 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700273}
274
275void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100276 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700277}
278
Chris Larsen81284372015-10-21 15:28:53 -0700279static void GenNumberOfLeadingZeroes(LocationSummary* locations,
280 bool is64bit,
281 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700282 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
283 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
284
285 if (is64bit) {
286 __ Dclz(out, in);
287 } else {
288 __ Clz(out, in);
289 }
290}
291
292// int java.lang.Integer.numberOfLeadingZeros(int i)
293void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100294 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700295}
296
297void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000298 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700299}
300
301// int java.lang.Long.numberOfLeadingZeros(long i)
302void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100303 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700304}
305
306void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000307 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700308}
309
Chris Larsen81284372015-10-21 15:28:53 -0700310static void GenNumberOfTrailingZeroes(LocationSummary* locations,
311 bool is64bit,
312 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700313 Location in = locations->InAt(0);
314 Location out = locations->Out();
315
316 if (is64bit) {
317 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
318 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
319 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 } else {
322 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
323 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
324 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
325 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
326 }
327}
328
329// int java.lang.Integer.numberOfTrailingZeros(int i)
330void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100331 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700332}
333
334void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000335 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700336}
337
338// int java.lang.Long.numberOfTrailingZeros(long i)
339void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100340 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700341}
342
343void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000344 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700345}
346
347static void GenReverse(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100348 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700349 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100350 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen3039e382015-08-26 07:54:08 -0700351
352 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
353 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100355 if (type == DataType::Type::kInt32) {
Chris Larsen3039e382015-08-26 07:54:08 -0700356 __ Rotr(out, in, 16);
357 __ Wsbh(out, out);
358 __ Bitswap(out, out);
359 } else {
360 __ Dsbh(out, in);
361 __ Dshd(out, out);
362 __ Dbitswap(out, out);
363 }
364}
365
366// int java.lang.Integer.reverse(int)
367void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100368 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700369}
370
371void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100372 GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700373}
374
375// long java.lang.Long.reverse(long)
376void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100377 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700378}
379
380void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100381 GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700382}
383
Vladimir Markoca6fff82017-10-03 14:49:14 +0100384static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
385 LocationSummary* locations =
386 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700387 locations->SetInAt(0, Location::RequiresFpuRegister());
388 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
389}
390
// Emits a population count (Integer/Long.bitCount). With MSA available the
// hardware pcnt.w/pcnt.d instructions are used; otherwise a branch-free
// bit-twiddling ladder computes the count in general-purpose registers.
static void GenBitCount(LocationSummary* locations,
                        const DataType::Type type,
                        const bool hasMsa,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.
  if (hasMsa) {
    // Move the value to the SIMD unit, count with pcnt, and move it back.
    if (type == DataType::Type::kInt32) {
      __ Mtc1(in, FTMP);
      __ PcntW(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Mfc1(out, FTMP);
    } else {
      __ Dmtc1(in, FTMP);
      __ PcntD(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Dmfc1(out, FTMP);
    }
  } else {
    if (type == DataType::Type::kInt32) {
      // v = v - ((v >> 1) & 0x55555555);
      __ Srl(TMP, in, 1);
      __ LoadConst32(AT, 0x55555555);
      __ And(TMP, TMP, AT);
      __ Subu(TMP, in, TMP);
      // v = (v & 0x33333333) + ((v >> 2) & 0x33333333);
      __ LoadConst32(AT, 0x33333333);
      __ And(out, TMP, AT);
      __ Srl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Addu(TMP, out, TMP);
      // v = (v + (v >> 4)) & 0x0F0F0F0F;
      __ Srl(out, TMP, 4);
      __ Addu(out, out, TMP);
      __ LoadConst32(AT, 0x0F0F0F0F);
      __ And(out, out, AT);
      // count = (v * 0x01010101) >> 24;
      __ LoadConst32(TMP, 0x01010101);
      __ MulR6(out, out, TMP);
      __ Srl(out, out, 24);
    } else {
      // Same ladder with 64-bit masks; the final shift is 56 bits
      // (dsrl32 shifts by its immediate plus 32).
      __ Dsrl(TMP, in, 1);
      __ LoadConst64(AT, 0x5555555555555555L);
      __ And(TMP, TMP, AT);
      __ Dsubu(TMP, in, TMP);
      __ LoadConst64(AT, 0x3333333333333333L);
      __ And(out, TMP, AT);
      __ Dsrl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Daddu(TMP, out, TMP);
      __ Dsrl(out, TMP, 4);
      __ Daddu(out, out, TMP);
      __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
      __ And(out, out, AT);
      __ LoadConst64(TMP, 0x0101010101010101L);
      __ Dmul(out, out, TMP);
      __ Dsrl32(out, out, 24);
    }
  }
}

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, HasMsa(), GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, HasMsa(), GetAssembler());
}
488
Chris Larsen0b7ac982015-09-04 12:54:28 -0700489// double java.lang.Math.sqrt(double)
490void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100491 CreateFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700492}
493
494void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
495 LocationSummary* locations = invoke->GetLocations();
496 Mips64Assembler* assembler = GetAssembler();
497 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
498 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
499
500 __ SqrtD(out, in);
501}
502
Vladimir Markoca6fff82017-10-03 14:49:14 +0100503static void CreateFPToFP(ArenaAllocator* allocator,
Chris Larsen81284372015-10-21 15:28:53 -0700504 HInvoke* invoke,
505 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100506 LocationSummary* locations =
507 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700508 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700509 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700510}
511
512// double java.lang.Math.rint(double)
513void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100514 CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700515}
516
517void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
518 LocationSummary* locations = invoke->GetLocations();
519 Mips64Assembler* assembler = GetAssembler();
520 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
521 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
522
523 __ RintD(out, in);
524}
525
526// double java.lang.Math.floor(double)
527void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100528 CreateFPToFP(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700529}
530
Chris Larsen14500822015-10-01 11:35:18 -0700531const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
532 kPositiveInfinity |
533 kNegativeZero |
534 kNegativeInfinity |
535 kQuietNaN |
536 kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700537
Chris Larsen81284372015-10-21 15:28:53 -0700538enum FloatRoundingMode {
539 kFloor,
540 kCeil,
541};
542
// Emits Math.floor/Math.ceil for doubles: round via floor.l.d/ceil.l.d
// (double -> int64), then convert back to double. Inputs classified as
// zero/infinity/NaN, and roundings that are not representable in an int64,
// return the input unchanged, per the comments below.
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // The sequence below writes `out` before it is done reading `in`.
  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //         // exception handling code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         //         or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
598
Chris Larsen81284372015-10-21 15:28:53 -0700599void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
600 GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
601}
602
Chris Larsen0b7ac982015-09-04 12:54:28 -0700603// double java.lang.Math.ceil(double)
604void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100605 CreateFPToFP(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700606}
607
608void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
Chris Larsen81284372015-10-21 15:28:53 -0700609 GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700610}
611
// Emits Math.round for float (-> int) or double (-> long): floor the input,
// then add one when the remaining fractional part is >= 0.5. When floor
// saturates to MIN_VALUE/MAX_VALUE the saturated value is returned as-is.
static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  // Temp FPU register used to hold the 0.5 constant.
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //     TMP = ((in - out) >= 0.5) ? 1 : 0;
  //     return out += TMP;
  // }
  // return out;

  // out = floor(in);
  if (type == DataType::Type::kFloat64) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  if (type == DataType::Type::kFloat64) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           //         or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);       // TMP = 0 if out = Int.MIN_VALUE
                               //         or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  // (cmp.le.fmt writes an all-ones mask on true, hence -1.)
  if (type == DataType::Type::kFloat64) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP.  (Subtracting the -1 mask adds one.)
  if (type == DataType::Type::kFloat64) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}
685
686// int java.lang.Math.round(float)
687void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100688 LocationSummary* locations =
689 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700690 locations->SetInAt(0, Location::RequiresFpuRegister());
691 locations->AddTemp(Location::RequiresFpuRegister());
692 locations->SetOut(Location::RequiresRegister());
693}
694
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  // Delegate to the shared rounding helper, specialized for 32-bit floats.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
}
698
699// long java.lang.Math.round(double)
700void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100701 LocationSummary* locations =
702 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700703 locations->SetInAt(0, Location::RequiresFpuRegister());
704 locations->AddTemp(Location::RequiresFpuRegister());
705 locations->SetOut(Location::RequiresRegister());
706}
707
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  // Delegate to the shared rounding helper, specialized for 64-bit doubles.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
}
711
Chris Larsen70fb1f42015-09-04 10:15:27 -0700712// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  // One GPR in (the raw 64-bit address), one GPR out (the loaded value).
  CreateIntToIntLocations(allocator_, invoke);
}
716
717void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
718 Mips64Assembler* assembler = GetAssembler();
719 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
720 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
721
722 __ Lb(out, adr, 0);
723}
724
725// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  // One GPR in (the raw 64-bit address), one GPR out (the loaded value).
  CreateIntToIntLocations(allocator_, invoke);
}
729
730void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
731 Mips64Assembler* assembler = GetAssembler();
732 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
733 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
734
735 __ Lh(out, adr, 0);
736}
737
738// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // One GPR in (the raw 64-bit address), one GPR out (the loaded value).
  CreateIntToIntLocations(allocator_, invoke);
}
742
743void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
744 Mips64Assembler* assembler = GetAssembler();
745 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
746 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
747
748 __ Lw(out, adr, 0);
749}
750
751// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // One GPR in (the raw 64-bit address), one GPR out (the loaded value).
  CreateIntToIntLocations(allocator_, invoke);
}
755
756void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
757 Mips64Assembler* assembler = GetAssembler();
758 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
759 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
760
761 __ Ld(out, adr, 0);
762}
763
Vladimir Markoca6fff82017-10-03 14:49:14 +0100764static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
765 LocationSummary* locations =
766 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700767 locations->SetInAt(0, Location::RequiresRegister());
768 locations->SetInAt(1, Location::RequiresRegister());
769}
770
771// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  // Two GPR inputs: raw 64-bit address and the value to store.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
775
776void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
777 Mips64Assembler* assembler = GetAssembler();
778 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
779 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
780
781 __ Sb(val, adr, 0);
782}
783
784// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  // Two GPR inputs: raw 64-bit address and the value to store.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
788
789void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
790 Mips64Assembler* assembler = GetAssembler();
791 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
792 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
793
794 __ Sh(val, adr, 0);
795}
796
797// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  // Two GPR inputs: raw 64-bit address and the value to store.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
801
802void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
803 Mips64Assembler* assembler = GetAssembler();
804 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
805 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
806
807 __ Sw(val, adr, 00);
808}
809
810// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  // Two GPR inputs: raw 64-bit address and the value to store.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
814
815void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
816 Mips64Assembler* assembler = GetAssembler();
817 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
818 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
819
820 __ Sd(val, adr, 0);
821}
822
Chris Larsen49e55392015-09-04 16:04:03 -0700823// Thread java.lang.Thread.currentThread()
824void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100825 LocationSummary* locations =
826 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen49e55392015-09-04 16:04:03 -0700827 locations->SetOut(Location::RequiresRegister());
828}
829
void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Load the java.lang.Thread peer from the current art::Thread (held in the
  // TR register). The zero-extending (unsigned word) load is used because the
  // stored reference is 32 bits wide.
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
839
Vladimir Markoca6fff82017-10-03 14:49:14 +0100840static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
Alexey Frunze15958152017-02-09 19:08:30 -0800841 HInvoke* invoke,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100842 DataType::Type type) {
Alexey Frunze15958152017-02-09 19:08:30 -0800843 bool can_call = kEmitCompilerReadBarrier &&
844 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
845 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Vladimir Markoca6fff82017-10-03 14:49:14 +0100846 LocationSummary* locations =
847 new (allocator) LocationSummary(invoke,
848 can_call
849 ? LocationSummary::kCallOnSlowPath
850 : LocationSummary::kNoCall,
851 kIntrinsified);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700852 if (can_call && kUseBakerReadBarrier) {
853 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
854 }
Chris Larsen1360ada2015-09-04 23:38:16 -0700855 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
856 locations->SetInAt(1, Location::RequiresRegister());
857 locations->SetInAt(2, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -0800858 locations->SetOut(Location::RequiresRegister(),
859 (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100860 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze15958152017-02-09 19:08:30 -0800861 // We need a temporary register for the read barrier marking slow
862 // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
863 locations->AddTemp(Location::RequiresRegister());
864 }
Chris Larsen1360ada2015-09-04 23:38:16 -0700865}
866
Alexey Frunze15958152017-02-09 19:08:30 -0800867// Note that the caller must supply a properly aligned memory address.
868// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Chris Larsen1360ada2015-09-04 23:38:16 -0700869static void GenUnsafeGet(HInvoke* invoke,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100870 DataType::Type type,
Chris Larsen1360ada2015-09-04 23:38:16 -0700871 bool is_volatile,
872 CodeGeneratorMIPS64* codegen) {
873 LocationSummary* locations = invoke->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100874 DCHECK((type == DataType::Type::kInt32) ||
875 (type == DataType::Type::kInt64) ||
876 (type == DataType::Type::kReference)) << type;
Chris Larsen1360ada2015-09-04 23:38:16 -0700877 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -0800878 // Target register.
879 Location trg_loc = locations->Out();
880 GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
Chris Larsen1360ada2015-09-04 23:38:16 -0700881 // Object pointer.
Alexey Frunze15958152017-02-09 19:08:30 -0800882 Location base_loc = locations->InAt(1);
883 GpuRegister base = base_loc.AsRegister<GpuRegister>();
Chris Larsen1360ada2015-09-04 23:38:16 -0700884 // Long offset.
Alexey Frunze15958152017-02-09 19:08:30 -0800885 Location offset_loc = locations->InAt(2);
886 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen1360ada2015-09-04 23:38:16 -0700887
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100888 if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
Alexey Frunze15958152017-02-09 19:08:30 -0800889 __ Daddu(TMP, base, offset);
Chris Larsen1360ada2015-09-04 23:38:16 -0700890 }
Alexey Frunze15958152017-02-09 19:08:30 -0800891
Chris Larsen1360ada2015-09-04 23:38:16 -0700892 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100893 case DataType::Type::kInt64:
Alexey Frunze15958152017-02-09 19:08:30 -0800894 __ Ld(trg, TMP, 0);
895 if (is_volatile) {
896 __ Sync(0);
897 }
898 break;
899
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100900 case DataType::Type::kInt32:
Chris Larsen1360ada2015-09-04 23:38:16 -0700901 __ Lw(trg, TMP, 0);
Alexey Frunze15958152017-02-09 19:08:30 -0800902 if (is_volatile) {
903 __ Sync(0);
904 }
Chris Larsen1360ada2015-09-04 23:38:16 -0700905 break;
906
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100907 case DataType::Type::kReference:
Alexey Frunze15958152017-02-09 19:08:30 -0800908 if (kEmitCompilerReadBarrier) {
909 if (kUseBakerReadBarrier) {
910 Location temp = locations->GetTemp(0);
911 codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
912 trg_loc,
913 base,
914 /* offset */ 0U,
915 /* index */ offset_loc,
916 TIMES_1,
917 temp,
918 /* needs_null_check */ false);
919 if (is_volatile) {
920 __ Sync(0);
921 }
922 } else {
923 __ Lwu(trg, TMP, 0);
924 if (is_volatile) {
925 __ Sync(0);
926 }
927 codegen->GenerateReadBarrierSlow(invoke,
928 trg_loc,
929 trg_loc,
930 base_loc,
931 /* offset */ 0U,
932 /* index */ offset_loc);
933 }
934 } else {
935 __ Lwu(trg, TMP, 0);
936 if (is_volatile) {
937 __ Sync(0);
938 }
939 __ MaybeUnpoisonHeapReference(trg);
940 }
Chris Larsen1360ada2015-09-04 23:38:16 -0700941 break;
942
943 default:
944 LOG(FATAL) << "Unsupported op size " << type;
945 UNREACHABLE();
946 }
947}
948
949// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // (receiver unused, object, offset) -> int in a GPR.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
953
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Plain (non-volatile) 32-bit load: no memory barrier emitted.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
957
958// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // (receiver unused, object, offset) -> int in a GPR.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
962
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit load: GenUnsafeGet emits a SYNC after the load.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
}
966
967// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // (receiver unused, object, offset) -> long in a GPR.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
971
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Plain (non-volatile) 64-bit load: no memory barrier emitted.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
975
976// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // (receiver unused, object, offset) -> long in a GPR.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
980
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit load: GenUnsafeGet emits a SYNC after the load.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
984
985// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference getter: may need a read-barrier slow path and an extra temp.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
989
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Plain (non-volatile) reference load, with read barrier if enabled.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
993
994// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Reference getter: may need a read-barrier slow path and an extra temp.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
998
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load: GenUnsafeGet emits a SYNC after the load.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}
1002
Vladimir Markoca6fff82017-10-03 14:49:14 +01001003static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
1004 LocationSummary* locations =
1005 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -07001006 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1007 locations->SetInAt(1, Location::RequiresRegister());
1008 locations->SetInAt(2, Location::RequiresRegister());
1009 locations->SetInAt(3, Location::RequiresRegister());
1010}
1011
Alexey Frunze15958152017-02-09 19:08:30 -08001012// Note that the caller must supply a properly aligned memory address.
1013// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Chris Larsen1360ada2015-09-04 23:38:16 -07001014static void GenUnsafePut(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001015 DataType::Type type,
Chris Larsen1360ada2015-09-04 23:38:16 -07001016 bool is_volatile,
1017 bool is_ordered,
1018 CodeGeneratorMIPS64* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001019 DCHECK((type == DataType::Type::kInt32) ||
1020 (type == DataType::Type::kInt64) ||
1021 (type == DataType::Type::kReference));
Chris Larsen1360ada2015-09-04 23:38:16 -07001022 Mips64Assembler* assembler = codegen->GetAssembler();
1023 // Object pointer.
1024 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
1025 // Long offset.
1026 GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
1027 GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();
1028
1029 __ Daddu(TMP, base, offset);
1030 if (is_volatile || is_ordered) {
1031 __ Sync(0);
1032 }
1033 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001034 case DataType::Type::kInt32:
1035 case DataType::Type::kReference:
1036 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001037 __ PoisonHeapReference(AT, value);
1038 __ Sw(AT, TMP, 0);
1039 } else {
1040 __ Sw(value, TMP, 0);
1041 }
Chris Larsen1360ada2015-09-04 23:38:16 -07001042 break;
1043
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001044 case DataType::Type::kInt64:
Chris Larsen1360ada2015-09-04 23:38:16 -07001045 __ Sd(value, TMP, 0);
1046 break;
1047
1048 default:
1049 LOG(FATAL) << "Unsupported op size " << type;
1050 UNREACHABLE();
1051 }
1052 if (is_volatile) {
1053 __ Sync(0);
1054 }
1055
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001056 if (type == DataType::Type::kReference) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001057 bool value_can_be_null = true; // TODO: Worth finding out this information?
1058 codegen->MarkGCCard(base, value, value_can_be_null);
Chris Larsen1360ada2015-09-04 23:38:16 -07001059 }
1060}
1061
1062// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // (receiver unused, object, offset, int value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1066
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain 32-bit store: no barriers.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1074
1075// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // (receiver unused, object, offset, int value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1079
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered 32-bit store: SYNC before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1087
1088// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // (receiver unused, object, offset, int value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1092
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Volatile 32-bit store: SYNC before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1100
1101// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // (receiver unused, object, offset, reference value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1105
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Plain reference store: no barriers; GenUnsafePut marks the GC card.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1113
1114// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // (receiver unused, object, offset, reference value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1118
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Ordered reference store: SYNC before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1126
1127// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // (receiver unused, object, offset, reference value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1131
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Volatile reference store: SYNC before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1139
1140// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // (receiver unused, object, offset, long value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1144
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // Plain 64-bit store: no barriers.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1152
1153// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // (receiver unused, object, offset, long value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1157
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Ordered 64-bit store: SYNC before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1165
1166// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // (receiver unused, object, offset, long value), no output.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1170
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit store: SYNC before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1178
Vladimir Markoca6fff82017-10-03 14:49:14 +01001179static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001180 bool can_call = kEmitCompilerReadBarrier &&
1181 kUseBakerReadBarrier &&
1182 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001183 LocationSummary* locations =
1184 new (allocator) LocationSummary(invoke,
1185 can_call
1186 ? LocationSummary::kCallOnSlowPath
1187 : LocationSummary::kNoCall,
1188 kIntrinsified);
Chris Larsen36427492015-10-23 02:19:38 -07001189 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1190 locations->SetInAt(1, Location::RequiresRegister());
1191 locations->SetInAt(2, Location::RequiresRegister());
1192 locations->SetInAt(3, Location::RequiresRegister());
1193 locations->SetInAt(4, Location::RequiresRegister());
Chris Larsen36427492015-10-23 02:19:38 -07001194 locations->SetOut(Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001195
1196 // Temporary register used in CAS by (Baker) read barrier.
1197 if (can_call) {
1198 locations->AddTemp(Location::RequiresRegister());
1199 }
Chris Larsen36427492015-10-23 02:19:38 -07001200}
1201
Alexey Frunze15958152017-02-09 19:08:30 -08001202// Note that the caller must supply a properly aligned memory address.
1203// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001204static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
Chris Larsen36427492015-10-23 02:19:38 -07001205 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001206 LocationSummary* locations = invoke->GetLocations();
Chris Larsen36427492015-10-23 02:19:38 -07001207 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001208 Location offset_loc = locations->InAt(2);
1209 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001210 GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
1211 GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001212 Location out_loc = locations->Out();
1213 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001214
1215 DCHECK_NE(base, out);
1216 DCHECK_NE(offset, out);
1217 DCHECK_NE(expected, out);
1218
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001219 if (type == DataType::Type::kReference) {
Alexey Frunze15958152017-02-09 19:08:30 -08001220 // The only read barrier implementation supporting the
1221 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1222 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1223
1224 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1225 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001226 bool value_can_be_null = true; // TODO: Worth finding out this information?
1227 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001228
1229 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1230 Location temp = locations->GetTemp(0);
1231 // Need to make sure the reference stored in the field is a to-space
1232 // one before attempting the CAS or the CAS could fail incorrectly.
1233 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1234 invoke,
1235 out_loc, // Unused, used only as a "temporary" within the read barrier.
1236 base,
1237 /* offset */ 0u,
1238 /* index */ offset_loc,
1239 ScaleFactor::TIMES_1,
1240 temp,
1241 /* needs_null_check */ false,
1242 /* always_update_field */ true);
1243 }
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001244 }
1245
Alexey Frunzec061de12017-02-14 13:27:23 -08001246 Mips64Label loop_head, exit_loop;
1247 __ Daddu(TMP, base, offset);
1248
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001249 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001250 __ PoisonHeapReference(expected);
1251 // Do not poison `value`, if it is the same register as
1252 // `expected`, which has just been poisoned.
1253 if (value != expected) {
1254 __ PoisonHeapReference(value);
1255 }
1256 }
1257
Chris Larsen36427492015-10-23 02:19:38 -07001258 // do {
1259 // tmp_value = [tmp_ptr] - expected;
1260 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1261 // result = tmp_value != 0;
1262
Chris Larsen36427492015-10-23 02:19:38 -07001263 __ Sync(0);
1264 __ Bind(&loop_head);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001265 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001266 __ Lld(out, TMP);
1267 } else {
Roland Levillain391b8662015-12-18 11:43:38 +00001268 // Note: We will need a read barrier here, when read barrier
1269 // support is added to the MIPS64 back end.
Chris Larsen36427492015-10-23 02:19:38 -07001270 __ Ll(out, TMP);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001271 if (type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001272 // The LL instruction sign-extends the 32-bit value, but
1273 // 32-bit references must be zero-extended. Zero-extend `out`.
1274 __ Dext(out, out, 0, 32);
1275 }
Chris Larsen36427492015-10-23 02:19:38 -07001276 }
1277 __ Dsubu(out, out, expected); // If we didn't get the 'expected'
1278 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1279 __ Beqzc(out, &exit_loop); // return.
1280 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1281 // If we use 'value' directly, we would lose 'value'
1282 // in the case that the store fails. Whether the
1283 // store succeeds, or fails, it will load the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001284 // correct Boolean value into the 'out' register.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001286 __ Scd(out, TMP);
1287 } else {
1288 __ Sc(out, TMP);
1289 }
1290 __ Beqzc(out, &loop_head); // If we couldn't do the read-modify-write
1291 // cycle atomically then retry.
1292 __ Bind(&exit_loop);
1293 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001294
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001295 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001296 __ UnpoisonHeapReference(expected);
1297 // Do not unpoison `value`, if it is the same register as
1298 // `expected`, which has just been unpoisoned.
1299 if (value != expected) {
1300 __ UnpoisonHeapReference(value);
1301 }
1302 }
Chris Larsen36427492015-10-23 02:19:38 -07001303}
1304
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // Delegate to the location setup shared by all Unsafe CAS intrinsics in
  // this file (also used by VisitUnsafeCASLong/VisitUnsafeCASObject below).
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1309
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // Emit the shared LL/SC compare-and-swap loop for a 32-bit value.
  GenCas(invoke, DataType::Type::kInt32, codegen_);
}
1313
// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // Same register layout as the other Unsafe CAS intrinsics.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1318
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // Emit the shared LL/SC compare-and-swap loop for a 64-bit value (uses
  // the doubleword LLD/SCD pair inside GenCas).
  GenCas(invoke, DataType::Type::kInt64, codegen_);
}
1322
1323// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1324void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001325 // The only read barrier implementation supporting the
1326 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1327 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1328 return;
1329 }
1330
Vladimir Markoca6fff82017-10-03 14:49:14 +01001331 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001332}
1333
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  // (The builder above only creates locations under that configuration,
  // so reaching this point implies the invariant holds.)
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  // Emit the shared LL/SC compare-and-swap loop for a reference value.
  GenCas(invoke, DataType::Type::kReference, codegen_);
}
1341
Chris Larsen9701c2e2015-09-04 17:22:47 -07001342// int java.lang.String.compareTo(String anotherString)
1343void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001344 LocationSummary* locations = new (allocator_) LocationSummary(
1345 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001346 InvokeRuntimeCallingConvention calling_convention;
1347 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1348 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001349 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001350 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1351}
1352
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  // Bail out to the slow path if the argument string is null.
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  // Delegate the actual comparison to the StringCompareTo entrypoint.
  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
1369
Chris Larsen972d6d72015-10-20 11:29:12 -07001370// boolean java.lang.String.equals(Object anObject)
1371void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoda283052017-11-07 21:17:24 +00001372 if (kEmitCompilerReadBarrier &&
1373 !StringEqualsOptimizations(invoke).GetArgumentIsString() &&
1374 !StringEqualsOptimizations(invoke).GetNoReadBarrierForStringClass()) {
1375 // No support for this odd case (String class is moveable, not in the boot image).
1376 return;
1377 }
1378
Vladimir Markoca6fff82017-10-03 14:49:14 +01001379 LocationSummary* locations =
1380 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001381 locations->SetInAt(0, Location::RequiresRegister());
1382 locations->SetInAt(1, Location::RequiresRegister());
1383 locations->SetOut(Location::RequiresRegister());
1384
1385 // Temporary registers to store lengths of strings and for calculations.
1386 locations->AddTemp(Location::RequiresRegister());
1387 locations->AddTemp(Location::RequiresRegister());
1388 locations->AddTemp(Location::RequiresRegister());
1389}
1390
// Emits an inline String.equals: short-circuits on identity/null/class/count
// mismatch, then compares the character data 8 bytes at a time.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers: use TMP and temp3 as running data
  // pointers into the two strings instead.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);   // Extract compression flag.
    __ Srl(temp1, temp1, 1);       // Extract length.
    __ Sllv(temp1, temp1, temp2);  // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1493
// Shared emitter for String.indexOf(int) and String.indexOf(int, int): guards
// against supplementary code points (> 0xFFFF), then calls the IndexOf
// runtime stub.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  // With start_at_zero a temp register was allocated by the builder;
  // otherwise scratch register TMP suffices.
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeMIPS64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != DataType::Type::kUint16) {
    // Non-constant, non-char argument: emit a runtime range check.
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());  // UTF-16 required
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
  // Compile-time check that the call matches the entrypoint's signature.
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
1540
1541// int java.lang.String.indexOf(int ch)
1542void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001543 LocationSummary* locations = new (allocator_) LocationSummary(
1544 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001545 // We have a hand-crafted assembly stub that follows the runtime
1546 // calling convention. So it's best to align the inputs accordingly.
1547 InvokeRuntimeCallingConvention calling_convention;
1548 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1549 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001550 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001551 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1552
1553 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1554 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1555}
1556
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  // The single-argument overload always searches from index 0.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ true);
}
1560
1561// int java.lang.String.indexOf(int ch, int fromIndex)
1562void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001563 LocationSummary* locations = new (allocator_) LocationSummary(
1564 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001565 // We have a hand-crafted assembly stub that follows the runtime
1566 // calling convention. So it's best to align the inputs accordingly.
1567 InvokeRuntimeCallingConvention calling_convention;
1568 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1569 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1570 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001571 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001572 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1573}
1574
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  // The start index arrives as the third runtime-convention argument, so no
  // start-index setup is needed here.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ false);
}
1578
Roland Levillaincc3839c2016-02-29 16:23:48 +00001579// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001580void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001581 LocationSummary* locations = new (allocator_) LocationSummary(
1582 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001583 InvokeRuntimeCallingConvention calling_convention;
1584 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1585 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1586 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1587 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001588 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001589 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1590}
1591
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  // Bail out to the slow path if the byte array is null.
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  // Allocation and copying are done entirely by the runtime entrypoint.
  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  // Compile-time check that the call matches the entrypoint's signature.
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1606
Roland Levillaincc3839c2016-02-29 16:23:48 +00001607// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001608void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001609 LocationSummary* locations =
1610 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001611 InvokeRuntimeCallingConvention calling_convention;
1612 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1613 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1614 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001615 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001616 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1617}
1618
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  // Compile-time check that the call matches the entrypoint's signature.
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1629
Roland Levillainf969a202016-03-09 16:14:00 +00001630// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001631void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001632 LocationSummary* locations = new (allocator_) LocationSummary(
1633 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001634 InvokeRuntimeCallingConvention calling_convention;
1635 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001636 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001637 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1638}
1639
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  // Bail out to the slow path if the source string is null.
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  // Allocation and copying are done entirely by the runtime entrypoint.
  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  // Compile-time check that the call matches the entrypoint's signature.
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1654
Chris Larsenddec7f92016-02-16 12:35:04 -08001655static void GenIsInfinite(LocationSummary* locations,
1656 bool is64bit,
1657 Mips64Assembler* assembler) {
1658 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
1659 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1660
1661 if (is64bit) {
1662 __ ClassD(FTMP, in);
1663 } else {
1664 __ ClassS(FTMP, in);
1665 }
1666 __ Mfc1(out, FTMP);
1667 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1668 __ Sltu(out, ZERO, out);
1669}
1670
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // FP register input, integer (boolean) register output; see GenIsInfinite.
  CreateFPToIntLocations(allocator_, invoke);
}
1675
void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Single-precision variant (CLASS.S).
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
1679
// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // FP register input, integer (boolean) register output; see GenIsInfinite.
  CreateFPToIntLocations(allocator_, invoke);
}
1684
void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Double-precision variant (CLASS.D).
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1688
Chris Larsene3660592016-11-09 11:13:42 -08001689// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1690void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001691 LocationSummary* locations =
1692 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsene3660592016-11-09 11:13:42 -08001693 locations->SetInAt(0, Location::RequiresRegister());
1694 locations->SetInAt(1, Location::RequiresRegister());
1695 locations->SetInAt(2, Location::RequiresRegister());
1696 locations->SetInAt(3, Location::RequiresRegister());
1697 locations->SetInAt(4, Location::RequiresRegister());
1698
Chris Larsen366d4332017-03-23 09:02:56 -07001699 locations->AddTemp(Location::RequiresRegister());
1700 locations->AddTemp(Location::RequiresRegister());
1701 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001702}
1703
// Inline copy of [srcBegin, srcEnd) characters from a String into a char[],
// with a byte-to-halfword loop for compressed strings.
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address.
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Uncompressed copy loop: one 16-bit char per iteration via AT.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
1779
// static void java.lang.System.arraycopy(Object src, int srcPos,
//                                        Object dest, int destPos,
//                                        int length)
void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // These may be null when the corresponding input is not a compile-time
  // constant (the null checks below rely on that).
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // Three scratch registers for the inline copy code.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
1819
// Utility routine to verify that "length(input) - pos >= length".
// `length_input_minus_pos` already holds length(input) - pos; branches to
// `slow_path` when there are not enough items left to copy.
static void EnoughItems(Mips64Assembler* assembler,
                        GpuRegister length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS64* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      // Constant fits in slti's 16-bit signed immediate: set TMP if
      // (length(input) - pos) < length, then branch on it.
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnezc(TMP, slow_path->GetEntryLabel());
    } else {
      // Otherwise materialize the constant and use compare-and-branch.
      __ LoadConst32(TMP, length_constant);
      __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    // Non-constant length: direct register compare-and-branch.
    __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
1839
// Emits range checks for an arraycopy operand: verifies pos >= 0 and that
// `input` has at least `length` elements starting at `pos`, branching to
// `slow_path` on failure. When `length_is_input_length` is true the caller
// asserts length == length(input), so the copy can only succeed with pos == 0.
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);  // Negative constants were rejected in the LocationsBuilder.
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
1886
// Inline codegen for System.arraycopy on char[]: emits fast-path checks and a
// simple per-element copy loop; all failure/overlap cases branch to a slow
// path that performs the regular runtime call.
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  // The forward copy loop below is only correct for non-overlapping arrays.
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses:
  // base = array + data_offset + pos * sizeof(char).
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Copy one 16-bit char per iteration; count was verified non-zero above.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
1973
// Emits code for Integer/Long.highestOneBit: shifts the sign bit right by
// the number of leading zeros of "in", isolating the highest set bit.
static void GenHighestOneBit(LocationSummary* locations,
                             DataType::Type type,
                             Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == DataType::Type::kInt64) {
    // AT = Long.MIN_VALUE >>> clz(in).
    __ Dclz(TMP, in);
    __ LoadConst64(AT, INT64_C(0x8000000000000000));
    __ Dsrlv(AT, AT, TMP);
  } else {
    // AT = Integer.MIN_VALUE >>> clz(in).
    __ Clz(TMP, in);
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);
  }
  // For either value of "type", when "in" is zero, "out" should also
  // be zero. Without this extra "and" operation, when "in" is zero,
  // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
  // the MIPS logical shift operations "dsrlv", and "srlv" don't use
  // the shift amount (TMP) directly; they use either (TMP % 64) or
  // (TMP % 32), respectively.
  __ And(out, AT, in);
}
1999
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  // Shared helper; only the data type differs from the int variant above.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2017
// Emits code for Integer/Long.lowestOneBit using the classic identity
// "in & -in", which isolates the lowest set bit (and yields 0 for 0).
static void GenLowestOneBit(LocationSummary* locations,
                            DataType::Type type,
                            Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TMP = -in.
  if (type == DataType::Type::kInt64) {
    __ Dsubu(TMP, ZERO, in);
  } else {
    __ Subu(TMP, ZERO, in);
  }
  __ And(out, TMP, in);
}
2033
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  // Shared helper; only the data type differs from the int variant above.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2051
// Builds locations for a one-FP-argument intrinsic that is implemented as a
// call on the main path: input in the first FP argument register, result in
// the FP return register (double).
static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}
2060
// Builds locations for a two-FP-argument intrinsic that is implemented as a
// call on the main path: inputs in the first two FP argument registers,
// result in the FP return register (double).
static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}
2070
// Emits a runtime call to `entry` for a one-argument FP intrinsic. The
// DCHECKs confirm the LocationSummary placed the argument/result in the
// registers the runtime calling convention expects (F12 in, F0 out).
static void GenFPToFPCall(HInvoke* invoke,
                          CodeGeneratorMIPS64* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in, F12);
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
2082
// Emits a runtime call to `entry` for a two-argument FP intrinsic. The
// DCHECKs confirm the LocationSummary placed the arguments/result in the
// registers the runtime calling convention expects (F12/F13 in, F0 out).
static void GenFPFPToFPCall(HInvoke* invoke,
                            CodeGeneratorMIPS64* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in0, F12);
  FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in1, F13);
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
2096
// The java.lang.Math intrinsics below are not inlined as code sequences;
// each pair sets up FP call locations and emits a call to the matching
// quick entrypoint (see GenFPToFPCall / GenFPFPToFPCall).

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}
2150
// static double java.lang.Math.pow(double a, double b)
// (Signature comment fixed: it previously read "pow(double y, double x)",
// copy-pasted from atan2.)
void IntrinsicLocationsBuilderMIPS64::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathPow(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickPow);
}
2159
// More java.lang.Math intrinsics implemented as quick-entrypoint calls
// (see GenFPToFPCall / GenFPFPToFPCall above).

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2258
// static java.lang.Integer java.lang.Integer.valueOf(int)
// (Signature comment fixed: it previously read "long ... valueOf(long)".)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  // Shared helper decides whether the intrinsic can be applied and sets up
  // the reference return and the runtime-call argument register.
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2268
2269void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
2270 IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
2271 LocationSummary* locations = invoke->GetLocations();
2272 Mips64Assembler* assembler = GetAssembler();
2273 InstructionCodeGeneratorMIPS64* icodegen =
2274 down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());
2275
2276 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2277 InvokeRuntimeCallingConvention calling_convention;
2278 if (invoke->InputAt(0)->IsConstant()) {
2279 int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
2280 if (value >= info.low && value <= info.high) {
2281 // Just embed the j.l.Integer in the code.
2282 ScopedObjectAccess soa(Thread::Current());
2283 mirror::Object* boxed = info.cache->Get(value + (-info.low));
2284 DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
2285 uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
2286 __ LoadConst64(out, address);
2287 } else {
2288 // Allocate and initialize a new j.l.Integer.
2289 // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
2290 // JIT object table.
2291 uint32_t address =
2292 dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
2293 __ LoadConst64(calling_convention.GetRegisterAt(0), address);
2294 codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
2295 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
2296 __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
2297 // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
2298 // one.
2299 icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2300 }
2301 } else {
2302 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2303 Mips64Label allocate, done;
2304 int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;
2305
2306 // Is (info.low <= in) && (in <= info.high)?
2307 __ Addiu32(out, in, -info.low);
2308 // As unsigned quantities is out < (info.high - info.low + 1)?
2309 __ LoadConst32(AT, count);
2310 // Branch if out >= (info.high - info.low + 1).
2311 // This means that "in" is outside of the range [info.low, info.high].
2312 __ Bgeuc(out, AT, &allocate);
2313
2314 // If the value is within the bounds, load the j.l.Integer directly from the array.
2315 uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
2316 uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
2317 __ LoadConst64(TMP, data_offset + address);
2318 __ Dlsa(out, out, TMP, TIMES_4);
2319 __ Lwu(out, out, 0);
2320 __ MaybeUnpoisonHeapReference(out);
2321 __ Bc(&done);
2322
2323 __ Bind(&allocate);
2324 // Otherwise allocate and initialize a new j.l.Integer.
2325 address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
2326 __ LoadConst64(calling_convention.GetRegisterAt(0), address);
2327 codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
2328 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
2329 __ StoreToOffset(kStoreWord, in, out, info.value_offset);
2330 // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
2331 // one.
2332 icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2333 __ Bind(&done);
2334 }
2335}
2336
// static boolean java.lang.Thread.interrupted()
void IntrinsicLocationsBuilderMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  // No call and no inputs: the flag is read straight off the Thread object.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}
2343
// Reads and clears the current thread's interrupted flag. The flag is only
// cleared (with SYNC barriers around the store) when it was set; otherwise
// the code skips straight to `done` with out == 0.
void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
  int32_t offset = Thread::InterruptedOffset<kMips64PointerSize>().Int32Value();
  // out = self->interrupted_.
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  Mips64Label done;
  __ Beqzc(out, &done);
  // Flag was set: clear it, bracketed by SYNC memory barriers.
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}
2356
// static void java.lang.ref.Reference.reachabilityFence(Object ref)
void IntrinsicLocationsBuilderMIPS64::VisitReachabilityFence(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  // Any location will do: the point is only to keep the reference live.
  locations->SetInAt(0, Location::Any());
}
2362
// Intentionally empty: reachabilityFence only needs its argument to be kept
// live up to this point, which the Location::Any() input already ensures.
void IntrinsicCodeGeneratorMIPS64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
2364
Aart Bik2f9fcc92016-03-01 15:16:54 -08002365UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002366UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08002367
Aart Bikff7d89c2016-11-07 08:49:28 -08002368UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2369UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002370UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2371UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2372UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2373UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2374UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2375UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002376
Aart Bik0e54c012016-03-04 12:08:31 -08002377// 1.8.
2378UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2379UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2380UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2381UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2382UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002383
Aart Bik2f9fcc92016-03-01 15:16:54 -08002384UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002385
2386#undef __
2387
2388} // namespace mips64
2389} // namespace art