blob: d8ec8922b61b263654226af682435950c92019f9 [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
37IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
Vladimir Markoca6fff82017-10-03 14:49:14 +010038 : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
Chris Larsen3039e382015-08-26 07:54:08 -070039}
40
41Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
42 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
43}
44
45ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
Vladimir Markoca6fff82017-10-03 14:49:14 +010046 return codegen_->GetGraph()->GetAllocator();
Chris Larsen3039e382015-08-26 07:54:08 -070047}
48
Lena Djokic0d2cab52018-03-06 15:20:45 +010049inline bool IntrinsicCodeGeneratorMIPS64::HasMsa() const {
50 return codegen_->GetInstructionSetFeatures().HasMsa();
51}
52
Chris Larsen9701c2e2015-09-04 17:22:47 -070053#define __ codegen->GetAssembler()->
54
55static void MoveFromReturnRegister(Location trg,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010056 DataType::Type type,
Chris Larsen9701c2e2015-09-04 17:22:47 -070057 CodeGeneratorMIPS64* codegen) {
58 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 DCHECK_EQ(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070060 return;
61 }
62
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010063 DCHECK_NE(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070064
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010065 if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070066 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
67 if (trg_reg != V0) {
68 __ Move(V0, trg_reg);
69 }
70 } else {
71 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
72 if (trg_reg != F0) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010073 if (type == DataType::Type::kFloat32) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070074 __ MovS(F0, trg_reg);
75 } else {
76 __ MovD(F0, trg_reg);
77 }
78 }
79 }
80}
81
82static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
83 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
84 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
85}
86
87// Slow-path for fallback (calling the managed code to handle the
88// intrinsic) in an intrinsified call. This will copy the arguments
89// into the positions for a regular call.
90//
91// Note: The actual parameters are required to be in the locations
92// given by the invoke's location summary. If an intrinsic
93// modifies those locations before a slowpath call, they must be
94// restored!
95class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
96 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000097 explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
98 : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }
Chris Larsen9701c2e2015-09-04 17:22:47 -070099
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100100 void EmitNativeCode(CodeGenerator* codegen_in) override {
Chris Larsen9701c2e2015-09-04 17:22:47 -0700101 CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);
102
103 __ Bind(GetEntryLabel());
104
105 SaveLiveRegisters(codegen, invoke_->GetLocations());
106
107 MoveArguments(invoke_, codegen);
108
109 if (invoke_->IsInvokeStaticOrDirect()) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +0100110 codegen->GenerateStaticOrDirectCall(
111 invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
Chris Larsen9701c2e2015-09-04 17:22:47 -0700112 } else {
Vladimir Markoe7197bf2017-06-02 17:00:23 +0100113 codegen->GenerateVirtualCall(
114 invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
Chris Larsen9701c2e2015-09-04 17:22:47 -0700115 }
116
117 // Copy the result back to the expected output.
118 Location out = invoke_->GetLocations()->Out();
119 if (out.IsValid()) {
120 DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
121 DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
122 MoveFromReturnRegister(out, invoke_->GetType(), codegen);
123 }
124
125 RestoreLiveRegisters(codegen, invoke_->GetLocations());
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700126 __ Bc(GetExitLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -0700127 }
128
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100129 const char* GetDescription() const override { return "IntrinsicSlowPathMIPS64"; }
Chris Larsen9701c2e2015-09-04 17:22:47 -0700130
131 private:
132 // The instruction where this slow path is happening.
133 HInvoke* const invoke_;
134
135 DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
136};
137
138#undef __
139
Chris Larsen3039e382015-08-26 07:54:08 -0700140bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
141 Dispatch(invoke);
142 LocationSummary* res = invoke->GetLocations();
143 return res != nullptr && res->Intrinsified();
144}
145
146#define __ assembler->
147
Vladimir Markoca6fff82017-10-03 14:49:14 +0100148static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
149 LocationSummary* locations =
150 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700151 locations->SetInAt(0, Location::RequiresFpuRegister());
152 locations->SetOut(Location::RequiresRegister());
153}
154
155static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
156 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
157 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
158
159 if (is64bit) {
160 __ Dmfc1(out, in);
161 } else {
162 __ Mfc1(out, in);
163 }
164}
165
166// long java.lang.Double.doubleToRawLongBits(double)
167void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100168 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700169}
170
171void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800172 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700173}
174
175// int java.lang.Float.floatToRawIntBits(float)
176void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100177 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700178}
179
180void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800181 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700182}
183
Vladimir Markoca6fff82017-10-03 14:49:14 +0100184static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
185 LocationSummary* locations =
186 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700187 locations->SetInAt(0, Location::RequiresRegister());
188 locations->SetOut(Location::RequiresFpuRegister());
189}
190
191static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
192 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
193 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
194
195 if (is64bit) {
196 __ Dmtc1(in, out);
197 } else {
198 __ Mtc1(in, out);
199 }
200}
201
202// double java.lang.Double.longBitsToDouble(long)
203void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100204 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700205}
206
207void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800208 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700209}
210
211// float java.lang.Float.intBitsToFloat(int)
212void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100213 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700214}
215
216void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800217 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700218}
219
Vladimir Markoca6fff82017-10-03 14:49:14 +0100220static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
221 LocationSummary* locations =
222 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700223 locations->SetInAt(0, Location::RequiresRegister());
224 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
225}
226
227static void GenReverseBytes(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100228 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700229 Mips64Assembler* assembler) {
230 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
231 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
232
233 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100234 case DataType::Type::kInt16:
Chris Larsen3039e382015-08-26 07:54:08 -0700235 __ Dsbh(out, in);
236 __ Seh(out, out);
237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100238 case DataType::Type::kInt32:
Chris Larsen3039e382015-08-26 07:54:08 -0700239 __ Rotr(out, in, 16);
240 __ Wsbh(out, out);
241 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100242 case DataType::Type::kInt64:
Chris Larsen3039e382015-08-26 07:54:08 -0700243 __ Dsbh(out, in);
244 __ Dshd(out, out);
245 break;
246 default:
247 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
248 UNREACHABLE();
249 }
250}
251
252// int java.lang.Integer.reverseBytes(int)
253void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100254 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700255}
256
257void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100258 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700259}
260
261// long java.lang.Long.reverseBytes(long)
262void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100263 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700264}
265
266void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100267 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700268}
269
270// short java.lang.Short.reverseBytes(short)
271void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100272 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700273}
274
275void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100276 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700277}
278
Chris Larsen81284372015-10-21 15:28:53 -0700279static void GenNumberOfLeadingZeroes(LocationSummary* locations,
280 bool is64bit,
281 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700282 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
283 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
284
285 if (is64bit) {
286 __ Dclz(out, in);
287 } else {
288 __ Clz(out, in);
289 }
290}
291
292// int java.lang.Integer.numberOfLeadingZeros(int i)
293void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100294 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700295}
296
297void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800298 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700299}
300
301// int java.lang.Long.numberOfLeadingZeros(long i)
302void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100303 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700304}
305
306void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800307 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700308}
309
Chris Larsen81284372015-10-21 15:28:53 -0700310static void GenNumberOfTrailingZeroes(LocationSummary* locations,
311 bool is64bit,
312 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700313 Location in = locations->InAt(0);
314 Location out = locations->Out();
315
316 if (is64bit) {
317 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
318 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
319 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 } else {
322 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
323 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
324 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
325 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
326 }
327}
328
329// int java.lang.Integer.numberOfTrailingZeros(int i)
330void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100331 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700332}
333
334void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800335 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700336}
337
338// int java.lang.Long.numberOfTrailingZeros(long i)
339void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100340 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700341}
342
343void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -0800344 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700345}
346
347static void GenReverse(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100348 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700349 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100350 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen3039e382015-08-26 07:54:08 -0700351
352 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
353 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100355 if (type == DataType::Type::kInt32) {
Chris Larsen3039e382015-08-26 07:54:08 -0700356 __ Rotr(out, in, 16);
357 __ Wsbh(out, out);
358 __ Bitswap(out, out);
359 } else {
360 __ Dsbh(out, in);
361 __ Dshd(out, out);
362 __ Dbitswap(out, out);
363 }
364}
365
366// int java.lang.Integer.reverse(int)
367void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100368 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700369}
370
371void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100372 GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700373}
374
375// long java.lang.Long.reverse(long)
376void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100377 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700378}
379
380void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100381 GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700382}
383
Vladimir Markoca6fff82017-10-03 14:49:14 +0100384static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
385 LocationSummary* locations =
386 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700387 locations->SetInAt(0, Location::RequiresFpuRegister());
388 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
389}
390
Chris Larsen7fda7852016-04-21 16:00:36 -0700391static void GenBitCount(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100392 const DataType::Type type,
Lena Djokic0d2cab52018-03-06 15:20:45 +0100393 const bool hasMsa,
Chris Larsen7fda7852016-04-21 16:00:36 -0700394 Mips64Assembler* assembler) {
395 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
396 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
397
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100398 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen7fda7852016-04-21 16:00:36 -0700399
400 // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
401 //
402 // A generalization of the best bit counting method to integers of
403 // bit-widths up to 128 (parameterized by type T) is this:
404 //
405 // v = v - ((v >> 1) & (T)~(T)0/3); // temp
406 // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3); // temp
407 // v = (v + (v >> 4)) & (T)~(T)0/255*15; // temp
408 // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
409 //
410 // For comparison, for 32-bit quantities, this algorithm can be executed
411 // using 20 MIPS instructions (the calls to LoadConst32() generate two
412 // machine instructions each for the values being used in this algorithm).
413 // A(n unrolled) loop-based algorithm requires 25 instructions.
414 //
415 // For a 64-bit operand this can be performed in 24 instructions compared
416 // to a(n unrolled) loop based algorithm which requires 38 instructions.
417 //
418 // There are algorithms which are faster in the cases where very few
419 // bits are set but the algorithm here attempts to minimize the total
420 // number of instructions executed even when a large number of bits
421 // are set.
Lena Djokic0d2cab52018-03-06 15:20:45 +0100422 if (hasMsa) {
423 if (type == DataType::Type::kInt32) {
424 __ Mtc1(in, FTMP);
425 __ PcntW(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
426 __ Mfc1(out, FTMP);
427 } else {
428 __ Dmtc1(in, FTMP);
429 __ PcntD(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
430 __ Dmfc1(out, FTMP);
431 }
432 } else {
433 if (type == DataType::Type::kInt32) {
434 __ Srl(TMP, in, 1);
435 __ LoadConst32(AT, 0x55555555);
436 __ And(TMP, TMP, AT);
437 __ Subu(TMP, in, TMP);
438 __ LoadConst32(AT, 0x33333333);
439 __ And(out, TMP, AT);
440 __ Srl(TMP, TMP, 2);
441 __ And(TMP, TMP, AT);
442 __ Addu(TMP, out, TMP);
443 __ Srl(out, TMP, 4);
444 __ Addu(out, out, TMP);
445 __ LoadConst32(AT, 0x0F0F0F0F);
446 __ And(out, out, AT);
447 __ LoadConst32(TMP, 0x01010101);
448 __ MulR6(out, out, TMP);
449 __ Srl(out, out, 24);
450 } else {
451 __ Dsrl(TMP, in, 1);
452 __ LoadConst64(AT, 0x5555555555555555L);
453 __ And(TMP, TMP, AT);
454 __ Dsubu(TMP, in, TMP);
455 __ LoadConst64(AT, 0x3333333333333333L);
456 __ And(out, TMP, AT);
457 __ Dsrl(TMP, TMP, 2);
458 __ And(TMP, TMP, AT);
459 __ Daddu(TMP, out, TMP);
460 __ Dsrl(out, TMP, 4);
461 __ Daddu(out, out, TMP);
462 __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
463 __ And(out, out, AT);
464 __ LoadConst64(TMP, 0x0101010101010101L);
465 __ Dmul(out, out, TMP);
466 __ Dsrl32(out, out, 24);
467 }
Chris Larsen7fda7852016-04-21 16:00:36 -0700468 }
469}
470
471// int java.lang.Integer.bitCount(int)
472void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100473 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen7fda7852016-04-21 16:00:36 -0700474}
475
476void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
Lena Djokic0d2cab52018-03-06 15:20:45 +0100477 GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, HasMsa(), GetAssembler());
Chris Larsen7fda7852016-04-21 16:00:36 -0700478}
479
480// int java.lang.Long.bitCount(long)
481void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100482 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen7fda7852016-04-21 16:00:36 -0700483}
484
485void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
Lena Djokic0d2cab52018-03-06 15:20:45 +0100486 GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, HasMsa(), GetAssembler());
Chris Larsen7fda7852016-04-21 16:00:36 -0700487}
488
Chris Larsen0b7ac982015-09-04 12:54:28 -0700489// double java.lang.Math.sqrt(double)
490void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100491 CreateFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700492}
493
494void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
495 LocationSummary* locations = invoke->GetLocations();
496 Mips64Assembler* assembler = GetAssembler();
497 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
498 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
499
500 __ SqrtD(out, in);
501}
502
Vladimir Markoca6fff82017-10-03 14:49:14 +0100503static void CreateFPToFP(ArenaAllocator* allocator,
Chris Larsen81284372015-10-21 15:28:53 -0700504 HInvoke* invoke,
505 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100506 LocationSummary* locations =
507 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700508 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700509 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700510}
511
512// double java.lang.Math.rint(double)
513void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100514 CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700515}
516
517void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
518 LocationSummary* locations = invoke->GetLocations();
519 Mips64Assembler* assembler = GetAssembler();
520 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
521 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
522
523 __ RintD(out, in);
524}
525
526// double java.lang.Math.floor(double)
527void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100528 CreateFPToFP(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700529}
530
Chris Larsen14500822015-10-01 11:35:18 -0700531const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
532 kPositiveInfinity |
533 kNegativeZero |
534 kNegativeInfinity |
535 kQuietNaN |
536 kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700537
Chris Larsen81284372015-10-21 15:28:53 -0700538enum FloatRoundingMode {
539 kFloor,
540 kCeil,
541};
542
543static void GenRoundingMode(LocationSummary* locations,
544 FloatRoundingMode mode,
545 Mips64Assembler* assembler) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700546 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
547 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
548
Chris Larsen81284372015-10-21 15:28:53 -0700549 DCHECK_NE(in, out);
550
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700551 Mips64Label done;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700552
Chris Larsen81284372015-10-21 15:28:53 -0700553 // double floor/ceil(double in) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700554 // if in.isNaN || in.isInfinite || in.isZero {
555 // return in;
556 // }
557 __ ClassD(out, in);
558 __ Dmfc1(AT, out);
Chris Larsen14500822015-10-01 11:35:18 -0700559 __ Andi(AT, AT, kFPLeaveUnchanged); // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
Chris Larsen0b7ac982015-09-04 12:54:28 -0700560 __ MovD(out, in);
561 __ Bnezc(AT, &done);
562
Chris Larsen81284372015-10-21 15:28:53 -0700563 // Long outLong = floor/ceil(in);
Goran Jakovljevic716d0732017-04-07 11:18:59 +0200564 // if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
Chris Larsen81284372015-10-21 15:28:53 -0700565 // // floor()/ceil() has almost certainly returned a value
566 // // which can't be successfully represented as a signed
567 // // 64-bit number. Java expects that the input value will
568 // // be returned in these cases.
569 // // There is also a small probability that floor(in)/ceil(in)
570 // // correctly truncates/rounds up the input value to
Goran Jakovljevic716d0732017-04-07 11:18:59 +0200571 // // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
572 // // exception handling code still does the correct thing.
Chris Larsen0b7ac982015-09-04 12:54:28 -0700573 // return in;
574 // }
Chris Larsen81284372015-10-21 15:28:53 -0700575 if (mode == kFloor) {
576 __ FloorLD(out, in);
577 } else if (mode == kCeil) {
578 __ CeilLD(out, in);
579 }
Chris Larsen0b7ac982015-09-04 12:54:28 -0700580 __ Dmfc1(AT, out);
581 __ MovD(out, in);
Goran Jakovljevic716d0732017-04-07 11:18:59 +0200582 __ Daddiu(TMP, AT, 1);
583 __ Dati(TMP, 0x8000); // TMP = AT + 0x8000 0000 0000 0001
584 // or AT - 0x7FFF FFFF FFFF FFFF.
585 // IOW, TMP = 1 if AT = Long.MIN_VALUE
586 // or TMP = 0 if AT = Long.MAX_VALUE.
587 __ Dsrl(TMP, TMP, 1); // TMP = 0 if AT = Long.MIN_VALUE
588 // or AT = Long.MAX_VALUE.
589 __ Beqzc(TMP, &done);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700590
591 // double out = outLong;
592 // return out;
593 __ Dmtc1(AT, out);
594 __ Cvtdl(out, out);
595 __ Bind(&done);
596 // }
597}
598
Chris Larsen81284372015-10-21 15:28:53 -0700599void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
600 GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
601}
602
Chris Larsen0b7ac982015-09-04 12:54:28 -0700603// double java.lang.Math.ceil(double)
604void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100605 CreateFPToFP(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700606}
607
608void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
Chris Larsen81284372015-10-21 15:28:53 -0700609 GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700610}
611
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100612static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
Chris Larsen7adaab02016-04-21 14:49:20 -0700613 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
614 FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
615 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
616
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100617 DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);
Chris Larsen7adaab02016-04-21 14:49:20 -0700618
619 Mips64Label done;
Chris Larsen7adaab02016-04-21 14:49:20 -0700620
Chris Larsen7adaab02016-04-21 14:49:20 -0700621 // out = floor(in);
622 //
Lena Djokicf4e23a82017-05-09 15:43:45 +0200623 // if (out != MAX_VALUE && out != MIN_VALUE) {
624 // TMP = ((in - out) >= 0.5) ? 1 : 0;
Chris Larsen7adaab02016-04-21 14:49:20 -0700625 // return out += TMP;
626 // }
Lena Djokicf4e23a82017-05-09 15:43:45 +0200627 // return out;
Chris Larsen7adaab02016-04-21 14:49:20 -0700628
629 // out = floor(in);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100630 if (type == DataType::Type::kFloat64) {
Chris Larsen7adaab02016-04-21 14:49:20 -0700631 __ FloorLD(FTMP, in);
632 __ Dmfc1(out, FTMP);
633 } else {
634 __ FloorWS(FTMP, in);
635 __ Mfc1(out, FTMP);
636 }
637
Lena Djokicf4e23a82017-05-09 15:43:45 +0200638 // if (out != MAX_VALUE && out != MIN_VALUE)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100639 if (type == DataType::Type::kFloat64) {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200640 __ Daddiu(TMP, out, 1);
641 __ Dati(TMP, 0x8000); // TMP = out + 0x8000 0000 0000 0001
642 // or out - 0x7FFF FFFF FFFF FFFF.
643 // IOW, TMP = 1 if out = Long.MIN_VALUE
644 // or TMP = 0 if out = Long.MAX_VALUE.
645 __ Dsrl(TMP, TMP, 1); // TMP = 0 if out = Long.MIN_VALUE
646 // or out = Long.MAX_VALUE.
647 __ Beqzc(TMP, &done);
Chris Larsen7adaab02016-04-21 14:49:20 -0700648 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200649 __ Addiu(TMP, out, 1);
650 __ Aui(TMP, TMP, 0x8000); // TMP = out + 0x8000 0001
651 // or out - 0x7FFF FFFF.
652 // IOW, TMP = 1 if out = Int.MIN_VALUE
653 // or TMP = 0 if out = Int.MAX_VALUE.
654 __ Srl(TMP, TMP, 1); // TMP = 0 if out = Int.MIN_VALUE
655 // or out = Int.MAX_VALUE.
656 __ Beqzc(TMP, &done);
Chris Larsen7adaab02016-04-21 14:49:20 -0700657 }
Chris Larsen7adaab02016-04-21 14:49:20 -0700658
659 // TMP = (0.5 <= (in - out)) ? -1 : 0;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100660 if (type == DataType::Type::kFloat64) {
Chris Larsen7adaab02016-04-21 14:49:20 -0700661 __ Cvtdl(FTMP, FTMP); // Convert output of floor.l.d back to "double".
662 __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
663 __ SubD(FTMP, in, FTMP);
664 __ Dmtc1(AT, half);
665 __ CmpLeD(FTMP, half, FTMP);
Lena Djokicf4e23a82017-05-09 15:43:45 +0200666 __ Dmfc1(TMP, FTMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700667 } else {
668 __ Cvtsw(FTMP, FTMP); // Convert output of floor.w.s back to "float".
669 __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
670 __ SubS(FTMP, in, FTMP);
671 __ Mtc1(AT, half);
672 __ CmpLeS(FTMP, half, FTMP);
Lena Djokicf4e23a82017-05-09 15:43:45 +0200673 __ Mfc1(TMP, FTMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700674 }
675
Chris Larsen7adaab02016-04-21 14:49:20 -0700676 // Return out -= TMP.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100677 if (type == DataType::Type::kFloat64) {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200678 __ Dsubu(out, out, TMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700679 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200680 __ Subu(out, out, TMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700681 }
682
683 __ Bind(&done);
684}
685
// int java.lang.Math.round(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // FP temp consumed by GenRound (holds the 0.5 constant while computing the
  // rounding adjustment — see GenRound above).
  locations->AddTemp(Location::RequiresFpuRegister());
  // Result is an integer, so it lives in a core register.
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  // Shared float/double rounding helper; kFloat32 selects the 32-bit path.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
}

// long java.lang.Math.round(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // FP temp consumed by GenRound, as in the float variant above.
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  // Shared float/double rounding helper; kFloat64 selects the 64-bit path.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
}
711
Chris Larsen70fb1f42015-09-04 10:15:27 -0700712// byte libcore.io.Memory.peekByte(long address)
713void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100714 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700715}
716
717void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
718 Mips64Assembler* assembler = GetAssembler();
719 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
720 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
721
722 __ Lb(out, adr, 0);
723}
724
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Sign-extending 16-bit load from the raw address; no null/alignment checks
  // are emitted here (the Memory API contract puts that burden on the caller).
  __ Lh(out, adr, 0);
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // 32-bit load from the raw address.
  __ Lw(out, adr, 0);
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // 64-bit load from the raw address.
  __ Ld(out, adr, 0);
}
763
Vladimir Markoca6fff82017-10-03 14:49:14 +0100764static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
765 LocationSummary* locations =
766 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700767 locations->SetInAt(0, Location::RequiresRegister());
768 locations->SetInAt(1, Location::RequiresRegister());
769}
770
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // Byte store to the caller-supplied raw address; no checks are emitted.
  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // 16-bit store to the raw address.
  __ Sh(val, adr, 0);
}
796
797// void libcore.io.Memory.pokeInt(long address, int value)
798void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100799 CreateIntIntToVoidLocations(allocator_, invoke);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700800}
801
802void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
803 Mips64Assembler* assembler = GetAssembler();
804 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
805 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
806
807 __ Sw(val, adr, 00);
808}
809
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // 64-bit store to the caller-supplied raw address; no checks are emitted.
  __ Sd(val, adr, 0);
}
822
// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Load the Java peer object from a fixed offset off the thread register (TR).
  // References are 32-bit, so the word load is zero-extending (kLoadUnsignedWord).
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
839
// Builds the location summary for the Unsafe.get* family: inputs are the
// (unused) Unsafe receiver, the object, and the long offset; the result goes
// into a core register. Reference getters may need the read-barrier slow path.
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  // Only the object getters can call into the runtime (read-barrier slow path).
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  // When a slow path may run, the output must not alias the inputs.
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
866
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the body of the Unsafe.get{Int,Long,Object}[Volatile] intrinsics:
// load *(base + offset) into the output register. Volatile variants append a
// SYNC barrier after the load; reference loads go through the configured
// read-barrier machinery (Baker fast path or the generic slow path).
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // Precompute the effective address in TMP, except on the Baker reference
  // path, which forms base + offset itself (and would clobber/ignore TMP).
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case DataType::Type::kInt64:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        // Barrier after the load gives the volatile read its acquire ordering.
        __ Sync(0);
      }
      break;

    case DataType::Type::kInt32:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kReference:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          // Fast-path Baker read barrier: load and mark in one helper.
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset= */ 0U,
                                                             /* index= */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check= */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Generic read barrier: plain load, then fix up via the slow path.
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset= */ 0U,
                                           /* index= */ offset_loc);
        }
      } else {
        // No read barrier: zero-extending 32-bit load of the compressed
        // reference, then unpoison if heap poisoning is enabled.
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
948
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Plain (non-volatile) 32-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile= */ false, codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit load (GenUnsafeGet adds the acquire barrier).
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile= */ true, codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile= */ false, codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile= */ true, codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference load; GenUnsafeGet routes this through the read barrier if enabled.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile= */ false, codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile= */ true, codegen_);
}
1002
Vladimir Markoca6fff82017-10-03 14:49:14 +01001003static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
1004 LocationSummary* locations =
1005 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -07001006 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1007 locations->SetInAt(1, Location::RequiresRegister());
1008 locations->SetInAt(2, Location::RequiresRegister());
1009 locations->SetInAt(3, Location::RequiresRegister());
1010}
1011
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the body of the Unsafe.put{Int,Long,Object}[Ordered|Volatile]
// intrinsics: store the value at *(base + offset). Ordered and volatile puts
// get a SYNC barrier before the store; volatile puts add a trailing SYNC as
// well. Reference stores poison the value (if enabled) and mark the GC card.
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    // Barrier before the store orders it after all preceding accesses.
    __ Sync(0);
  }
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
      if (kPoisonHeapReferences && type == DataType::Type::kReference) {
        // Poison into AT so `value` stays intact for MarkGCCard below.
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case DataType::Type::kInt64:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    // Trailing barrier completes the volatile store's ordering.
    __ Sync(0);
  }

  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1061
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain 32-bit store: no memory barriers.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered store: barrier before the store only (see GenUnsafePut).
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Volatile store: barriers both before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Reference store; GenUnsafePut also marks the GC card.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               codegen_);
}
1178
// Builds the location summary for the Unsafe CAS intrinsics: inputs are the
// (unused) receiver, object, offset, expected value and new value; the boolean
// success result goes into a core register. The object variant may call the
// Baker read-barrier slow path and then needs an extra temp.
static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary register used in CAS by (Baker) read barrier.
  if (can_call) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
1201
Alexey Frunze15958152017-02-09 19:08:30 -08001202// Note that the caller must supply a properly aligned memory address.
1203// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001204static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
Chris Larsen36427492015-10-23 02:19:38 -07001205 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001206 LocationSummary* locations = invoke->GetLocations();
Chris Larsen36427492015-10-23 02:19:38 -07001207 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001208 Location offset_loc = locations->InAt(2);
1209 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001210 GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
1211 GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001212 Location out_loc = locations->Out();
1213 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001214
1215 DCHECK_NE(base, out);
1216 DCHECK_NE(offset, out);
1217 DCHECK_NE(expected, out);
1218
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001219 if (type == DataType::Type::kReference) {
Alexey Frunze15958152017-02-09 19:08:30 -08001220 // The only read barrier implementation supporting the
1221 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1222 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1223
1224 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1225 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001226 bool value_can_be_null = true; // TODO: Worth finding out this information?
1227 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001228
1229 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1230 Location temp = locations->GetTemp(0);
1231 // Need to make sure the reference stored in the field is a to-space
1232 // one before attempting the CAS or the CAS could fail incorrectly.
1233 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1234 invoke,
1235 out_loc, // Unused, used only as a "temporary" within the read barrier.
1236 base,
Andreas Gampe3db70682018-12-26 15:12:03 -08001237 /* offset= */ 0u,
1238 /* index= */ offset_loc,
Alexey Frunze15958152017-02-09 19:08:30 -08001239 ScaleFactor::TIMES_1,
1240 temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08001241 /* needs_null_check= */ false,
1242 /* always_update_field= */ true);
Alexey Frunze15958152017-02-09 19:08:30 -08001243 }
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001244 }
1245
Alexey Frunzec061de12017-02-14 13:27:23 -08001246 Mips64Label loop_head, exit_loop;
1247 __ Daddu(TMP, base, offset);
1248
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001249 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001250 __ PoisonHeapReference(expected);
1251 // Do not poison `value`, if it is the same register as
1252 // `expected`, which has just been poisoned.
1253 if (value != expected) {
1254 __ PoisonHeapReference(value);
1255 }
1256 }
1257
Chris Larsen36427492015-10-23 02:19:38 -07001258 // do {
1259 // tmp_value = [tmp_ptr] - expected;
1260 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1261 // result = tmp_value != 0;
1262
Chris Larsen36427492015-10-23 02:19:38 -07001263 __ Sync(0);
1264 __ Bind(&loop_head);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001265 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001266 __ Lld(out, TMP);
1267 } else {
Roland Levillain391b8662015-12-18 11:43:38 +00001268 // Note: We will need a read barrier here, when read barrier
1269 // support is added to the MIPS64 back end.
Chris Larsen36427492015-10-23 02:19:38 -07001270 __ Ll(out, TMP);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001271 if (type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001272 // The LL instruction sign-extends the 32-bit value, but
1273 // 32-bit references must be zero-extended. Zero-extend `out`.
1274 __ Dext(out, out, 0, 32);
1275 }
Chris Larsen36427492015-10-23 02:19:38 -07001276 }
1277 __ Dsubu(out, out, expected); // If we didn't get the 'expected'
1278 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1279 __ Beqzc(out, &exit_loop); // return.
1280 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1281 // If we use 'value' directly, we would lose 'value'
1282 // in the case that the store fails. Whether the
1283 // store succeeds, or fails, it will load the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001284 // correct Boolean value into the 'out' register.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001286 __ Scd(out, TMP);
1287 } else {
1288 __ Sc(out, TMP);
1289 }
1290 __ Beqzc(out, &loop_head); // If we couldn't do the read-modify-write
1291 // cycle atomically then retry.
1292 __ Bind(&exit_loop);
1293 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001294
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001295 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001296 __ UnpoisonHeapReference(expected);
1297 // Do not unpoison `value`, if it is the same register as
1298 // `expected`, which has just been unpoisoned.
1299 if (value != expected) {
1300 __ UnpoisonHeapReference(value);
1301 }
1302 }
Chris Larsen36427492015-10-23 02:19:38 -07001303}
1304
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // Location setup is shared with the long/object CAS intrinsics below.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1309
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // Emit a 32-bit compare-and-swap via the shared CAS generator.
  GenCas(invoke, DataType::Type::kInt32, codegen_);
}
1313
// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // Location setup is shared with the int/object CAS intrinsics.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1318
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // Emit a 64-bit compare-and-swap via the shared CAS generator.
  GenCas(invoke, DataType::Type::kInt64, codegen_);
}
1322
1323// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1324void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001325 // The only read barrier implementation supporting the
1326 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1327 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1328 return;
1329 }
1330
Vladimir Markoca6fff82017-10-03 14:49:14 +01001331 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001332}
1333
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  // (The locations builder above refuses to intrinsify otherwise.)
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  // Emit a reference compare-and-swap via the shared CAS generator.
  GenCas(invoke, DataType::Type::kReference, codegen_);
}
1341
Chris Larsen9701c2e2015-09-04 17:22:47 -07001342// int java.lang.String.compareTo(String anotherString)
1343void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001344 LocationSummary* locations = new (allocator_) LocationSummary(
1345 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001346 InvokeRuntimeCallingConvention calling_convention;
1347 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1348 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001349 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001350 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1351}
1352
1353void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
1354 Mips64Assembler* assembler = GetAssembler();
1355 LocationSummary* locations = invoke->GetLocations();
1356
1357 // Note that the null check must have been done earlier.
1358 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1359
1360 GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001361 SlowPathCodeMIPS64* slow_path =
1362 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001363 codegen_->AddSlowPath(slow_path);
1364 __ Beqzc(argument, slow_path->GetEntryLabel());
1365
Serban Constantinescufc734082016-07-19 17:18:07 +01001366 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001367 __ Bind(slow_path->GetExitLabel());
1368}
1369
Chris Larsen972d6d72015-10-20 11:29:12 -07001370// boolean java.lang.String.equals(Object anObject)
1371void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001372 LocationSummary* locations =
1373 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001374 locations->SetInAt(0, Location::RequiresRegister());
1375 locations->SetInAt(1, Location::RequiresRegister());
1376 locations->SetOut(Location::RequiresRegister());
1377
1378 // Temporary registers to store lengths of strings and for calculations.
1379 locations->AddTemp(Location::RequiresRegister());
1380 locations->AddTemp(Location::RequiresRegister());
1381 locations->AddTemp(Location::RequiresRegister());
1382}
1383
// Inline code for boolean java.lang.String.equals(Object anObject):
// short-circuits on identity and null, checks class and `count` fields,
// then compares the character data 8 bytes at a time.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    //
    // As the String class is expected to be non-movable, we can read the class
    // field from String.equals' arguments without read barriers.
    AssertNonMovableStringClass();
    // /* HeapReference<Class> */ temp1 = str->klass_
    __ Lw(temp1, str, class_offset);
    // /* HeapReference<Class> */ temp2 = arg->klass_
    __ Lw(temp2, arg, class_offset);
    // Also, because we use the previously loaded class references only in the
    // following comparison, we don't need to unpoison them.
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  // (With string compression, `count` holds length plus the compression flag bit.)
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers; copy the data pointers into scratch registers.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);   // Extract compression flag.
    __ Srl(temp1, temp1, 1);       // Extract length.
    __ Sllv(temp1, temp1, temp2);  // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1494
Chris Larsen9701c2e2015-09-04 17:22:47 -07001495static void GenerateStringIndexOf(HInvoke* invoke,
1496 Mips64Assembler* assembler,
1497 CodeGeneratorMIPS64* codegen,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001498 bool start_at_zero) {
1499 LocationSummary* locations = invoke->GetLocations();
1500 GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
1501
1502 // Note that the null check must have been done earlier.
1503 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1504
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001505 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1506 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Chris Larsen9701c2e2015-09-04 17:22:47 -07001507 SlowPathCodeMIPS64* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001508 HInstruction* code_point = invoke->InputAt(1);
1509 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001510 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001511 // Always needs the slow-path. We could directly dispatch to it,
1512 // but this case should be rare, so for simplicity just put the
1513 // full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001514 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001515 codegen->AddSlowPath(slow_path);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001516 __ Bc(slow_path->GetEntryLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -07001517 __ Bind(slow_path->GetExitLabel());
1518 return;
1519 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001520 } else if (code_point->GetType() != DataType::Type::kUint16) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001521 GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
1522 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
Vladimir Marko174b2e22017-10-12 13:34:49 +01001523 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001524 codegen->AddSlowPath(slow_path);
1525 __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
1526 }
1527
1528 if (start_at_zero) {
1529 DCHECK_EQ(tmp_reg, A2);
1530 // Start-index = 0.
1531 __ Clear(tmp_reg);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001532 }
1533
Serban Constantinescufc734082016-07-19 17:18:07 +01001534 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001535 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001536
1537 if (slow_path != nullptr) {
1538 __ Bind(slow_path->GetExitLabel());
1539 }
1540}
1541
1542// int java.lang.String.indexOf(int ch)
1543void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001544 LocationSummary* locations = new (allocator_) LocationSummary(
1545 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001546 // We have a hand-crafted assembly stub that follows the runtime
1547 // calling convention. So it's best to align the inputs accordingly.
1548 InvokeRuntimeCallingConvention calling_convention;
1549 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1550 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001551 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001552 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1553
1554 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1555 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1556}
1557
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  // indexOf(ch) always searches from index 0.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ true);
}
1561
1562// int java.lang.String.indexOf(int ch, int fromIndex)
1563void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001564 LocationSummary* locations = new (allocator_) LocationSummary(
1565 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001566 // We have a hand-crafted assembly stub that follows the runtime
1567 // calling convention. So it's best to align the inputs accordingly.
1568 InvokeRuntimeCallingConvention calling_convention;
1569 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1570 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1571 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001572 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001573 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1574}
1575
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  // indexOf(ch, fromIndex): the caller supplies the start index (input 2).
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ false);
}
1579
Roland Levillaincc3839c2016-02-29 16:23:48 +00001580// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001581void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001582 LocationSummary* locations = new (allocator_) LocationSummary(
1583 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001584 InvokeRuntimeCallingConvention calling_convention;
1585 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1586 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1587 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1588 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001589 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001590 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1591}
1592
1593void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
1594 Mips64Assembler* assembler = GetAssembler();
1595 LocationSummary* locations = invoke->GetLocations();
1596
1597 GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001598 SlowPathCodeMIPS64* slow_path =
1599 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001600 codegen_->AddSlowPath(slow_path);
1601 __ Beqzc(byte_array, slow_path->GetEntryLabel());
1602
Serban Constantinescufc734082016-07-19 17:18:07 +01001603 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
Roland Levillainf969a202016-03-09 16:14:00 +00001604 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001605 __ Bind(slow_path->GetExitLabel());
1606}
1607
Roland Levillaincc3839c2016-02-29 16:23:48 +00001608// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001609void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001610 LocationSummary* locations =
1611 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001612 InvokeRuntimeCallingConvention calling_convention;
1613 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1614 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1615 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001616 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001617 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1618}
1619
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  // Delegate string allocation to the runtime entrypoint.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1630
Roland Levillainf969a202016-03-09 16:14:00 +00001631// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001632void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001633 LocationSummary* locations = new (allocator_) LocationSummary(
1634 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001635 InvokeRuntimeCallingConvention calling_convention;
1636 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001637 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001638 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1639}
1640
1641void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
1642 Mips64Assembler* assembler = GetAssembler();
1643 LocationSummary* locations = invoke->GetLocations();
1644
1645 GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001646 SlowPathCodeMIPS64* slow_path =
1647 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001648 codegen_->AddSlowPath(slow_path);
1649 __ Beqzc(string_to_copy, slow_path->GetEntryLabel());
1650
Serban Constantinescufc734082016-07-19 17:18:07 +01001651 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
Roland Levillainf969a202016-03-09 16:14:00 +00001652 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001653 __ Bind(slow_path->GetExitLabel());
1654}
1655
Chris Larsenddec7f92016-02-16 12:35:04 -08001656static void GenIsInfinite(LocationSummary* locations,
1657 bool is64bit,
1658 Mips64Assembler* assembler) {
1659 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
1660 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1661
1662 if (is64bit) {
1663 __ ClassD(FTMP, in);
1664 } else {
1665 __ ClassS(FTMP, in);
1666 }
1667 __ Mfc1(out, FTMP);
1668 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1669 __ Sltu(out, ZERO, out);
1670}
1671
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Shared FP-input / integer-output location setup.
  CreateFPToIntLocations(allocator_, invoke);
}
1676
void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Single-precision variant.
  GenIsInfinite(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}
1680
// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Shared FP-input / integer-output location setup.
  CreateFPToIntLocations(allocator_, invoke);
}
1685
void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Double-precision variant.
  GenIsInfinite(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}
1689
Chris Larsene3660592016-11-09 11:13:42 -08001690// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1691void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001692 LocationSummary* locations =
1693 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsene3660592016-11-09 11:13:42 -08001694 locations->SetInAt(0, Location::RequiresRegister());
1695 locations->SetInAt(1, Location::RequiresRegister());
1696 locations->SetInAt(2, Location::RequiresRegister());
1697 locations->SetInAt(3, Location::RequiresRegister());
1698 locations->SetInAt(4, Location::RequiresRegister());
1699
Chris Larsen366d4332017-03-23 09:02:56 -07001700 locations->AddTemp(Location::RequiresRegister());
1701 locations->AddTemp(Location::RequiresRegister());
1702 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001703}
1704
// Inline code for String.getChars(srcBegin, srcEnd, dst, dstBegin) with bounds
// checks already done: copies [srcBegin, srcEnd) chars into dst at dstBegin,
// expanding 8-bit compressed string data to 16-bit chars where needed.
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address.
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Uncompressed copy loop: one 16-bit char per iteration.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
1780
Chris Larsen5863f852017-03-23 15:41:37 -07001781// static void java.lang.System.arraycopy(Object src, int srcPos,
1782// Object dest, int destPos,
1783// int length)
1784void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
1785 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1786 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1787 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1788
1789 // As long as we are checking, we might as well check to see if the src and dest
1790 // positions are >= 0.
1791 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
1792 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
1793 // We will have to fail anyways.
1794 return;
1795 }
1796
1797 // And since we are already checking, check the length too.
1798 if (length != nullptr) {
1799 int32_t len = length->GetValue();
1800 if (len < 0) {
1801 // Just call as normal.
1802 return;
1803 }
1804 }
1805
1806 // Okay, it is safe to generate inline code.
1807 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001808 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
Chris Larsen5863f852017-03-23 15:41:37 -07001809 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
1810 locations->SetInAt(0, Location::RequiresRegister());
1811 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1812 locations->SetInAt(2, Location::RequiresRegister());
1813 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
1814 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
1815
1816 locations->AddTemp(Location::RequiresRegister());
1817 locations->AddTemp(Location::RequiresRegister());
1818 locations->AddTemp(Location::RequiresRegister());
1819}
1820
1821// Utility routine to verify that "length(input) - pos >= length"
1822static void EnoughItems(Mips64Assembler* assembler,
1823 GpuRegister length_input_minus_pos,
1824 Location length,
1825 SlowPathCodeMIPS64* slow_path) {
1826 if (length.IsConstant()) {
1827 int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();
1828
1829 if (IsInt<16>(length_constant)) {
1830 __ Slti(TMP, length_input_minus_pos, length_constant);
1831 __ Bnezc(TMP, slow_path->GetEntryLabel());
1832 } else {
1833 __ LoadConst32(TMP, length_constant);
1834 __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
1835 }
1836 } else {
1837 __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
1838 }
1839}
1840
// Emit the bounds checks for one side (source or destination) of an
// arraycopy: verifies pos >= 0 and length(input) - pos >= length, branching
// to `slow_path` on failure. When `length_is_input_length` is true, the copy
// is expected to cover the whole input array, so the only acceptable pos is
// zero. Clobbers AT (and TMP via EnoughItems).
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
      // pos == 0 and length == length(input): nothing to check at runtime.
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);  // Negative constants were rejected in the builder.
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
1887
// Inline code generation for System.arraycopy on char[] arrays.
// Emits all runtime validity checks (distinct non-null arrays, non-negative
// positions/length, sufficient lengths) with the generic slow path as the
// bail-out, then copies `length` chars one halfword at a time.
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  // base = array + data_offset + pos * sizeof(char), folded into the
  // immediate when pos is constant, Dlsa-scaled otherwise.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Copy loop: one 16-bit char per iteration, counting `count` down to zero.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
1974
Chris Larsenab922502016-04-15 10:00:56 -07001975static void GenHighestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001976 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07001977 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001978 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07001979
1980 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
1981 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1982
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001983 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07001984 __ Dclz(TMP, in);
1985 __ LoadConst64(AT, INT64_C(0x8000000000000000));
Chris Larsen68db2a92016-09-14 15:41:29 -07001986 __ Dsrlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07001987 } else {
1988 __ Clz(TMP, in);
1989 __ LoadConst32(AT, 0x80000000);
Chris Larsen68db2a92016-09-14 15:41:29 -07001990 __ Srlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07001991 }
1992 // For either value of "type", when "in" is zero, "out" should also
1993 // be zero. Without this extra "and" operation, when "in" is zero,
1994 // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
1995 // the MIPS logical shift operations "dsrlv", and "srlv" don't use
1996 // the shift amount (TMP) directly; they use either (TMP % 64) or
1997 // (TMP % 32), respectively.
Chris Larsen68db2a92016-09-14 15:41:29 -07001998 __ And(out, AT, in);
Chris Larsenab922502016-04-15 10:00:56 -07001999}
2000
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// Delegates to GenHighestOneBit with the 32-bit type.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// Delegates to GenHighestOneBit with the 64-bit type.
void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2018
2019static void GenLowestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002020 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07002021 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002022 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07002023
2024 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2025 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2026
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002027 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07002028 __ Dsubu(TMP, ZERO, in);
2029 } else {
2030 __ Subu(TMP, ZERO, in);
2031 }
2032 __ And(out, TMP, in);
2033}
2034
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// Delegates to GenLowestOneBit with the 32-bit type.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// Delegates to GenLowestOneBit with the 64-bit type.
void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2052
Vladimir Markoca6fff82017-10-03 14:49:14 +01002053static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2054 LocationSummary* locations =
2055 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen74c20582017-03-28 22:17:35 -07002056 InvokeRuntimeCallingConvention calling_convention;
2057
2058 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002059 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsen74c20582017-03-28 22:17:35 -07002060}
2061
Vladimir Markoca6fff82017-10-03 14:49:14 +01002062static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2063 LocationSummary* locations =
2064 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen74c20582017-03-28 22:17:35 -07002065 InvokeRuntimeCallingConvention calling_convention;
2066
2067 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2068 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002069 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsen74c20582017-03-28 22:17:35 -07002070}
2071
2072static void GenFPToFPCall(HInvoke* invoke,
2073 CodeGeneratorMIPS64* codegen,
2074 QuickEntrypointEnum entry) {
2075 LocationSummary* locations = invoke->GetLocations();
2076 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
2077 DCHECK_EQ(in, F12);
2078 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2079 DCHECK_EQ(out, F0);
2080
2081 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2082}
2083
2084static void GenFPFPToFPCall(HInvoke* invoke,
2085 CodeGeneratorMIPS64* codegen,
2086 QuickEntrypointEnum entry) {
2087 LocationSummary* locations = invoke->GetLocations();
2088 FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
2089 DCHECK_EQ(in0, F12);
2090 FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
2091 DCHECK_EQ(in1, F13);
2092 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2093 DCHECK_EQ(out, F0);
2094
2095 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2096}
2097
// The transcendental Math intrinsics below have no MIPS64 hardware
// equivalents here; each pair sets up the runtime FP calling convention and
// calls the corresponding quick entrypoint.

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}
2151
// static double java.lang.Math.pow(double a, double b)
// (Comment fixed: the old "(double y, double x)" was copy-pasted from atan2;
// Math.pow takes the base first, then the exponent.)
void IntrinsicLocationsBuilderMIPS64::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathPow(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickPow);
}
2160
// More Math intrinsics implemented as runtime calls; see the entrypoint
// helpers GenFPToFPCall / GenFPFPToFPCall above.

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2259
// static Integer java.lang.Integer.valueOf(int)
// (Comment fixed: the intrinsic is Integer.valueOf(int), not
// "long Integer.valueOf(long)".)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2269
2270void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01002271 IntrinsicVisitor::IntegerValueOfInfo info =
2272 IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions());
Chris Larsen5633ce72017-04-10 15:47:40 -07002273 LocationSummary* locations = invoke->GetLocations();
2274 Mips64Assembler* assembler = GetAssembler();
2275 InstructionCodeGeneratorMIPS64* icodegen =
2276 down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());
2277
2278 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
Chris Larsen5633ce72017-04-10 15:47:40 -07002279 if (invoke->InputAt(0)->IsConstant()) {
2280 int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
Vladimir Marko6fd16062018-06-26 11:02:04 +01002281 if (static_cast<uint32_t>(value - info.low) < info.length) {
Chris Larsen5633ce72017-04-10 15:47:40 -07002282 // Just embed the j.l.Integer in the code.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002283 DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference);
2284 codegen_->LoadBootImageAddress(out, info.value_boot_image_reference);
Chris Larsen5633ce72017-04-10 15:47:40 -07002285 } else {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002286 DCHECK(locations->CanCall());
Chris Larsen5633ce72017-04-10 15:47:40 -07002287 // Allocate and initialize a new j.l.Integer.
2288 // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
2289 // JIT object table.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002290 codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
2291 info.integer_boot_image_offset);
Chris Larsen5633ce72017-04-10 15:47:40 -07002292 __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
2293 // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
2294 // one.
2295 icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2296 }
2297 } else {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002298 DCHECK(locations->CanCall());
Chris Larsen5633ce72017-04-10 15:47:40 -07002299 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2300 Mips64Label allocate, done;
Chris Larsen5633ce72017-04-10 15:47:40 -07002301
Chris Larsen5633ce72017-04-10 15:47:40 -07002302 __ Addiu32(out, in, -info.low);
Vladimir Markoeebb8212018-06-05 14:57:24 +01002303 // As unsigned quantities is out < info.length ?
2304 __ LoadConst32(AT, info.length);
2305 // Branch if out >= info.length . This means that "in" is outside of the valid range.
Chris Larsen5633ce72017-04-10 15:47:40 -07002306 __ Bgeuc(out, AT, &allocate);
2307
2308 // If the value is within the bounds, load the j.l.Integer directly from the array.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002309 codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_reference);
Chris Larsen5633ce72017-04-10 15:47:40 -07002310 __ Dlsa(out, out, TMP, TIMES_4);
2311 __ Lwu(out, out, 0);
2312 __ MaybeUnpoisonHeapReference(out);
2313 __ Bc(&done);
2314
2315 __ Bind(&allocate);
2316 // Otherwise allocate and initialize a new j.l.Integer.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002317 codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
2318 info.integer_boot_image_offset);
Chris Larsen5633ce72017-04-10 15:47:40 -07002319 __ StoreToOffset(kStoreWord, in, out, info.value_offset);
2320 // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
2321 // one.
2322 icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2323 __ Bind(&done);
2324 }
2325}
2326
// static boolean java.lang.Thread.interrupted()
void IntrinsicLocationsBuilderMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

// Reads the interrupted flag from the current Thread (via the thread
// register TR) and, only when it was set, clears it, bracketing the clearing
// store with Sync(0) full barriers.
void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
  int32_t offset = Thread::InterruptedOffset<kMips64PointerSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  Mips64Label done;
  // Flag not set: nothing to clear; `out` already holds the result (0).
  __ Beqzc(out, &done);
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}
2346
// Reference.reachabilityFence: any location is accepted for the input and
// no code is generated — the invoke exists only so the compiler keeps the
// referenced object alive up to this point.
void IntrinsicLocationsBuilderMIPS64::VisitReachabilityFence(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::Any());
}

void IntrinsicCodeGeneratorMIPS64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
2354
Aart Bik2f9fcc92016-03-01 15:16:54 -08002355UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002356UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
xueliang.zhongcb58b072017-10-13 12:06:56 +01002357UNIMPLEMENTED_INTRINSIC(MIPS64, CRC32Update)
Evgeny Astigeevich15c5b972018-11-20 13:41:40 +00002358UNIMPLEMENTED_INTRINSIC(MIPS64, CRC32UpdateBytes)
Evgeny Astigeevich776a7c22018-12-17 11:40:34 +00002359UNIMPLEMENTED_INTRINSIC(MIPS64, CRC32UpdateByteBuffer)
Aart Bik3f67e692016-01-15 14:35:12 -08002360
Aart Bikff7d89c2016-11-07 08:49:28 -08002361UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2362UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002363UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2364UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2365UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
Vladimir Markod4561172017-10-30 17:48:25 +00002366UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendObject);
2367UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendString);
2368UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendCharSequence);
2369UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendCharArray);
2370UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendBoolean);
2371UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendChar);
2372UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendInt);
2373UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendLong);
2374UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendFloat);
2375UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppendDouble);
Aart Bik71bf7b42016-11-16 10:17:46 -08002376UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2377UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002378
Aart Bik0e54c012016-03-04 12:08:31 -08002379// 1.8.
2380UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2381UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2382UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2383UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2384UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002385
Aart Bik2f9fcc92016-03-01 15:16:54 -08002386UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002387
2388#undef __
2389
2390} // namespace mips64
2391} // namespace art