blob: 5debd26c5a57a474eaf8666c410301bc3cc2e3ab [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
37IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
Vladimir Markoca6fff82017-10-03 14:49:14 +010038 : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
Chris Larsen3039e382015-08-26 07:54:08 -070039}
40
41Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
42 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
43}
44
45ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
Vladimir Markoca6fff82017-10-03 14:49:14 +010046 return codegen_->GetGraph()->GetAllocator();
Chris Larsen3039e382015-08-26 07:54:08 -070047}
48
Chris Larsen9701c2e2015-09-04 17:22:47 -070049#define __ codegen->GetAssembler()->
50
// Moves a call result between the ABI return register (V0 / F0) and `trg`,
// the location the intrinsic's location summary designated for the output.
// No-op when the invoke returns void or the registers already coincide.
static void MoveFromReturnRegister(Location trg,
                                   DataType::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  // An invalid target location means the call produces no value.
  if (!trg.IsValid()) {
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
    // Integral and reference results use V0.
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      // NOTE(review): verify the operand order here — if Mips64Assembler::Move
      // takes (destination, source), this copies trg_reg *into* V0 rather than
      // the return value out of it; confirm against the assembler's signature.
      __ Move(V0, trg_reg);
    }
  } else {
    // Floating-point results use F0.
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      // NOTE(review): same operand-order question as the Move above applies to
      // MovS/MovD — confirm (destination, source) convention.
      if (type == DataType::Type::kFloat32) {
        __ MovS(F0, trg_reg);
      } else {
        __ MovD(F0, trg_reg);
      }
    }
  }
}
77
78static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
79 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
80 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
81}
82
83// Slow-path for fallback (calling the managed code to handle the
84// intrinsic) in an intrinsified call. This will copy the arguments
85// into the positions for a regular call.
86//
87// Note: The actual parameters are required to be in the locations
88// given by the invoke's location summary. If an intrinsic
89// modifies those locations before a slowpath call, they must be
90// restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  // Emits the fallback call into managed code for an intrinsified invoke:
  // save live registers, marshal arguments, call, move the result back,
  // restore, and branch back to the fast path.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve every live register before the call clobbers them.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Arguments must be in the regular calling-convention positions.
    MoveArguments(invoke_, codegen);

    // A0 carries the ArtMethod* for both dispatch kinds.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      // The output must not have been saved/restored above, or the move below
      // would be undone by RestoreLiveRegisters().
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Unconditional compact branch back to the exit label of the fast path.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
133
134#undef __
135
Chris Larsen3039e382015-08-26 07:54:08 -0700136bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
137 Dispatch(invoke);
138 LocationSummary* res = invoke->GetLocations();
139 return res != nullptr && res->Intrinsified();
140}
141
142#define __ assembler->
143
Vladimir Markoca6fff82017-10-03 14:49:14 +0100144static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
145 LocationSummary* locations =
146 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700147 locations->SetInAt(0, Location::RequiresFpuRegister());
148 locations->SetOut(Location::RequiresRegister());
149}
150
151static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
152 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
153 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
154
155 if (is64bit) {
156 __ Dmfc1(out, in);
157 } else {
158 __ Mfc1(out, in);
159 }
160}
161
// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

// Implemented as a raw 64-bit register move, so every bit pattern passes
// through unchanged.
void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

// Raw 32-bit register move; no canonicalization of the bits.
void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
179
Vladimir Markoca6fff82017-10-03 14:49:14 +0100180static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
181 LocationSummary* locations =
182 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700183 locations->SetInAt(0, Location::RequiresRegister());
184 locations->SetOut(Location::RequiresFpuRegister());
185}
186
187static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
188 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
189 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
190
191 if (is64bit) {
192 __ Dmtc1(in, out);
193 } else {
194 __ Mtc1(in, out);
195 }
196}
197
// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

// Raw 64-bit register move into the FPU; the bit pattern is unchanged.
void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

// Raw 32-bit register move into the FPU.
void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
215
Vladimir Markoca6fff82017-10-03 14:49:14 +0100216static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
217 LocationSummary* locations =
218 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700219 locations->SetInAt(0, Location::RequiresRegister());
220 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
221}
222
223static void GenReverseBytes(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100224 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700225 Mips64Assembler* assembler) {
226 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
227 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
228
229 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100230 case DataType::Type::kInt16:
Chris Larsen3039e382015-08-26 07:54:08 -0700231 __ Dsbh(out, in);
232 __ Seh(out, out);
233 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100234 case DataType::Type::kInt32:
Chris Larsen3039e382015-08-26 07:54:08 -0700235 __ Rotr(out, in, 16);
236 __ Wsbh(out, out);
237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100238 case DataType::Type::kInt64:
Chris Larsen3039e382015-08-26 07:54:08 -0700239 __ Dsbh(out, in);
240 __ Dshd(out, out);
241 break;
242 default:
243 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
244 UNREACHABLE();
245 }
246}
247
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// Delegates to the shared byte-swap emitter at 32-bit width.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// 64-bit width.
void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

// 16-bit width (result is sign-extended by the emitter).
void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
}
274
Chris Larsen81284372015-10-21 15:28:53 -0700275static void GenNumberOfLeadingZeroes(LocationSummary* locations,
276 bool is64bit,
277 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700278 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
279 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
280
281 if (is64bit) {
282 __ Dclz(out, in);
283 } else {
284 __ Clz(out, in);
285 }
286}
287
288// int java.lang.Integer.numberOfLeadingZeros(int i)
289void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100290 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700291}
292
293void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000294 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700295}
296
297// int java.lang.Long.numberOfLeadingZeros(long i)
298void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100299 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700300}
301
302void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000303 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700304}
305
Chris Larsen81284372015-10-21 15:28:53 -0700306static void GenNumberOfTrailingZeroes(LocationSummary* locations,
307 bool is64bit,
308 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700309 Location in = locations->InAt(0);
310 Location out = locations->Out();
311
312 if (is64bit) {
313 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
314 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
315 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
316 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
317 } else {
318 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
319 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
322 }
323}
324
325// int java.lang.Integer.numberOfTrailingZeros(int i)
326void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100327 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700328}
329
330void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000331 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700332}
333
334// int java.lang.Long.numberOfTrailingZeros(long i)
335void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100336 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700337}
338
339void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000340 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700341}
342
343static void GenReverse(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100344 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700345 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100346 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen3039e382015-08-26 07:54:08 -0700347
348 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
349 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
350
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100351 if (type == DataType::Type::kInt32) {
Chris Larsen3039e382015-08-26 07:54:08 -0700352 __ Rotr(out, in, 16);
353 __ Wsbh(out, out);
354 __ Bitswap(out, out);
355 } else {
356 __ Dsbh(out, in);
357 __ Dshd(out, out);
358 __ Dbitswap(out, out);
359 }
360}
361
362// int java.lang.Integer.reverse(int)
363void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100364 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700365}
366
367void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100368 GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700369}
370
371// long java.lang.Long.reverse(long)
372void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100373 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700374}
375
376void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100377 GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700378}
379
Vladimir Markoca6fff82017-10-03 14:49:14 +0100380static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
381 LocationSummary* locations =
382 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700383 locations->SetInAt(0, Location::RequiresFpuRegister());
384 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
385}
386
// Emits the parallel population-count sequence for a 32- or 64-bit value.
// Clobbers the scratch registers TMP and AT.
static void GenBitCount(LocationSummary* locations,
                        const DataType::Type type,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.

  if (type == DataType::Type::kInt32) {
    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    __ LoadConst32(TMP, 0x01010101);
    __ MulR6(out, out, TMP);
    __ Srl(out, out, 24);
  } else if (type == DataType::Type::kInt64) {
    __ Dsrl(TMP, in, 1);
    __ LoadConst64(AT, 0x5555555555555555L);
    __ And(TMP, TMP, AT);
    __ Dsubu(TMP, in, TMP);
    __ LoadConst64(AT, 0x3333333333333333L);
    __ And(out, TMP, AT);
    __ Dsrl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Daddu(TMP, out, TMP);
    __ Dsrl(out, TMP, 4);
    __ Daddu(out, out, TMP);
    __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
    __ And(out, out, AT);
    __ LoadConst64(TMP, 0x0101010101010101L);
    __ Dmul(out, out, TMP);
    // Dsrl32 shifts by 32 + 24 = 56 bits, extracting the count byte.
    __ Dsrl32(out, out, 24);
  }
}

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
472
// Emits Java-semantics floating-point min/max. MIPSR6 MIN.fmt/MAX.fmt prefer
// numbers over NaNs, while Java requires a NaN result when either input is a
// NaN, so NaN inputs are filtered out before the min/max instruction.
// Clobbers FTMP.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        DataType::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch register for the NaN select: use `out` directly when it does not
  // alias either input, otherwise fall back to FTMP.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    // CMP.UN is true iff at least one operand is a NaN.
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
541
Vladimir Markoca6fff82017-10-03 14:49:14 +0100542static void CreateFPFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
543 LocationSummary* locations =
544 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700545 locations->SetInAt(0, Location::RequiresFpuRegister());
546 locations->SetInAt(1, Location::RequiresFpuRegister());
547 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
548}
549
550// double java.lang.Math.min(double, double)
551void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100552 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700553}
554
555void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100556 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, DataType::Type::kFloat64, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700557}
558
559// float java.lang.Math.min(float, float)
560void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100561 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700562}
563
564void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100565 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, DataType::Type::kFloat32, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700566}
567
568// double java.lang.Math.max(double, double)
569void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100570 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700571}
572
573void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100574 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, DataType::Type::kFloat64, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700575}
576
577// float java.lang.Math.max(float, float)
578void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100579 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700580}
581
582void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100583 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, DataType::Type::kFloat32, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700584}
585
// Emits integer min/max using the R6 SELEQZ/SELNEZ conditional selects.
// Clobbers AT.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x; at most a register copy is needed.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of {out, AT} is zero after the select pair; OR merges them.
    __ Or(out, out, AT);
  }
}
643
Vladimir Markoca6fff82017-10-03 14:49:14 +0100644static void CreateIntIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
645 LocationSummary* locations =
646 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700647 locations->SetInAt(0, Location::RequiresRegister());
648 locations->SetInAt(1, Location::RequiresRegister());
649 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
650}
651
// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  // Two GPR inputs, one non-overlapping GPR output; no slow path needed.
  CreateIntIntToIntLocations(allocator_, invoke);
}
656
void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  // Emit the branchless slt/seleqz/selnez min sequence (see GenMinMax above).
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}
660
// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  // Same register shape as the int variant; GPRs are 64-bit wide on MIPS64.
  CreateIntIntToIntLocations(allocator_, invoke);
}
665
void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  // Same branchless sequence as the int variant (see GenMinMax above).
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}
669
// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  // Two GPR inputs, one non-overlapping GPR output; no slow path needed.
  CreateIntIntToIntLocations(allocator_, invoke);
}
674
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  // Same sequence as min, with the select conditions inverted (is_min = false).
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
678
// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  // Same register shape as the int variant; GPRs are 64-bit wide on MIPS64.
  CreateIntIntToIntLocations(allocator_, invoke);
}
683
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  // Same sequence as min, with the select conditions inverted (is_min = false).
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
687
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  // One FPU input, one FPU output (CreateFPToFPLocations is defined earlier in this file).
  CreateFPToFPLocations(allocator_, invoke);
}
692
693void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
694 LocationSummary* locations = invoke->GetLocations();
695 Mips64Assembler* assembler = GetAssembler();
696 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
697 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
698
699 __ SqrtD(out, in);
700}
701
Vladimir Markoca6fff82017-10-03 14:49:14 +0100702static void CreateFPToFP(ArenaAllocator* allocator,
Chris Larsen81284372015-10-21 15:28:53 -0700703 HInvoke* invoke,
704 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100705 LocationSummary* locations =
706 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700707 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700708 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700709}
710
// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  // rint is a single instruction, so input and output may safely alias.
  CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
}
715
716void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
717 LocationSummary* locations = invoke->GetLocations();
718 Mips64Assembler* assembler = GetAssembler();
719 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
720 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
721
722 __ RintD(out, in);
723}
724
// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires in != out (DCHECK_NE below).
  CreateFPToFP(allocator_, invoke);
}
729
// Mask of class.d result bits for all input classes for which floor()/ceil()
// must return the input unchanged: zeros, infinities and NaNs. Used by
// GenRoundingMode below to take the early-out path.
const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                             kPositiveInfinity |
                                             kNegativeZero |
                                             kNegativeInfinity |
                                             kQuietNaN |
                                             kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700736
// Rounding direction selector for GenRoundingMode() below.
enum FloatRoundingMode {
  kFloor,
  kCeil,
};
741
// Shared emitter for Math.floor(double) / Math.ceil(double).
// Special inputs (zeros, infinities, NaNs) and results that fall outside the
// signed 64-bit range are returned unchanged, matching the Java contract.
// Clobbers AT and TMP.
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // The input must stay live until the final MovD below, so the locations
  // were created with the default kOutputOverlap (in and out are distinct).
  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //         // exception handling code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         //         or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
797
void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Shared floor/ceil emitter; kFloor selects floor.l.d.
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}
801
// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires in != out.
  CreateFPToFP(allocator_, invoke);
}
806
void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Shared floor/ceil emitter; kCeil selects ceil.l.d.
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
810
// Shared emitter for Math.round(float) -> int and Math.round(double) -> long.
// Computes floor(in) and then adds 1 when the fractional part is >= 0.5,
// except when floor saturated to MIN_VALUE/MAX_VALUE, in which case the
// saturated value is returned as-is. Clobbers AT, TMP and FTMP; 'half' is a
// caller-provided FPU temp used to hold the 0.5 constant.
static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //     TMP = ((in - out) >= 0.5) ? 1 : 0;
  //     return out += TMP;
  // }
  // return out;

  // out = floor(in);
  if (type == DataType::Type::kFloat64) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  if (type == DataType::Type::kFloat64) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           // or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);  // TMP = 0 if out = Int.MIN_VALUE
                          // or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  // Note: CmpLe* writes all-ones (-1) on true, so the final step subtracts.
  if (type == DataType::Type::kFloat64) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP.
  if (type == DataType::Type::kFloat64) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}
884
885// int java.lang.Math.round(float)
886void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100887 LocationSummary* locations =
888 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700889 locations->SetInAt(0, Location::RequiresFpuRegister());
890 locations->AddTemp(Location::RequiresFpuRegister());
891 locations->SetOut(Location::RequiresRegister());
892}
893
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  // float -> int rounding via the shared floor-plus-half emitter.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
}
897
898// long java.lang.Math.round(double)
899void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100900 LocationSummary* locations =
901 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700902 locations->SetInAt(0, Location::RequiresFpuRegister());
903 locations->AddTemp(Location::RequiresFpuRegister());
904 locations->SetOut(Location::RequiresRegister());
905}
906
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  // double -> long rounding via the shared floor-plus-half emitter.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
}
910
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
915
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Sign-extending byte load from the raw (unchecked) address.
  __ Lb(out, adr, 0);
}
923
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
928
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Sign-extending halfword load from the raw (unchecked) address.
  __ Lh(out, adr, 0);
}
936
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
941
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Sign-extending word load from the raw (unchecked) address.
  __ Lw(out, adr, 0);
}
949
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
954
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Doubleword load from the raw (unchecked) address.
  __ Ld(out, adr, 0);
}
962
Vladimir Markoca6fff82017-10-03 14:49:14 +0100963static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
964 LocationSummary* locations =
965 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700966 locations->SetInAt(0, Location::RequiresRegister());
967 locations->SetInAt(1, Location::RequiresRegister());
968}
969
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  // Address and value both in GPRs; no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
974
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // Byte store to the raw (unchecked) address.
  __ Sb(val, adr, 0);
}
982
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  // Address and value both in GPRs; no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
987
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // Halfword store to the raw (unchecked) address.
  __ Sh(val, adr, 0);
}
995
// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  // Address and value both in GPRs; no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1000
1001void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
1002 Mips64Assembler* assembler = GetAssembler();
1003 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1004 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1005
1006 __ Sw(val, adr, 00);
1007}
1008
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  // Address and value both in GPRs; no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1013
void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // Doubleword store to the raw (unchecked) address.
  __ Sd(val, adr, 0);
}
1021
Chris Larsen49e55392015-09-04 16:04:03 -07001022// Thread java.lang.Thread.currentThread()
1023void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001024 LocationSummary* locations =
1025 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen49e55392015-09-04 16:04:03 -07001026 locations->SetOut(Location::RequiresRegister());
1027}
1028
void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Load the java.lang.Thread peer object from the runtime Thread*, which
  // lives in the dedicated thread register (TR). The zero-extending word
  // load matches the 32-bit heap reference width.
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
1038
Vladimir Markoca6fff82017-10-03 14:49:14 +01001039static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
Alexey Frunze15958152017-02-09 19:08:30 -08001040 HInvoke* invoke,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001041 DataType::Type type) {
Alexey Frunze15958152017-02-09 19:08:30 -08001042 bool can_call = kEmitCompilerReadBarrier &&
1043 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
1044 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001045 LocationSummary* locations =
1046 new (allocator) LocationSummary(invoke,
1047 can_call
1048 ? LocationSummary::kCallOnSlowPath
1049 : LocationSummary::kNoCall,
1050 kIntrinsified);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001051 if (can_call && kUseBakerReadBarrier) {
1052 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
1053 }
Chris Larsen1360ada2015-09-04 23:38:16 -07001054 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1055 locations->SetInAt(1, Location::RequiresRegister());
1056 locations->SetInAt(2, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001057 locations->SetOut(Location::RequiresRegister(),
1058 (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001059 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze15958152017-02-09 19:08:30 -08001060 // We need a temporary register for the read barrier marking slow
1061 // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
1062 locations->AddTemp(Location::RequiresRegister());
1063 }
Chris Larsen1360ada2015-09-04 23:38:16 -07001064}
1065
// Shared emitter for the Unsafe.get* intrinsics (int/long/reference,
// plain and volatile). Volatile loads are implemented as the load followed
// by a full sync(0) barrier (load-acquire).
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // Precompute the effective address in TMP, except on the Baker read barrier
  // path below, which takes base and offset separately.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case DataType::Type::kInt64:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kInt32:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kReference:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          // Fast-path Baker read barrier; may divert to a marking slow path
          // using the temp register reserved in the locations builder.
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Non-Baker read barrier: plain load, then the slow read barrier.
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        // No read barrier configured: load and unpoison if heap poisoning is on.
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
1147
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // (receiver, object, offset) -> int register.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
1152
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Plain (non-volatile) 32-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
1156
// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // (receiver, object, offset) -> int register.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
1161
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit load (load followed by sync barrier).
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
}
1165
// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // (receiver, object, offset) -> long register.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
1170
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Plain (non-volatile) 64-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
1174
// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // (receiver, object, offset) -> long register.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
1179
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit load (load followed by sync barrier).
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
1183
// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference type: may reserve a read-barrier temp and a slow path.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
1188
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Plain (non-volatile) reference load, with read barrier if configured.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
1192
// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Reference type: may reserve a read-barrier temp and a slow path.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
1197
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load, with read barrier if configured.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}
1201
Vladimir Markoca6fff82017-10-03 14:49:14 +01001202static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
1203 LocationSummary* locations =
1204 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -07001205 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1206 locations->SetInAt(1, Location::RequiresRegister());
1207 locations->SetInAt(2, Location::RequiresRegister());
1208 locations->SetInAt(3, Location::RequiresRegister());
1209}
1210
// Shared emitter for the Unsafe.put* intrinsics (int/long/reference; plain,
// ordered and volatile). Ordered/volatile stores emit a sync(0) barrier
// before the store (release); volatile stores emit another one after it.
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  // Effective address in TMP.
  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
      // With heap poisoning, poison the reference into AT before storing so
      // that 'value' stays usable for the card mark below.
      if (kPoisonHeapReferences && type == DataType::Type::kReference) {
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case DataType::Type::kInt64:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  // Reference stores must dirty the GC card of the holding object.
  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1260
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // (receiver, object, offset, value), all in GPRs.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1265
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain 32-bit store: no barriers.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1273
// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // (receiver, object, offset, value), all in GPRs.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1278
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered 32-bit store: barrier before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1286
// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
//
// Same locations as the plain put; volatility is handled at code generation.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1291
1292void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001293 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001294 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001295 /* is_volatile */ true,
1296 /* is_ordered */ false,
1297 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001298}
1299
// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
//
// Register locations come from the CreateIntIntIntIntToVoid helper shared
// by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1304
1305void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001306 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001307 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001308 /* is_volatile */ false,
1309 /* is_ordered */ false,
1310 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001311}
1312
// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
//
// Same locations as the plain put; ordering is handled at code generation.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1317
1318void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001319 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001320 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001321 /* is_volatile */ false,
1322 /* is_ordered */ true,
1323 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001324}
1325
// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
//
// Same locations as the plain put; volatility is handled at code generation.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1330
1331void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001332 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001333 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001334 /* is_volatile */ true,
1335 /* is_ordered */ false,
1336 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001337}
1338
// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
//
// Register locations come from the CreateIntIntIntIntToVoid helper shared
// by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1343
1344void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001345 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001346 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001347 /* is_volatile */ false,
1348 /* is_ordered */ false,
1349 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001350}
1351
// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
//
// Same locations as the plain put; ordering is handled at code generation.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1356
1357void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001358 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001359 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001360 /* is_volatile */ false,
1361 /* is_ordered */ true,
1362 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001363}
1364
// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
//
// Same locations as the plain put; volatility is handled at code generation.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1369
1370void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001371 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001372 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001373 /* is_volatile */ true,
1374 /* is_ordered */ false,
1375 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001376}
1377
Vladimir Markoca6fff82017-10-03 14:49:14 +01001378static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001379 bool can_call = kEmitCompilerReadBarrier &&
1380 kUseBakerReadBarrier &&
1381 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001382 LocationSummary* locations =
1383 new (allocator) LocationSummary(invoke,
1384 can_call
1385 ? LocationSummary::kCallOnSlowPath
1386 : LocationSummary::kNoCall,
1387 kIntrinsified);
Chris Larsen36427492015-10-23 02:19:38 -07001388 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1389 locations->SetInAt(1, Location::RequiresRegister());
1390 locations->SetInAt(2, Location::RequiresRegister());
1391 locations->SetInAt(3, Location::RequiresRegister());
1392 locations->SetInAt(4, Location::RequiresRegister());
Chris Larsen36427492015-10-23 02:19:38 -07001393 locations->SetOut(Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001394
1395 // Temporary register used in CAS by (Baker) read barrier.
1396 if (can_call) {
1397 locations->AddTemp(Location::RequiresRegister());
1398 }
Chris Larsen36427492015-10-23 02:19:38 -07001399}
1400
Alexey Frunze15958152017-02-09 19:08:30 -08001401// Note that the caller must supply a properly aligned memory address.
1402// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001403static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
Chris Larsen36427492015-10-23 02:19:38 -07001404 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001405 LocationSummary* locations = invoke->GetLocations();
Chris Larsen36427492015-10-23 02:19:38 -07001406 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001407 Location offset_loc = locations->InAt(2);
1408 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001409 GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
1410 GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001411 Location out_loc = locations->Out();
1412 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001413
1414 DCHECK_NE(base, out);
1415 DCHECK_NE(offset, out);
1416 DCHECK_NE(expected, out);
1417
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001418 if (type == DataType::Type::kReference) {
Alexey Frunze15958152017-02-09 19:08:30 -08001419 // The only read barrier implementation supporting the
1420 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1421 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1422
1423 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1424 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001425 bool value_can_be_null = true; // TODO: Worth finding out this information?
1426 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001427
1428 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1429 Location temp = locations->GetTemp(0);
1430 // Need to make sure the reference stored in the field is a to-space
1431 // one before attempting the CAS or the CAS could fail incorrectly.
1432 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1433 invoke,
1434 out_loc, // Unused, used only as a "temporary" within the read barrier.
1435 base,
1436 /* offset */ 0u,
1437 /* index */ offset_loc,
1438 ScaleFactor::TIMES_1,
1439 temp,
1440 /* needs_null_check */ false,
1441 /* always_update_field */ true);
1442 }
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001443 }
1444
Alexey Frunzec061de12017-02-14 13:27:23 -08001445 Mips64Label loop_head, exit_loop;
1446 __ Daddu(TMP, base, offset);
1447
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001448 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001449 __ PoisonHeapReference(expected);
1450 // Do not poison `value`, if it is the same register as
1451 // `expected`, which has just been poisoned.
1452 if (value != expected) {
1453 __ PoisonHeapReference(value);
1454 }
1455 }
1456
Chris Larsen36427492015-10-23 02:19:38 -07001457 // do {
1458 // tmp_value = [tmp_ptr] - expected;
1459 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1460 // result = tmp_value != 0;
1461
Chris Larsen36427492015-10-23 02:19:38 -07001462 __ Sync(0);
1463 __ Bind(&loop_head);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001464 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001465 __ Lld(out, TMP);
1466 } else {
Roland Levillain391b8662015-12-18 11:43:38 +00001467 // Note: We will need a read barrier here, when read barrier
1468 // support is added to the MIPS64 back end.
Chris Larsen36427492015-10-23 02:19:38 -07001469 __ Ll(out, TMP);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001470 if (type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001471 // The LL instruction sign-extends the 32-bit value, but
1472 // 32-bit references must be zero-extended. Zero-extend `out`.
1473 __ Dext(out, out, 0, 32);
1474 }
Chris Larsen36427492015-10-23 02:19:38 -07001475 }
1476 __ Dsubu(out, out, expected); // If we didn't get the 'expected'
1477 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1478 __ Beqzc(out, &exit_loop); // return.
1479 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1480 // If we use 'value' directly, we would lose 'value'
1481 // in the case that the store fails. Whether the
1482 // store succeeds, or fails, it will load the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001483 // correct Boolean value into the 'out' register.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001484 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001485 __ Scd(out, TMP);
1486 } else {
1487 __ Sc(out, TMP);
1488 }
1489 __ Beqzc(out, &loop_head); // If we couldn't do the read-modify-write
1490 // cycle atomically then retry.
1491 __ Bind(&exit_loop);
1492 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001493
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001494 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001495 __ UnpoisonHeapReference(expected);
1496 // Do not unpoison `value`, if it is the same register as
1497 // `expected`, which has just been unpoisoned.
1498 if (value != expected) {
1499 __ UnpoisonHeapReference(value);
1500 }
1501 }
Chris Larsen36427492015-10-23 02:19:38 -07001502}
1503
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
//
// Locations come from the helper shared with the long/object CAS variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1508
// Emits the 32-bit LL/SC compare-and-swap loop via GenCas.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke, DataType::Type::kInt32, codegen_);
}
1512
// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
//
// Locations come from the helper shared with the int/object CAS variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1517
// Emits the 64-bit LLD/SCD compare-and-swap loop via GenCas.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke, DataType::Type::kInt64, codegen_);
}
1521
1522// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1523void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001524 // The only read barrier implementation supporting the
1525 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1526 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1527 return;
1528 }
1529
Vladimir Markoca6fff82017-10-03 14:49:14 +01001530 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001531}
1532
// Emits the reference compare-and-swap loop via GenCas (which also handles
// card marking, heap-reference poisoning, and the Baker read barrier).
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  GenCas(invoke, DataType::Type::kReference, codegen_);
}
1540
Chris Larsen9701c2e2015-09-04 17:22:47 -07001541// int java.lang.String.compareTo(String anotherString)
1542void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001543 LocationSummary* locations = new (allocator_) LocationSummary(
1544 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001545 InvokeRuntimeCallingConvention calling_convention;
1546 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1547 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001548 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001549 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1550}
1551
1552void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
1553 Mips64Assembler* assembler = GetAssembler();
1554 LocationSummary* locations = invoke->GetLocations();
1555
1556 // Note that the null check must have been done earlier.
1557 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1558
1559 GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001560 SlowPathCodeMIPS64* slow_path =
1561 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001562 codegen_->AddSlowPath(slow_path);
1563 __ Beqzc(argument, slow_path->GetEntryLabel());
1564
Serban Constantinescufc734082016-07-19 17:18:07 +01001565 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001566 __ Bind(slow_path->GetExitLabel());
1567}
1568
Chris Larsen972d6d72015-10-20 11:29:12 -07001569// boolean java.lang.String.equals(Object anObject)
1570void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoda283052017-11-07 21:17:24 +00001571 if (kEmitCompilerReadBarrier &&
1572 !StringEqualsOptimizations(invoke).GetArgumentIsString() &&
1573 !StringEqualsOptimizations(invoke).GetNoReadBarrierForStringClass()) {
1574 // No support for this odd case (String class is moveable, not in the boot image).
1575 return;
1576 }
1577
Vladimir Markoca6fff82017-10-03 14:49:14 +01001578 LocationSummary* locations =
1579 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001580 locations->SetInAt(0, Location::RequiresRegister());
1581 locations->SetInAt(1, Location::RequiresRegister());
1582 locations->SetOut(Location::RequiresRegister());
1583
1584 // Temporary registers to store lengths of strings and for calculations.
1585 locations->AddTemp(Location::RequiresRegister());
1586 locations->AddTemp(Location::RequiresRegister());
1587 locations->AddTemp(Location::RequiresRegister());
1588}
1589
// Emits code for String.equals(Object): fast paths for identity, null
// argument and length/compression mismatch, then an 8-bytes-at-a-time
// comparison loop over the character data.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);  // Extract compression flag.
    __ Srl(temp1, temp1, 1);  // Extract length.
    __ Sllv(temp1, temp1, temp2);  // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1692
Chris Larsen9701c2e2015-09-04 17:22:47 -07001693static void GenerateStringIndexOf(HInvoke* invoke,
1694 Mips64Assembler* assembler,
1695 CodeGeneratorMIPS64* codegen,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001696 bool start_at_zero) {
1697 LocationSummary* locations = invoke->GetLocations();
1698 GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
1699
1700 // Note that the null check must have been done earlier.
1701 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1702
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001703 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1704 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Chris Larsen9701c2e2015-09-04 17:22:47 -07001705 SlowPathCodeMIPS64* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001706 HInstruction* code_point = invoke->InputAt(1);
1707 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001708 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001709 // Always needs the slow-path. We could directly dispatch to it,
1710 // but this case should be rare, so for simplicity just put the
1711 // full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001712 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001713 codegen->AddSlowPath(slow_path);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001714 __ Bc(slow_path->GetEntryLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -07001715 __ Bind(slow_path->GetExitLabel());
1716 return;
1717 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001718 } else if (code_point->GetType() != DataType::Type::kUint16) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001719 GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
1720 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
Vladimir Marko174b2e22017-10-12 13:34:49 +01001721 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001722 codegen->AddSlowPath(slow_path);
1723 __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
1724 }
1725
1726 if (start_at_zero) {
1727 DCHECK_EQ(tmp_reg, A2);
1728 // Start-index = 0.
1729 __ Clear(tmp_reg);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001730 }
1731
Serban Constantinescufc734082016-07-19 17:18:07 +01001732 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001733 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001734
1735 if (slow_path != nullptr) {
1736 __ Bind(slow_path->GetExitLabel());
1737 }
1738}
1739
1740// int java.lang.String.indexOf(int ch)
1741void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001742 LocationSummary* locations = new (allocator_) LocationSummary(
1743 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001744 // We have a hand-crafted assembly stub that follows the runtime
1745 // calling convention. So it's best to align the inputs accordingly.
1746 InvokeRuntimeCallingConvention calling_convention;
1747 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1748 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001749 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001750 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1751
1752 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1753 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1754}
1755
// int java.lang.String.indexOf(int ch): search from index 0.
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ true);
}
1759
1760// int java.lang.String.indexOf(int ch, int fromIndex)
1761void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001762 LocationSummary* locations = new (allocator_) LocationSummary(
1763 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001764 // We have a hand-crafted assembly stub that follows the runtime
1765 // calling convention. So it's best to align the inputs accordingly.
1766 InvokeRuntimeCallingConvention calling_convention;
1767 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1768 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1769 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001770 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001771 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1772}
1773
// int java.lang.String.indexOf(int ch, int fromIndex): search from the
// caller-supplied start index.
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ false);
}
1777
Roland Levillaincc3839c2016-02-29 16:23:48 +00001778// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001779void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001780 LocationSummary* locations = new (allocator_) LocationSummary(
1781 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001782 InvokeRuntimeCallingConvention calling_convention;
1783 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1784 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1785 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1786 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001787 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001788 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1789}
1790
// Emits code for StringFactory.newStringFromBytes: a null `data` array is
// diverted to the slow path; otherwise the allocation is delegated to the
// kQuickAllocStringFromBytes runtime entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1805
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // Lowered to a main-path runtime call, so all three arguments are pinned to
  // the runtime calling-convention registers and the result to its return
  // register.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}
1817
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // Delegates straight to the runtime; no slow path is required here.
  //
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1828
// java.lang.StringFactory.newStringFromString(String toCopy)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  // Calls the runtime on the main path and also keeps a slow path (for the
  // null-input case), so the argument and result use the runtime
  // calling-convention registers.
  LocationSummary* locations = new (allocator_) LocationSummary(
      invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}
1838
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Bail to the slow path when the source string is null.
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  // Main path: call the runtime allocation entrypoint directly.
  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1853
// Generates code for Float.isInfinite / Double.isInfinite: classifies the
// FP input, keeps only the +/-infinity class bits, and normalizes the
// result to 0 or 1.
static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // class.d/class.s deposit a class bit mask describing `in` into FTMP.
  if (is64bit) {
    __ ClassD(FTMP, in);
  } else {
    __ ClassS(FTMP, in);
  }
  // Move the mask to a GPR, isolate the infinity bits, then set
  // out = (out != 0) ? 1 : 0.
  __ Mfc1(out, FTMP);
  __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
  __ Sltu(out, ZERO, out);
}
1869
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Single-precision variant; lowering shared in GenIsInfinite.
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Double-precision variant; lowering shared in GenIsInfinite.
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1887
// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // Three temporaries used by the inline copy loops in the code generator:
  // destination pointer, source pointer, and remaining character count.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
1902
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  // numChrs is non-zero below because srcEnd != srcBegin was checked above.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address.
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Uncompressed copy loop: one halfword (char) per iteration.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
1978
// static void java.lang.System.arraycopy(Object src, int srcPos,
//                                        Object dest, int destPos,
//                                        int length)
void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // AsIntConstant() yields null when the corresponding input is not a
  // compile-time constant (see the null checks below).
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // Temporaries for the destination base, source base, and element count.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
2018
// Utility routine to verify that "length(input) - pos >= length"
// (`length_input_minus_pos` already holds length(input) - pos).
// Branches to `slow_path` when the check fails.
static void EnoughItems(Mips64Assembler* assembler,
                        GpuRegister length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS64* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      // The constant fits in slti's 16-bit signed immediate.
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnezc(TMP, slow_path->GetEntryLabel());
    } else {
      // Constant too wide for an immediate; materialize it and compare.
      __ LoadConst32(TMP, length_constant);
      __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
2038
// Emits range checks for one arraycopy operand: verifies that `pos` is
// non-negative and that "length(input) - pos >= length", branching to
// `slow_path` on failure. When `length_is_input_length` is true, the copy
// length equals length(input), so the copy can only succeed when pos == 0.
// Clobbers AT.
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
2085
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Halfword-at-a-time copy loop; count is known to be non-zero here.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
2172
// Generates code for {Integer,Long}.highestOneBit: shifts a sign-bit-only
// constant right by the count of leading zeros of the input, then masks
// with the input so that a zero input yields zero.
static void GenHighestOneBit(LocationSummary* locations,
                             DataType::Type type,
                             Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == DataType::Type::kInt64) {
    __ Dclz(TMP, in);
    __ LoadConst64(AT, INT64_C(0x8000000000000000));
    __ Dsrlv(AT, AT, TMP);
  } else {
    __ Clz(TMP, in);
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);
  }
  // For either value of "type", when "in" is zero, "out" should also
  // be zero. Without this extra "and" operation, when "in" is zero,
  // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
  // the MIPS logical shift operations "dsrlv", and "srlv" don't use
  // the shift amount (TMP) directly; they use either (TMP % 64) or
  // (TMP % 32), respectively.
  __ And(out, AT, in);
}
2198
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  // 32-bit lowering; shared with the long variant via GenHighestOneBit.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  // 64-bit lowering; shared with the int variant via GenHighestOneBit.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2216
2217static void GenLowestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002218 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07002219 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002220 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07002221
2222 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2223 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2224
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002225 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07002226 __ Dsubu(TMP, ZERO, in);
2227 } else {
2228 __ Subu(TMP, ZERO, in);
2229 }
2230 __ And(out, TMP, in);
2231}
2232
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  // 32-bit lowering; shared with the long variant via GenLowestOneBit.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  // 64-bit lowering; shared with the int variant via GenLowestOneBit.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2250
// Builds locations for a one-FP-argument intrinsic lowered to a runtime
// call: argument in the first FP calling-convention register, result in
// the FP return register.
static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}
2259
// Builds locations for a two-FP-argument intrinsic lowered to a runtime
// call: arguments in the first two FP calling-convention registers,
// result in the FP return register.
static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}
2269
2270static void GenFPToFPCall(HInvoke* invoke,
2271 CodeGeneratorMIPS64* codegen,
2272 QuickEntrypointEnum entry) {
2273 LocationSummary* locations = invoke->GetLocations();
2274 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
2275 DCHECK_EQ(in, F12);
2276 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2277 DCHECK_EQ(out, F0);
2278
2279 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2280}
2281
2282static void GenFPFPToFPCall(HInvoke* invoke,
2283 CodeGeneratorMIPS64* codegen,
2284 QuickEntrypointEnum entry) {
2285 LocationSummary* locations = invoke->GetLocations();
2286 FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
2287 DCHECK_EQ(in0, F12);
2288 FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
2289 DCHECK_EQ(in1, F13);
2290 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2291 DCHECK_EQ(out, F0);
2292
2293 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2294}
2295
// Each Math intrinsic below is lowered to a call to its quick runtime
// entrypoint via GenFPToFPCall/GenFPFPToFPCall.

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}
2349
// static double java.lang.Math.pow(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathPow(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickPow);
}
2358
// Each Math intrinsic below is lowered to a call to its quick runtime
// entrypoint via GenFPToFPCall/GenFPFPToFPCall.

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2457
// static Integer java.lang.Integer.valueOf(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  // Shared helper decides whether to intrinsify; result uses the runtime
  // reference-return location, argument the first runtime argument register.
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2467
// Code generation for Integer.valueOf(int): if the value lies in the boot-image
// IntegerCache range [info.low, info.high], return the cached boxed object;
// otherwise allocate a new j.l.Integer and store the value into its `value` field.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    // Constant input: decide at compile time whether it is cached.
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      // Boot-image addresses fit in 32 bits; materialize the object pointer directly.
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst64(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst64(calling_convention.GetRegisterAt(0), address);
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      // NOTE(review): the entrypoint invoked above is kQuickAllocObjectInitialized,
      // while the signature check below names kQuickAllocObjectWithChecks. Both
      // have the void*(mirror::Class*) shape so the check still holds, but confirm
      // the mismatch is intentional.
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    // Runtime input: range-check against the cache bounds, then either load the
    // cached object or fall through to allocation.
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities is out < (info.high - info.low + 1)?
    __ LoadConst32(AT, count);
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Bgeuc(out, AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst64(TMP, data_offset + address);
    // out = Lwu(cache_data + out * 4): 32-bit heap references, hence TIMES_4 scaling.
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst64(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
2535
// static boolean java.lang.Thread.interrupted()
void IntrinsicLocationsBuilderMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  // No runtime call needed: the flag is read/cleared inline from the Thread object,
  // so only an output register is required.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}
2542
void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
  // Load the interrupted flag from the current Thread (TR holds the Thread pointer).
  int32_t offset = Thread::InterruptedOffset<kMips64PointerSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  Mips64Label done;
  // If the flag is zero, `out` already holds the (false) return value.
  __ Beqzc(out, &done);
  // Thread was interrupted: clear the flag, bracketing the store with SYNC 0
  // full barriers so the clear is not reordered with neighboring memory accesses.
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}
2555
Aart Bik2f9fcc92016-03-01 15:16:54 -08002556UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002557UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08002558
Aart Bikff7d89c2016-11-07 08:49:28 -08002559UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2560UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002561UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2562UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2563UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2564UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2565UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2566UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002567
Aart Bik0e54c012016-03-04 12:08:31 -08002568// 1.8.
2569UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2570UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2571UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2572UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2573UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002574
Aart Bik2f9fcc92016-03-01 15:16:54 -08002575UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002576
2577#undef __
2578
2579} // namespace mips64
2580} // namespace art