/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr GpuRegister kMethodRegisterArgument = A0;

Location Mips64ReturnLocation(Primitive::Type return_type) {
  switch (return_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(V0);

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(F0);

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
  return Mips64ReturnLocation(type);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  // TODO: shouldn't we use a whole machine word per argument on the stack?
  // Implicit 4-byte method pointer (and such) will cause misalignment.

  return next_location;
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return Mips64ReturnLocation(type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};

class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};

class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      mips64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};

class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HLoadString* load = instruction_->AsLoadString();
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 calling_convention.GetReturnLocation(type),
                                 type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    // TODO: Change art_quick_resolve_string to kSaveEverything and use a temporary for the
    // .bss entry address in the fast path, so that we can avoid another calculation here.
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    DCHECK_NE(out, AT);
    CodeGeneratorMIPS64::PcRelativePatchInfo* info =
        mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
    mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
    __ Sw(out, AT, /* placeholder */ 0x5678);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};

class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};

class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ Bc(GetReturnLabel());
    } else {
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};

class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};

class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};

CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<Mips64Assembler*>(GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position = stack_map_stream_.GetStackMap(i).native_pc_offset;
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}

Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}

void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}

void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}

static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}

static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}

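// Frame entry sequence: when an overflow check is required, a load from SP minus the reserved
// overflow gap probes the guard page; the callee-saved core and FP registers are then spilled,
// the remainder of the frame is allocated, and the current ArtMethod* (passed in A0) is stored
// at SP + 0 when the method requires it.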
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large. Per the various APIs
  // it looks like it should always be less than 2GB in size, which allows
  // us to use 32-bit signed offsets from the stack pointer.
  if (GetFrameSize() > 0x7FFFFFFF)
    LOG(FATAL) << "Stack frame larger than 2GB";

  // Spill callee-saved registers.
  // Note that their cumulative size is small and they can be indexed using
  // 16-bit offsets.

  // TODO: increment/decrement SP in one step instead of two or remove this comment.

  uint32_t ofs = FrameEntrySpillSize();
  __ IncreaseFrameSize(ofs);

  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ Sd(reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ Sdc1(reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Allocate the rest of the frame and store the current method pointer
  // at its end.

  __ IncreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    static_assert(IsInt<16>(kCurrentMethodStackOffset),
                  "kCurrentMethodStackOffset must fit into int16_t");
    __ Sd(kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}

void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Deallocate the rest of the frame.

    __ DecreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());

    // Restore callee-saved registers.
    // Note that their cumulative size is small and they can be indexed using
    // 16-bit offsets.

    // TODO: increment/decrement SP in one step instead of two or remove this comment.

    uint32_t ofs = 0;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        __ Ldc1(reg, SP, ofs);
        ofs += kMips64DoublewordSize;
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ Ld(reg, SP, ofs);
        ofs += kMips64DoublewordSize;
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    DCHECK_EQ(ofs, FrameEntrySpillSize());
    __ DecreaseFrameSize(ofs);
  }

  __ Jr(RA);
  __ Nop();

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 64bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        // Move to FPR from FPR
        if (dst_type == Primitive::kPrimFloat) {
          __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          DCHECK_EQ(dst_type, Primitive::kPrimDouble);
          __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      } else {
        DCHECK(destination.IsRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}

void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}

void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ LoadConst32(location.AsRegister<GpuRegister>(), value);
}

void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

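// Write-barrier card marking: the card index is (object >> kCardShift), added to the card table
// base cached in the Thread block (addressed via TR), and the low byte of that base is stored as
// the dirty value. The marking is skipped when `value` is null and the caller allows null values.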
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.pc_rel_label.IsBound());
    uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
    linker_patches->push_back(Factory(pc_rel_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}

void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      pc_relative_string_patches_.size() +
      pc_relative_type_patches_.size() +
      boot_image_string_patches_.size() +
      boot_image_type_patches_.size() +
      boot_image_address_patches_.size();
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                              linker_patches);
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::StringPatch(literal_offset,
                                                       target_string.dex_file,
                                                       target_string.string_index.index_));
  }
  for (const auto& entry : boot_image_type_patches_) {
    const TypeReference& target_type = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::TypePatch(literal_offset,
                                                     target_type.dex_file,
                                                     target_type.type_index.index_));
  }
  for (const auto& entry : boot_image_address_patches_) {
    DCHECK(GetCompilerOptions().GetIncludePatchInformation());
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file, uint32_t string_index) {
  return NewPcRelativePatch(dex_file, string_index, &pc_relative_string_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file, uint32_t element_offset) {
  return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index);
  return &patches->back();
}

Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateMethodLiteral(MethodReference target_method,
                                                       MethodToLiteralMap* map) {
  return map->GetOrCreate(
      target_method,
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageStringLiteral(const DexFile& dex_file,
                                                                dex::StringIndex string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageTypeLiteral(const DexFile& dex_file,
                                                              dex::TypeIndex type_index) {
  return boot_image_type_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
  bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
  Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
}

void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                               GpuRegister out) {
  __ Bind(&info->pc_rel_label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
}

void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}

size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(kStoreDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(kLoadDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}

void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}

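// Runtime calls load the entrypoint address from the Thread object (addressed via TR) into T9
// and call through JALR; the NOP after the call fills the branch delay slot. A stack map is
// recorded only for entrypoints that require one.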
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  __ Jalr(T9);
  __ Nop();
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // TODO: barrier needed?
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}

void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

1152void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1153 DCHECK_EQ(instruction->InputCount(), 2U);
1154 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1155 Primitive::Type type = instruction->GetResultType();
1156 switch (type) {
1157 case Primitive::kPrimInt:
1158 case Primitive::kPrimLong: {
1159 locations->SetInAt(0, Location::RequiresRegister());
1160 HInstruction* right = instruction->InputAt(1);
1161 bool can_use_imm = false;
1162 if (right->IsConstant()) {
1163 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
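        // andi/ori/xori take a zero-extended 16-bit immediate, while addiu/daddiu take
        // a sign-extended one; a subtraction by a constant is emitted as an addition of
        // the negated constant, hence the IsInt<16>(-imm) check.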
1164 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1165 can_use_imm = IsUint<16>(imm);
1166 } else if (instruction->IsAdd()) {
1167 can_use_imm = IsInt<16>(imm);
1168 } else {
1169 DCHECK(instruction->IsSub());
1170 can_use_imm = IsInt<16>(-imm);
1171 }
1172 }
1173 if (can_use_imm)
1174 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1175 else
1176 locations->SetInAt(1, Location::RequiresRegister());
1177 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1178 }
1179 break;
1180
1181 case Primitive::kPrimFloat:
1182 case Primitive::kPrimDouble:
1183 locations->SetInAt(0, Location::RequiresFpuRegister());
1184 locations->SetInAt(1, Location::RequiresFpuRegister());
1185 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1186 break;
1187
1188 default:
1189 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1190 }
1191}
1192
1193void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1194 Primitive::Type type = instruction->GetType();
1195 LocationSummary* locations = instruction->GetLocations();
1196
1197 switch (type) {
1198 case Primitive::kPrimInt:
1199 case Primitive::kPrimLong: {
1200 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
1201 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
1202 Location rhs_location = locations->InAt(1);
1203
1204 GpuRegister rhs_reg = ZERO;
1205 int64_t rhs_imm = 0;
1206 bool use_imm = rhs_location.IsConstant();
1207 if (use_imm) {
1208 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
1209 } else {
1210 rhs_reg = rhs_location.AsRegister<GpuRegister>();
1211 }
1212
1213 if (instruction->IsAnd()) {
1214 if (use_imm)
1215 __ Andi(dst, lhs, rhs_imm);
1216 else
1217 __ And(dst, lhs, rhs_reg);
1218 } else if (instruction->IsOr()) {
1219 if (use_imm)
1220 __ Ori(dst, lhs, rhs_imm);
1221 else
1222 __ Or(dst, lhs, rhs_reg);
1223 } else if (instruction->IsXor()) {
1224 if (use_imm)
1225 __ Xori(dst, lhs, rhs_imm);
1226 else
1227 __ Xor(dst, lhs, rhs_reg);
1228 } else if (instruction->IsAdd()) {
1229 if (type == Primitive::kPrimInt) {
1230 if (use_imm)
1231 __ Addiu(dst, lhs, rhs_imm);
1232 else
1233 __ Addu(dst, lhs, rhs_reg);
1234 } else {
1235 if (use_imm)
1236 __ Daddiu(dst, lhs, rhs_imm);
1237 else
1238 __ Daddu(dst, lhs, rhs_reg);
1239 }
1240 } else {
1241 DCHECK(instruction->IsSub());
1242 if (type == Primitive::kPrimInt) {
1243 if (use_imm)
1244 __ Addiu(dst, lhs, -rhs_imm);
1245 else
1246 __ Subu(dst, lhs, rhs_reg);
1247 } else {
1248 if (use_imm)
1249 __ Daddiu(dst, lhs, -rhs_imm);
1250 else
1251 __ Dsubu(dst, lhs, rhs_reg);
1252 }
1253 }
1254 break;
1255 }
1256 case Primitive::kPrimFloat:
1257 case Primitive::kPrimDouble: {
1258 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
1259 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
1260 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
1261 if (instruction->IsAdd()) {
1262 if (type == Primitive::kPrimFloat)
1263 __ AddS(dst, lhs, rhs);
1264 else
1265 __ AddD(dst, lhs, rhs);
1266 } else if (instruction->IsSub()) {
1267 if (type == Primitive::kPrimFloat)
1268 __ SubS(dst, lhs, rhs);
1269 else
1270 __ SubD(dst, lhs, rhs);
1271 } else {
1272 LOG(FATAL) << "Unexpected floating-point binary operation";
1273 }
1274 break;
1275 }
1276 default:
1277 LOG(FATAL) << "Unexpected binary operation type " << type;
1278 }
1279}
1280
1281void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001282 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001283
1284 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1285 Primitive::Type type = instr->GetResultType();
1286 switch (type) {
1287 case Primitive::kPrimInt:
1288 case Primitive::kPrimLong: {
1289 locations->SetInAt(0, Location::RequiresRegister());
1290 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001291 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001292 break;
1293 }
1294 default:
1295 LOG(FATAL) << "Unexpected shift type " << type;
1296 }
1297}
1298
1299void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001300 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001301 LocationSummary* locations = instr->GetLocations();
1302 Primitive::Type type = instr->GetType();
1303
1304 switch (type) {
1305 case Primitive::kPrimInt:
1306 case Primitive::kPrimLong: {
1307 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
1308 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
1309 Location rhs_location = locations->InAt(1);
1310
1311 GpuRegister rhs_reg = ZERO;
1312 int64_t rhs_imm = 0;
1313 bool use_imm = rhs_location.IsConstant();
1314 if (use_imm) {
1315 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
1316 } else {
1317 rhs_reg = rhs_location.AsRegister<GpuRegister>();
1318 }
1319
1320 if (use_imm) {
Roland Levillain5b5b9312016-03-22 14:57:31 +00001321 uint32_t shift_value = rhs_imm &
1322 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001323
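        // Java masks the shift distance to 0-31 for int and 0-63 for long. 64-bit shifts
        // by 32 or more use the Dsll32/Dsra32/Dsrl32/Drotr32 encodings, which shift by
        // the distance minus 32.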
Alexey Frunze92d90602015-12-18 18:16:36 -08001324 if (shift_value == 0) {
1325 if (dst != lhs) {
1326 __ Move(dst, lhs);
1327 }
1328 } else if (type == Primitive::kPrimInt) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001329 if (instr->IsShl()) {
1330 __ Sll(dst, lhs, shift_value);
1331 } else if (instr->IsShr()) {
1332 __ Sra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001333 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001334 __ Srl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001335 } else {
1336 __ Rotr(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001337 }
1338 } else {
1339 if (shift_value < 32) {
1340 if (instr->IsShl()) {
1341 __ Dsll(dst, lhs, shift_value);
1342 } else if (instr->IsShr()) {
1343 __ Dsra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001344 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001345 __ Dsrl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001346 } else {
1347 __ Drotr(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001348 }
1349 } else {
1350 shift_value -= 32;
1351 if (instr->IsShl()) {
1352 __ Dsll32(dst, lhs, shift_value);
1353 } else if (instr->IsShr()) {
1354 __ Dsra32(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001355 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001356 __ Dsrl32(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001357 } else {
1358 __ Drotr32(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001359 }
1360 }
1361 }
1362 } else {
1363 if (type == Primitive::kPrimInt) {
1364 if (instr->IsShl()) {
1365 __ Sllv(dst, lhs, rhs_reg);
1366 } else if (instr->IsShr()) {
1367 __ Srav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001368 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001369 __ Srlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001370 } else {
1371 __ Rotrv(dst, lhs, rhs_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001372 }
1373 } else {
1374 if (instr->IsShl()) {
1375 __ Dsllv(dst, lhs, rhs_reg);
1376 } else if (instr->IsShr()) {
1377 __ Dsrav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001378 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001379 __ Dsrlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001380 } else {
1381 __ Drotrv(dst, lhs, rhs_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001382 }
1383 }
1384 }
1385 break;
1386 }
1387 default:
1388 LOG(FATAL) << "Unexpected shift operation type " << type;
1389 }
1390}
1391
1392void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
1393 HandleBinaryOp(instruction);
1394}
1395
1396void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
1397 HandleBinaryOp(instruction);
1398}
1399
1400void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
1401 HandleBinaryOp(instruction);
1402}
1403
1404void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
1405 HandleBinaryOp(instruction);
1406}
1407
1408void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
1409 LocationSummary* locations =
1410 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1411 locations->SetInAt(0, Location::RequiresRegister());
1412 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1413 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1414 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1415 } else {
1416 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1417 }
1418}
1419
1420void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
1421 LocationSummary* locations = instruction->GetLocations();
1422 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1423 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01001424 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001425
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01001426 Primitive::Type type = instruction->GetType();
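  // A constant index is folded into the load displacement; otherwise the index is
  // scaled by the element size (a Dsll when the element is wider than a byte), added
  // to the array base, and data_offset is applied as the displacement of the load.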
Alexey Frunze4dda3372015-06-01 18:31:49 -07001427 switch (type) {
1428 case Primitive::kPrimBoolean: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001429 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1430 if (index.IsConstant()) {
1431 size_t offset =
1432 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1433 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
1434 } else {
1435 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
1436 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
1437 }
1438 break;
1439 }
1440
1441 case Primitive::kPrimByte: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001442 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1443 if (index.IsConstant()) {
1444 size_t offset =
1445 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1446 __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
1447 } else {
1448 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
1449 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset);
1450 }
1451 break;
1452 }
1453
1454 case Primitive::kPrimShort: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001455 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1456 if (index.IsConstant()) {
1457 size_t offset =
1458 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1459 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
1460 } else {
1461 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
1462 __ Daddu(TMP, obj, TMP);
1463 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset);
1464 }
1465 break;
1466 }
1467
1468 case Primitive::kPrimChar: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001469 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1470 if (index.IsConstant()) {
1471 size_t offset =
1472 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1473 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
1474 } else {
1475 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
1476 __ Daddu(TMP, obj, TMP);
1477 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
1478 }
1479 break;
1480 }
1481
1482 case Primitive::kPrimInt:
1483 case Primitive::kPrimNot: {
1484 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001485 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1486 LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
1487 if (index.IsConstant()) {
1488 size_t offset =
1489 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1490 __ LoadFromOffset(load_type, out, obj, offset);
1491 } else {
1492 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1493 __ Daddu(TMP, obj, TMP);
1494 __ LoadFromOffset(load_type, out, TMP, data_offset);
1495 }
1496 break;
1497 }
1498
1499 case Primitive::kPrimLong: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001500 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1501 if (index.IsConstant()) {
1502 size_t offset =
1503 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1504 __ LoadFromOffset(kLoadDoubleword, out, obj, offset);
1505 } else {
1506 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1507 __ Daddu(TMP, obj, TMP);
1508 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset);
1509 }
1510 break;
1511 }
1512
1513 case Primitive::kPrimFloat: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001514 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
1515 if (index.IsConstant()) {
1516 size_t offset =
1517 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1518 __ LoadFpuFromOffset(kLoadWord, out, obj, offset);
1519 } else {
1520 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1521 __ Daddu(TMP, obj, TMP);
1522 __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset);
1523 }
1524 break;
1525 }
1526
1527 case Primitive::kPrimDouble: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001528 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
1529 if (index.IsConstant()) {
1530 size_t offset =
1531 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1532 __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset);
1533 } else {
1534 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1535 __ Daddu(TMP, obj, TMP);
1536 __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset);
1537 }
1538 break;
1539 }
1540
1541 case Primitive::kPrimVoid:
1542 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1543 UNREACHABLE();
1544 }
1545 codegen_->MaybeRecordImplicitNullCheck(instruction);
1546}
1547
1548void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
1549 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1550 locations->SetInAt(0, Location::RequiresRegister());
1551 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1552}
1553
1554void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
1555 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01001556 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001557 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1558 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1559 __ LoadFromOffset(kLoadWord, out, obj, offset);
1560 codegen_->MaybeRecordImplicitNullCheck(instruction);
1561}
1562
1563void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
David Brazdilbb3d5052015-09-21 18:39:16 +01001564 bool needs_runtime_call = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001565 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1566 instruction,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001567 needs_runtime_call ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
David Brazdilbb3d5052015-09-21 18:39:16 +01001568 if (needs_runtime_call) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001569 InvokeRuntimeCallingConvention calling_convention;
1570 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1571 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1572 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1573 } else {
1574 locations->SetInAt(0, Location::RequiresRegister());
1575 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1576 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1577 locations->SetInAt(2, Location::RequiresFpuRegister());
1578 } else {
1579 locations->SetInAt(2, Location::RequiresRegister());
1580 }
1581 }
1582}
1583
1584void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
1585 LocationSummary* locations = instruction->GetLocations();
1586 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1587 Location index = locations->InAt(1);
1588 Primitive::Type value_type = instruction->GetComponentType();
1589 bool needs_runtime_call = locations->WillCall();
1590 bool needs_write_barrier =
1591 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
1592
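  // Stores of references that need a dynamic type check are routed to the
  // kQuickAputObject runtime entrypoint; inline reference stores are followed by a
  // GC card mark (MarkGCCard) when a write barrier is required.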
1593 switch (value_type) {
1594 case Primitive::kPrimBoolean:
1595 case Primitive::kPrimByte: {
1596 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1597 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1598 if (index.IsConstant()) {
1599 size_t offset =
1600 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1601 __ StoreToOffset(kStoreByte, value, obj, offset);
1602 } else {
1603 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
1604 __ StoreToOffset(kStoreByte, value, TMP, data_offset);
1605 }
1606 break;
1607 }
1608
1609 case Primitive::kPrimShort:
1610 case Primitive::kPrimChar: {
1611 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1612 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1613 if (index.IsConstant()) {
1614 size_t offset =
1615 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1616 __ StoreToOffset(kStoreHalfword, value, obj, offset);
1617 } else {
1618 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
1619 __ Daddu(TMP, obj, TMP);
1620 __ StoreToOffset(kStoreHalfword, value, TMP, data_offset);
1621 }
1622 break;
1623 }
1624
1625 case Primitive::kPrimInt:
1626 case Primitive::kPrimNot: {
1627 if (!needs_runtime_call) {
1628 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1629 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1630 if (index.IsConstant()) {
1631 size_t offset =
1632 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1633 __ StoreToOffset(kStoreWord, value, obj, offset);
1634 } else {
1635 DCHECK(index.IsRegister()) << index;
1636 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1637 __ Daddu(TMP, obj, TMP);
1638 __ StoreToOffset(kStoreWord, value, TMP, data_offset);
1639 }
1640 codegen_->MaybeRecordImplicitNullCheck(instruction);
1641 if (needs_write_barrier) {
1642 DCHECK_EQ(value_type, Primitive::kPrimNot);
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001643 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001644 }
1645 } else {
1646 DCHECK_EQ(value_type, Primitive::kPrimNot);
Serban Constantinescufc734082016-07-19 17:18:07 +01001647 codegen_->InvokeRuntime(kQuickAputObject, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00001648 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001649 }
1650 break;
1651 }
1652
1653 case Primitive::kPrimLong: {
1654 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1655 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1656 if (index.IsConstant()) {
1657 size_t offset =
1658 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1659 __ StoreToOffset(kStoreDoubleword, value, obj, offset);
1660 } else {
1661 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1662 __ Daddu(TMP, obj, TMP);
1663 __ StoreToOffset(kStoreDoubleword, value, TMP, data_offset);
1664 }
1665 break;
1666 }
1667
1668 case Primitive::kPrimFloat: {
1669 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
1670 FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
1671 DCHECK(locations->InAt(2).IsFpuRegister());
1672 if (index.IsConstant()) {
1673 size_t offset =
1674 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1675 __ StoreFpuToOffset(kStoreWord, value, obj, offset);
1676 } else {
1677 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1678 __ Daddu(TMP, obj, TMP);
1679 __ StoreFpuToOffset(kStoreWord, value, TMP, data_offset);
1680 }
1681 break;
1682 }
1683
1684 case Primitive::kPrimDouble: {
1685 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
1686 FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
1687 DCHECK(locations->InAt(2).IsFpuRegister());
1688 if (index.IsConstant()) {
1689 size_t offset =
1690 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1691 __ StoreFpuToOffset(kStoreDoubleword, value, obj, offset);
1692 } else {
1693 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1694 __ Daddu(TMP, obj, TMP);
1695 __ StoreFpuToOffset(kStoreDoubleword, value, TMP, data_offset);
1696 }
1697 break;
1698 }
1699
1700 case Primitive::kPrimVoid:
1701 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1702 UNREACHABLE();
1703 }
1704
1705  // For ints and objects the implicit null check has already been recorded inside the switch above.
1706 if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
1707 codegen_->MaybeRecordImplicitNullCheck(instruction);
1708 }
1709}
1710
1711void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001712 RegisterSet caller_saves = RegisterSet::Empty();
1713 InvokeRuntimeCallingConvention calling_convention;
1714 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1715 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1716 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001717 locations->SetInAt(0, Location::RequiresRegister());
1718 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001719}
1720
1721void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
1722 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001723 BoundsCheckSlowPathMIPS64* slow_path =
1724 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001725 codegen_->AddSlowPath(slow_path);
1726
1727 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
1728 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
1729
1730  // The length is limited to the maximum positive signed 32-bit integer.
1731 // Unsigned comparison of length and index checks for index < 0
1732 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001733 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001734}
1735
1736void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
1737 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1738 instruction,
1739 LocationSummary::kCallOnSlowPath);
1740 locations->SetInAt(0, Location::RequiresRegister());
1741 locations->SetInAt(1, Location::RequiresRegister());
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001742 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001743 locations->AddTemp(Location::RequiresRegister());
1744}
1745
1746void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
1747 LocationSummary* locations = instruction->GetLocations();
1748 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1749 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
1750 GpuRegister obj_cls = locations->GetTemp(0).AsRegister<GpuRegister>();
1751
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001752 SlowPathCodeMIPS64* slow_path =
1753 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001754 codegen_->AddSlowPath(slow_path);
1755
1756 // TODO: avoid this check if we know obj is not null.
1757 __ Beqzc(obj, slow_path->GetExitLabel());
1758 // Compare the class of `obj` with `cls`.
1759 __ LoadFromOffset(kLoadUnsignedWord, obj_cls, obj, mirror::Object::ClassOffset().Int32Value());
1760 __ Bnec(obj_cls, cls, slow_path->GetEntryLabel());
1761 __ Bind(slow_path->GetExitLabel());
1762}
1763
1764void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
1765 LocationSummary* locations =
1766 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1767 locations->SetInAt(0, Location::RequiresRegister());
1768 if (check->HasUses()) {
1769 locations->SetOut(Location::SameAsFirstInput());
1770 }
1771}
1772
1773void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
1774 // We assume the class is not null.
1775 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
1776 check->GetLoadClass(),
1777 check,
1778 check->GetDexPc(),
1779 true);
1780 codegen_->AddSlowPath(slow_path);
1781 GenerateClassInitializationCheck(slow_path,
1782 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
1783}
1784
1785void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
1786 Primitive::Type in_type = compare->InputAt(0)->GetType();
1787
Alexey Frunze299a9392015-12-08 16:08:02 -08001788 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001789
1790 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001791 case Primitive::kPrimBoolean:
1792 case Primitive::kPrimByte:
1793 case Primitive::kPrimShort:
1794 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001795 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001796 case Primitive::kPrimLong:
1797 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001798 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001799 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1800 break;
1801
1802 case Primitive::kPrimFloat:
Alexey Frunze299a9392015-12-08 16:08:02 -08001803 case Primitive::kPrimDouble:
1804 locations->SetInAt(0, Location::RequiresFpuRegister());
1805 locations->SetInAt(1, Location::RequiresFpuRegister());
1806 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001807 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001808
1809 default:
1810 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1811 }
1812}
1813
1814void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
1815 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08001816 GpuRegister res = locations->Out().AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001817 Primitive::Type in_type = instruction->InputAt(0)->GetType();
1818
1819 // 0 if: left == right
1820 // 1 if: left > right
1821 // -1 if: left < right
1822 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001823 case Primitive::kPrimBoolean:
1824 case Primitive::kPrimByte:
1825 case Primitive::kPrimShort:
1826 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001827 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001828 case Primitive::kPrimLong: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001829 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001830 Location rhs_location = locations->InAt(1);
1831 bool use_imm = rhs_location.IsConstant();
1832 GpuRegister rhs = ZERO;
1833 if (use_imm) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001834 if (in_type == Primitive::kPrimLong) {
Aart Bika19616e2016-02-01 18:57:58 -08001835 int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
1836 if (value != 0) {
1837 rhs = AT;
1838 __ LoadConst64(rhs, value);
1839 }
Roland Levillaina5c4a402016-03-15 15:02:50 +00001840 } else {
1841 int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
1842 if (value != 0) {
1843 rhs = AT;
1844 __ LoadConst32(rhs, value);
1845 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001846 }
1847 } else {
1848 rhs = rhs_location.AsRegister<GpuRegister>();
1849 }
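      // res = (rhs < lhs) - (lhs < rhs), i.e. -1, 0 or +1.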
Alexey Frunze4dda3372015-06-01 18:31:49 -07001850 __ Slt(TMP, lhs, rhs);
Alexey Frunze299a9392015-12-08 16:08:02 -08001851 __ Slt(res, rhs, lhs);
1852 __ Subu(res, res, TMP);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001853 break;
1854 }
1855
Alexey Frunze299a9392015-12-08 16:08:02 -08001856 case Primitive::kPrimFloat: {
1857 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
1858 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
1859 Mips64Label done;
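      // NaN operands compare neither equal nor less than, so they fall through to +1
      // under gt bias and to -1 otherwise.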
1860 __ CmpEqS(FTMP, lhs, rhs);
1861 __ LoadConst32(res, 0);
1862 __ Bc1nez(FTMP, &done);
Roland Levillain32ca3752016-02-17 16:49:37 +00001863 if (instruction->IsGtBias()) {
Alexey Frunze299a9392015-12-08 16:08:02 -08001864 __ CmpLtS(FTMP, lhs, rhs);
1865 __ LoadConst32(res, -1);
1866 __ Bc1nez(FTMP, &done);
1867 __ LoadConst32(res, 1);
1868 } else {
1869 __ CmpLtS(FTMP, rhs, lhs);
1870 __ LoadConst32(res, 1);
1871 __ Bc1nez(FTMP, &done);
1872 __ LoadConst32(res, -1);
1873 }
1874 __ Bind(&done);
1875 break;
1876 }
1877
Alexey Frunze4dda3372015-06-01 18:31:49 -07001878 case Primitive::kPrimDouble: {
Alexey Frunze299a9392015-12-08 16:08:02 -08001879 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
1880 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
1881 Mips64Label done;
1882 __ CmpEqD(FTMP, lhs, rhs);
1883 __ LoadConst32(res, 0);
1884 __ Bc1nez(FTMP, &done);
Roland Levillain32ca3752016-02-17 16:49:37 +00001885 if (instruction->IsGtBias()) {
Alexey Frunze299a9392015-12-08 16:08:02 -08001886 __ CmpLtD(FTMP, lhs, rhs);
1887 __ LoadConst32(res, -1);
1888 __ Bc1nez(FTMP, &done);
1889 __ LoadConst32(res, 1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001890 } else {
Alexey Frunze299a9392015-12-08 16:08:02 -08001891 __ CmpLtD(FTMP, rhs, lhs);
1892 __ LoadConst32(res, 1);
1893 __ Bc1nez(FTMP, &done);
1894 __ LoadConst32(res, -1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001895 }
Alexey Frunze299a9392015-12-08 16:08:02 -08001896 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001897 break;
1898 }
1899
1900 default:
1901 LOG(FATAL) << "Unimplemented compare type " << in_type;
1902 }
1903}
1904
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001905void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001906 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08001907 switch (instruction->InputAt(0)->GetType()) {
1908 default:
1909 case Primitive::kPrimLong:
1910 locations->SetInAt(0, Location::RequiresRegister());
1911 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1912 break;
1913
1914 case Primitive::kPrimFloat:
1915 case Primitive::kPrimDouble:
1916 locations->SetInAt(0, Location::RequiresFpuRegister());
1917 locations->SetInAt(1, Location::RequiresFpuRegister());
1918 break;
1919 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001920 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001921 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1922 }
1923}
1924
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001925void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001926 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001927 return;
1928 }
1929
Alexey Frunze299a9392015-12-08 16:08:02 -08001930 Primitive::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001931 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08001932 switch (type) {
1933 default:
1934 // Integer case.
1935 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
1936 return;
1937 case Primitive::kPrimLong:
1938 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
1939 return;
Alexey Frunze299a9392015-12-08 16:08:02 -08001940 case Primitive::kPrimFloat:
1941 case Primitive::kPrimDouble:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01001942 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
1943 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001944 }
1945}
1946
Alexey Frunzec857c742015-09-23 15:12:39 -07001947void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
1948 DCHECK(instruction->IsDiv() || instruction->IsRem());
1949 Primitive::Type type = instruction->GetResultType();
1950
1951 LocationSummary* locations = instruction->GetLocations();
1952 Location second = locations->InAt(1);
1953 DCHECK(second.IsConstant());
1954
1955 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1956 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
1957 int64_t imm = Int64FromConstant(second.GetConstant());
1958 DCHECK(imm == 1 || imm == -1);
1959
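  // x % ±1 is always 0; x / 1 is x and x / -1 is -x, so no divide instruction is needed.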
1960 if (instruction->IsRem()) {
1961 __ Move(out, ZERO);
1962 } else {
1963 if (imm == -1) {
1964 if (type == Primitive::kPrimInt) {
1965 __ Subu(out, ZERO, dividend);
1966 } else {
1967 DCHECK_EQ(type, Primitive::kPrimLong);
1968 __ Dsubu(out, ZERO, dividend);
1969 }
1970 } else if (out != dividend) {
1971 __ Move(out, dividend);
1972 }
1973 }
1974}
1975
1976void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
1977 DCHECK(instruction->IsDiv() || instruction->IsRem());
1978 Primitive::Type type = instruction->GetResultType();
1979
1980 LocationSummary* locations = instruction->GetLocations();
1981 Location second = locations->InAt(1);
1982 DCHECK(second.IsConstant());
1983
1984 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1985 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
1986 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00001987 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Alexey Frunzec857c742015-09-23 15:12:39 -07001988 int ctz_imm = CTZ(abs_imm);
1989
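  // Signed division by 2^k rounds toward zero: a bias of (2^k - 1), built from the
  // replicated sign bits, is added to negative dividends before the arithmetic shift,
  // and a negative divisor simply negates the quotient. The remainder path applies the
  // same bias so that the result keeps the sign of the dividend.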
1990 if (instruction->IsDiv()) {
1991 if (type == Primitive::kPrimInt) {
1992 if (ctz_imm == 1) {
1993 // Fast path for division by +/-2, which is very common.
1994 __ Srl(TMP, dividend, 31);
1995 } else {
1996 __ Sra(TMP, dividend, 31);
1997 __ Srl(TMP, TMP, 32 - ctz_imm);
1998 }
1999 __ Addu(out, dividend, TMP);
2000 __ Sra(out, out, ctz_imm);
2001 if (imm < 0) {
2002 __ Subu(out, ZERO, out);
2003 }
2004 } else {
2005 DCHECK_EQ(type, Primitive::kPrimLong);
2006 if (ctz_imm == 1) {
2007 // Fast path for division by +/-2, which is very common.
2008 __ Dsrl32(TMP, dividend, 31);
2009 } else {
2010 __ Dsra32(TMP, dividend, 31);
2011 if (ctz_imm > 32) {
2012 __ Dsrl(TMP, TMP, 64 - ctz_imm);
2013 } else {
2014 __ Dsrl32(TMP, TMP, 32 - ctz_imm);
2015 }
2016 }
2017 __ Daddu(out, dividend, TMP);
2018 if (ctz_imm < 32) {
2019 __ Dsra(out, out, ctz_imm);
2020 } else {
2021 __ Dsra32(out, out, ctz_imm - 32);
2022 }
2023 if (imm < 0) {
2024 __ Dsubu(out, ZERO, out);
2025 }
2026 }
2027 } else {
2028 if (type == Primitive::kPrimInt) {
2029 if (ctz_imm == 1) {
2030 // Fast path for modulo +/-2, which is very common.
2031 __ Sra(TMP, dividend, 31);
2032 __ Subu(out, dividend, TMP);
2033 __ Andi(out, out, 1);
2034 __ Addu(out, out, TMP);
2035 } else {
2036 __ Sra(TMP, dividend, 31);
2037 __ Srl(TMP, TMP, 32 - ctz_imm);
2038 __ Addu(out, dividend, TMP);
2039 if (IsUint<16>(abs_imm - 1)) {
2040 __ Andi(out, out, abs_imm - 1);
2041 } else {
2042 __ Sll(out, out, 32 - ctz_imm);
2043 __ Srl(out, out, 32 - ctz_imm);
2044 }
2045 __ Subu(out, out, TMP);
2046 }
2047 } else {
2048 DCHECK_EQ(type, Primitive::kPrimLong);
2049 if (ctz_imm == 1) {
2050 // Fast path for modulo +/-2, which is very common.
2051 __ Dsra32(TMP, dividend, 31);
2052 __ Dsubu(out, dividend, TMP);
2053 __ Andi(out, out, 1);
2054 __ Daddu(out, out, TMP);
2055 } else {
2056 __ Dsra32(TMP, dividend, 31);
2057 if (ctz_imm > 32) {
2058 __ Dsrl(TMP, TMP, 64 - ctz_imm);
2059 } else {
2060 __ Dsrl32(TMP, TMP, 32 - ctz_imm);
2061 }
2062 __ Daddu(out, dividend, TMP);
2063 if (IsUint<16>(abs_imm - 1)) {
2064 __ Andi(out, out, abs_imm - 1);
2065 } else {
2066 if (ctz_imm > 32) {
2067 __ Dsll(out, out, 64 - ctz_imm);
2068 __ Dsrl(out, out, 64 - ctz_imm);
2069 } else {
2070 __ Dsll32(out, out, 32 - ctz_imm);
2071 __ Dsrl32(out, out, 32 - ctz_imm);
2072 }
2073 }
2074 __ Dsubu(out, out, TMP);
2075 }
2076 }
2077 }
2078}
2079
2080void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2081 DCHECK(instruction->IsDiv() || instruction->IsRem());
2082
2083 LocationSummary* locations = instruction->GetLocations();
2084 Location second = locations->InAt(1);
2085 DCHECK(second.IsConstant());
2086
2087 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2088 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2089 int64_t imm = Int64FromConstant(second.GetConstant());
2090
2091 Primitive::Type type = instruction->GetResultType();
2092 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
2093
2094 int64_t magic;
2095 int shift;
2096 CalculateMagicAndShiftForDivRem(imm,
2097 (type == Primitive::kPrimLong),
2098 &magic,
2099 &shift);
2100
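  // Classic reciprocal ("magic number") division: take the high half of
  // dividend * magic, correct by +/- dividend when imm and magic have opposite signs,
  // arithmetic-shift, then add the sign bit so the quotient rounds toward zero. The
  // remainder is reconstructed as dividend - quotient * imm.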
2101 if (type == Primitive::kPrimInt) {
2102 __ LoadConst32(TMP, magic);
2103 __ MuhR6(TMP, dividend, TMP);
2104
2105 if (imm > 0 && magic < 0) {
2106 __ Addu(TMP, TMP, dividend);
2107 } else if (imm < 0 && magic > 0) {
2108 __ Subu(TMP, TMP, dividend);
2109 }
2110
2111 if (shift != 0) {
2112 __ Sra(TMP, TMP, shift);
2113 }
2114
2115 if (instruction->IsDiv()) {
2116 __ Sra(out, TMP, 31);
2117 __ Subu(out, TMP, out);
2118 } else {
2119 __ Sra(AT, TMP, 31);
2120 __ Subu(AT, TMP, AT);
2121 __ LoadConst32(TMP, imm);
2122 __ MulR6(TMP, AT, TMP);
2123 __ Subu(out, dividend, TMP);
2124 }
2125 } else {
2126 __ LoadConst64(TMP, magic);
2127 __ Dmuh(TMP, dividend, TMP);
2128
2129 if (imm > 0 && magic < 0) {
2130 __ Daddu(TMP, TMP, dividend);
2131 } else if (imm < 0 && magic > 0) {
2132 __ Dsubu(TMP, TMP, dividend);
2133 }
2134
2135 if (shift >= 32) {
2136 __ Dsra32(TMP, TMP, shift - 32);
2137 } else if (shift > 0) {
2138 __ Dsra(TMP, TMP, shift);
2139 }
2140
2141 if (instruction->IsDiv()) {
2142 __ Dsra32(out, TMP, 31);
2143 __ Dsubu(out, TMP, out);
2144 } else {
2145 __ Dsra32(AT, TMP, 31);
2146 __ Dsubu(AT, TMP, AT);
2147 __ LoadConst64(TMP, imm);
2148 __ Dmul(TMP, AT, TMP);
2149 __ Dsubu(out, dividend, TMP);
2150 }
2151 }
2152}
2153
2154void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2155 DCHECK(instruction->IsDiv() || instruction->IsRem());
2156 Primitive::Type type = instruction->GetResultType();
2157 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
2158
2159 LocationSummary* locations = instruction->GetLocations();
2160 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2161 Location second = locations->InAt(1);
2162
2163 if (second.IsConstant()) {
2164 int64_t imm = Int64FromConstant(second.GetConstant());
2165 if (imm == 0) {
2166      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2167 } else if (imm == 1 || imm == -1) {
2168 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002169 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07002170 DivRemByPowerOfTwo(instruction);
2171 } else {
2172 DCHECK(imm <= -2 || imm >= 2);
2173 GenerateDivRemWithAnyConstant(instruction);
2174 }
2175 } else {
2176 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2177 GpuRegister divisor = second.AsRegister<GpuRegister>();
2178 if (instruction->IsDiv()) {
2179 if (type == Primitive::kPrimInt)
2180 __ DivR6(out, dividend, divisor);
2181 else
2182 __ Ddiv(out, dividend, divisor);
2183 } else {
2184 if (type == Primitive::kPrimInt)
2185 __ ModR6(out, dividend, divisor);
2186 else
2187 __ Dmod(out, dividend, divisor);
2188 }
2189 }
2190}
2191
Alexey Frunze4dda3372015-06-01 18:31:49 -07002192void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
2193 LocationSummary* locations =
2194 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2195 switch (div->GetResultType()) {
2196 case Primitive::kPrimInt:
2197 case Primitive::kPrimLong:
2198 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07002199 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002200 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2201 break;
2202
2203 case Primitive::kPrimFloat:
2204 case Primitive::kPrimDouble:
2205 locations->SetInAt(0, Location::RequiresFpuRegister());
2206 locations->SetInAt(1, Location::RequiresFpuRegister());
2207 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2208 break;
2209
2210 default:
2211 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2212 }
2213}
2214
2215void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
2216 Primitive::Type type = instruction->GetType();
2217 LocationSummary* locations = instruction->GetLocations();
2218
2219 switch (type) {
2220 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07002221 case Primitive::kPrimLong:
2222 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002223 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002224 case Primitive::kPrimFloat:
2225 case Primitive::kPrimDouble: {
2226 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
2227 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2228 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2229 if (type == Primitive::kPrimFloat)
2230 __ DivS(dst, lhs, rhs);
2231 else
2232 __ DivD(dst, lhs, rhs);
2233 break;
2234 }
2235 default:
2236 LOG(FATAL) << "Unexpected div type " << type;
2237 }
2238}
2239
2240void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002241 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002242 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002243}
2244
2245void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2246 SlowPathCodeMIPS64* slow_path =
2247 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
2248 codegen_->AddSlowPath(slow_path);
2249 Location value = instruction->GetLocations()->InAt(0);
2250
2251 Primitive::Type type = instruction->GetType();
2252
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002253 if (!Primitive::IsIntegralType(type)) {
2254 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06002255 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002256 }
2257
2258 if (value.IsConstant()) {
2259 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
2260 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002261 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002262 } else {
2263      // A division by a non-zero constant is valid. We don't need to perform
2264 // any check, so simply fall through.
2265 }
2266 } else {
2267 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2268 }
2269}
2270
2271void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
2272 LocationSummary* locations =
2273 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2274 locations->SetOut(Location::ConstantLocation(constant));
2275}
2276
2277void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
2278 // Will be generated at use site.
2279}
2280
2281void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
2282 exit->SetLocations(nullptr);
2283}
2284
2285void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
2286}
2287
2288void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
2289 LocationSummary* locations =
2290 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2291 locations->SetOut(Location::ConstantLocation(constant));
2292}
2293
2294void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
2295 // Will be generated at use site.
2296}
2297
David Brazdilfc6a86a2015-06-26 10:33:45 +00002298void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002299 DCHECK(!successor->IsExitBlock());
2300 HBasicBlock* block = got->GetBlock();
2301 HInstruction* previous = got->GetPrevious();
2302 HLoopInformation* info = block->GetLoopInformation();
2303
2304 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
2305 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2306 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2307 return;
2308 }
2309 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2310 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2311 }
2312 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002313 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002314 }
2315}
2316
David Brazdilfc6a86a2015-06-26 10:33:45 +00002317void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
2318 got->SetLocations(nullptr);
2319}
2320
2321void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
2322 HandleGoto(got, got->GetSuccessor());
2323}
2324
2325void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
2326 try_boundary->SetLocations(nullptr);
2327}
2328
2329void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
2330 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2331 if (!successor->IsExitBlock()) {
2332 HandleGoto(try_boundary, successor);
2333 }
2334}
2335
Alexey Frunze299a9392015-12-08 16:08:02 -08002336void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
2337 bool is64bit,
2338 LocationSummary* locations) {
2339 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
2340 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
2341 Location rhs_location = locations->InAt(1);
2342 GpuRegister rhs_reg = ZERO;
2343 int64_t rhs_imm = 0;
2344 bool use_imm = rhs_location.IsConstant();
2345 if (use_imm) {
2346 if (is64bit) {
2347 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
2348 } else {
2349 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
2350 }
2351 } else {
2352 rhs_reg = rhs_location.AsRegister<GpuRegister>();
2353 }
2354 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
2355
2356 switch (cond) {
2357 case kCondEQ:
2358 case kCondNE:
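      // Equality reduces to "difference (or XOR) with rhs is zero": Sltiu(dst, x, 1)
      // materializes (x == 0) and Sltu(dst, ZERO, x) materializes (x != 0).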
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01002359 if (use_imm && IsInt<16>(-rhs_imm)) {
2360 if (rhs_imm == 0) {
2361 if (cond == kCondEQ) {
2362 __ Sltiu(dst, lhs, 1);
2363 } else {
2364 __ Sltu(dst, ZERO, lhs);
2365 }
2366 } else {
2367 if (is64bit) {
2368 __ Daddiu(dst, lhs, -rhs_imm);
2369 } else {
2370 __ Addiu(dst, lhs, -rhs_imm);
2371 }
2372 if (cond == kCondEQ) {
2373 __ Sltiu(dst, dst, 1);
2374 } else {
2375 __ Sltu(dst, ZERO, dst);
2376 }
Alexey Frunze299a9392015-12-08 16:08:02 -08002377 }
Alexey Frunze299a9392015-12-08 16:08:02 -08002378 } else {
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01002379 if (use_imm && IsUint<16>(rhs_imm)) {
2380 __ Xori(dst, lhs, rhs_imm);
2381 } else {
2382 if (use_imm) {
2383 rhs_reg = TMP;
2384 __ LoadConst64(rhs_reg, rhs_imm);
2385 }
2386 __ Xor(dst, lhs, rhs_reg);
2387 }
2388 if (cond == kCondEQ) {
2389 __ Sltiu(dst, dst, 1);
2390 } else {
2391 __ Sltu(dst, ZERO, dst);
2392 }
Alexey Frunze299a9392015-12-08 16:08:02 -08002393 }
2394 break;
2395
2396 case kCondLT:
2397 case kCondGE:
2398 if (use_imm && IsInt<16>(rhs_imm)) {
2399 __ Slti(dst, lhs, rhs_imm);
2400 } else {
2401 if (use_imm) {
2402 rhs_reg = TMP;
2403 __ LoadConst64(rhs_reg, rhs_imm);
2404 }
2405 __ Slt(dst, lhs, rhs_reg);
2406 }
2407 if (cond == kCondGE) {
2408 // Simulate lhs >= rhs via !(lhs < rhs) since there's
2409 // only the slt instruction but no sge.
2410 __ Xori(dst, dst, 1);
2411 }
2412 break;
2413
2414 case kCondLE:
2415 case kCondGT:
2416 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
2417 // Simulate lhs <= rhs via lhs < rhs + 1.
2418 __ Slti(dst, lhs, rhs_imm_plus_one);
2419 if (cond == kCondGT) {
2420 // Simulate lhs > rhs via !(lhs <= rhs) since there's
2421 // only the slti instruction but no sgti.
2422 __ Xori(dst, dst, 1);
2423 }
2424 } else {
2425 if (use_imm) {
2426 rhs_reg = TMP;
2427 __ LoadConst64(rhs_reg, rhs_imm);
2428 }
2429 __ Slt(dst, rhs_reg, lhs);
2430 if (cond == kCondLE) {
2431 // Simulate lhs <= rhs via !(rhs < lhs) since there's
2432 // only the slt instruction but no sle.
2433 __ Xori(dst, dst, 1);
2434 }
2435 }
2436 break;
2437
2438 case kCondB:
2439 case kCondAE:
2440 if (use_imm && IsInt<16>(rhs_imm)) {
2441 // Sltiu sign-extends its 16-bit immediate operand before
2442 // the comparison and thus lets us compare directly with
2443 // unsigned values in the ranges [0, 0x7fff] and
2444 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
2445 __ Sltiu(dst, lhs, rhs_imm);
2446 } else {
2447 if (use_imm) {
2448 rhs_reg = TMP;
2449 __ LoadConst64(rhs_reg, rhs_imm);
2450 }
2451 __ Sltu(dst, lhs, rhs_reg);
2452 }
2453 if (cond == kCondAE) {
2454 // Simulate lhs >= rhs via !(lhs < rhs) since there's
2455 // only the sltu instruction but no sgeu.
2456 __ Xori(dst, dst, 1);
2457 }
2458 break;
2459
2460 case kCondBE:
2461 case kCondA:
2462 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
2463 // Simulate lhs <= rhs via lhs < rhs + 1.
2464 // Note that this only works if rhs + 1 does not overflow
2465 // to 0, hence the check above.
2466 // Sltiu sign-extends its 16-bit immediate operand before
2467 // the comparison and thus lets us compare directly with
2468 // unsigned values in the ranges [0, 0x7fff] and
2469 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
2470 __ Sltiu(dst, lhs, rhs_imm_plus_one);
2471 if (cond == kCondA) {
2472 // Simulate lhs > rhs via !(lhs <= rhs) since there's
2473 // only the sltiu instruction but no sgtiu.
2474 __ Xori(dst, dst, 1);
2475 }
2476 } else {
2477 if (use_imm) {
2478 rhs_reg = TMP;
2479 __ LoadConst64(rhs_reg, rhs_imm);
2480 }
2481 __ Sltu(dst, rhs_reg, lhs);
2482 if (cond == kCondBE) {
2483 // Simulate lhs <= rhs via !(rhs < lhs) since there's
2484 // only the sltu instruction but no sleu.
2485 __ Xori(dst, dst, 1);
2486 }
2487 }
2488 break;
2489 }
2490}
2491
2492void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
2493 bool is64bit,
2494 LocationSummary* locations,
2495 Mips64Label* label) {
2496 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
2497 Location rhs_location = locations->InAt(1);
2498 GpuRegister rhs_reg = ZERO;
2499 int64_t rhs_imm = 0;
2500 bool use_imm = rhs_location.IsConstant();
2501 if (use_imm) {
2502 if (is64bit) {
2503 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
2504 } else {
2505 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
2506 }
2507 } else {
2508 rhs_reg = rhs_location.AsRegister<GpuRegister>();
2509 }
2510
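  // A zero right-hand side maps onto the compact branch-against-zero instructions.
  // For the unsigned conditions, "< 0" can never be taken and ">= 0" is unconditional.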
2511 if (use_imm && rhs_imm == 0) {
2512 switch (cond) {
2513 case kCondEQ:
2514      case kCondBE:  // Unsigned <= 0 holds only when lhs is zero.
2515 __ Beqzc(lhs, label);
2516 break;
2517 case kCondNE:
2518      case kCondA:   // Unsigned > 0 holds for any non-zero lhs.
2519 __ Bnezc(lhs, label);
2520 break;
2521 case kCondLT:
2522 __ Bltzc(lhs, label);
2523 break;
2524 case kCondGE:
2525 __ Bgezc(lhs, label);
2526 break;
2527 case kCondLE:
2528 __ Blezc(lhs, label);
2529 break;
2530 case kCondGT:
2531 __ Bgtzc(lhs, label);
2532 break;
2533 case kCondB: // always false
2534 break;
2535 case kCondAE: // always true
2536 __ Bc(label);
2537 break;
2538 }
2539 } else {
2540 if (use_imm) {
2541 rhs_reg = TMP;
2542 __ LoadConst64(rhs_reg, rhs_imm);
2543 }
2544 switch (cond) {
2545 case kCondEQ:
2546 __ Beqc(lhs, rhs_reg, label);
2547 break;
2548 case kCondNE:
2549 __ Bnec(lhs, rhs_reg, label);
2550 break;
2551 case kCondLT:
2552 __ Bltc(lhs, rhs_reg, label);
2553 break;
2554 case kCondGE:
2555 __ Bgec(lhs, rhs_reg, label);
2556 break;
2557 case kCondLE:
2558 __ Bgec(rhs_reg, lhs, label);
2559 break;
2560 case kCondGT:
2561 __ Bltc(rhs_reg, lhs, label);
2562 break;
2563 case kCondB:
2564 __ Bltuc(lhs, rhs_reg, label);
2565 break;
2566 case kCondAE:
2567 __ Bgeuc(lhs, rhs_reg, label);
2568 break;
2569 case kCondBE:
2570 __ Bgeuc(rhs_reg, lhs, label);
2571 break;
2572 case kCondA:
2573 __ Bltuc(rhs_reg, lhs, label);
2574 break;
2575 }
2576 }
2577}
2578
Tijana Jakovljevic43758192016-12-30 09:23:01 +01002579void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
2580 bool gt_bias,
2581 Primitive::Type type,
2582 LocationSummary* locations) {
2583 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
2584 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2585 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
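  // The R6 FP compares write an all-ones/all-zero mask into FTMP; Mfc1 copies it to the
  // GPR and Andi with 1 extracts the boolean. kCondNE instead adds 1 (-1 + 1 = 0,
  // 0 + 1 = 1) to invert the equality mask. The gt_bias flag selects the ordered or
  // unordered compare so that NaN operands yield the result the bias requires.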
2586 if (type == Primitive::kPrimFloat) {
2587 switch (cond) {
2588 case kCondEQ:
2589 __ CmpEqS(FTMP, lhs, rhs);
2590 __ Mfc1(dst, FTMP);
2591 __ Andi(dst, dst, 1);
2592 break;
2593 case kCondNE:
2594 __ CmpEqS(FTMP, lhs, rhs);
2595 __ Mfc1(dst, FTMP);
2596 __ Addiu(dst, dst, 1);
2597 break;
2598 case kCondLT:
2599 if (gt_bias) {
2600 __ CmpLtS(FTMP, lhs, rhs);
2601 } else {
2602 __ CmpUltS(FTMP, lhs, rhs);
2603 }
2604 __ Mfc1(dst, FTMP);
2605 __ Andi(dst, dst, 1);
2606 break;
2607 case kCondLE:
2608 if (gt_bias) {
2609 __ CmpLeS(FTMP, lhs, rhs);
2610 } else {
2611 __ CmpUleS(FTMP, lhs, rhs);
2612 }
2613 __ Mfc1(dst, FTMP);
2614 __ Andi(dst, dst, 1);
2615 break;
2616 case kCondGT:
2617 if (gt_bias) {
2618 __ CmpUltS(FTMP, rhs, lhs);
2619 } else {
2620 __ CmpLtS(FTMP, rhs, lhs);
2621 }
2622 __ Mfc1(dst, FTMP);
2623 __ Andi(dst, dst, 1);
2624 break;
2625 case kCondGE:
2626 if (gt_bias) {
2627 __ CmpUleS(FTMP, rhs, lhs);
2628 } else {
2629 __ CmpLeS(FTMP, rhs, lhs);
2630 }
2631 __ Mfc1(dst, FTMP);
2632 __ Andi(dst, dst, 1);
2633 break;
2634 default:
2635 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
2636 UNREACHABLE();
2637 }
2638 } else {
2639 DCHECK_EQ(type, Primitive::kPrimDouble);
2640 switch (cond) {
2641 case kCondEQ:
2642 __ CmpEqD(FTMP, lhs, rhs);
2643 __ Mfc1(dst, FTMP);
2644 __ Andi(dst, dst, 1);
2645 break;
2646 case kCondNE:
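        // Same all-ones trick as in the float case above: -1 + 1 == 0, 0 + 1 == 1.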
2647 __ CmpEqD(FTMP, lhs, rhs);
2648 __ Mfc1(dst, FTMP);
2649 __ Addiu(dst, dst, 1);
2650 break;
2651 case kCondLT:
2652 if (gt_bias) {
2653 __ CmpLtD(FTMP, lhs, rhs);
2654 } else {
2655 __ CmpUltD(FTMP, lhs, rhs);
2656 }
2657 __ Mfc1(dst, FTMP);
2658 __ Andi(dst, dst, 1);
2659 break;
2660 case kCondLE:
2661 if (gt_bias) {
2662 __ CmpLeD(FTMP, lhs, rhs);
2663 } else {
2664 __ CmpUleD(FTMP, lhs, rhs);
2665 }
2666 __ Mfc1(dst, FTMP);
2667 __ Andi(dst, dst, 1);
2668 break;
2669 case kCondGT:
2670 if (gt_bias) {
2671 __ CmpUltD(FTMP, rhs, lhs);
2672 } else {
2673 __ CmpLtD(FTMP, rhs, lhs);
2674 }
2675 __ Mfc1(dst, FTMP);
2676 __ Andi(dst, dst, 1);
2677 break;
2678 case kCondGE:
2679 if (gt_bias) {
2680 __ CmpUleD(FTMP, rhs, lhs);
2681 } else {
2682 __ CmpLeD(FTMP, rhs, lhs);
2683 }
2684 __ Mfc1(dst, FTMP);
2685 __ Andi(dst, dst, 1);
2686 break;
2687 default:
2688 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
2689 UNREACHABLE();
2690 }
2691 }
2692}
2693
2694void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
2695 bool gt_bias,
2696 Primitive::Type type,
2697 LocationSummary* locations,
2698 Mips64Label* label) {
2699 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2700 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
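  // The CMP.cond.fmt instructions below leave an all-ones/all-zeros mask in FTMP;
  // Bc1nez branches when that mask is non-zero (condition true), Bc1eqz when it is zero.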
2701 if (type == Primitive::kPrimFloat) {
2702 switch (cond) {
2703 case kCondEQ:
2704 __ CmpEqS(FTMP, lhs, rhs);
2705 __ Bc1nez(FTMP, label);
2706 break;
2707 case kCondNE:
2708 __ CmpEqS(FTMP, lhs, rhs);
2709 __ Bc1eqz(FTMP, label);
2710 break;
2711 case kCondLT:
2712 if (gt_bias) {
2713 __ CmpLtS(FTMP, lhs, rhs);
2714 } else {
2715 __ CmpUltS(FTMP, lhs, rhs);
2716 }
2717 __ Bc1nez(FTMP, label);
2718 break;
2719 case kCondLE:
2720 if (gt_bias) {
2721 __ CmpLeS(FTMP, lhs, rhs);
2722 } else {
2723 __ CmpUleS(FTMP, lhs, rhs);
2724 }
2725 __ Bc1nez(FTMP, label);
2726 break;
2727 case kCondGT:
2728 if (gt_bias) {
2729 __ CmpUltS(FTMP, rhs, lhs);
2730 } else {
2731 __ CmpLtS(FTMP, rhs, lhs);
2732 }
2733 __ Bc1nez(FTMP, label);
2734 break;
2735 case kCondGE:
2736 if (gt_bias) {
2737 __ CmpUleS(FTMP, rhs, lhs);
2738 } else {
2739 __ CmpLeS(FTMP, rhs, lhs);
2740 }
2741 __ Bc1nez(FTMP, label);
2742 break;
2743 default:
2744 LOG(FATAL) << "Unexpected non-floating-point condition";
2745 }
2746 } else {
2747 DCHECK_EQ(type, Primitive::kPrimDouble);
2748 switch (cond) {
2749 case kCondEQ:
2750 __ CmpEqD(FTMP, lhs, rhs);
2751 __ Bc1nez(FTMP, label);
2752 break;
2753 case kCondNE:
2754 __ CmpEqD(FTMP, lhs, rhs);
2755 __ Bc1eqz(FTMP, label);
2756 break;
2757 case kCondLT:
2758 if (gt_bias) {
2759 __ CmpLtD(FTMP, lhs, rhs);
2760 } else {
2761 __ CmpUltD(FTMP, lhs, rhs);
2762 }
2763 __ Bc1nez(FTMP, label);
2764 break;
2765 case kCondLE:
2766 if (gt_bias) {
2767 __ CmpLeD(FTMP, lhs, rhs);
2768 } else {
2769 __ CmpUleD(FTMP, lhs, rhs);
2770 }
2771 __ Bc1nez(FTMP, label);
2772 break;
2773 case kCondGT:
2774 if (gt_bias) {
2775 __ CmpUltD(FTMP, rhs, lhs);
2776 } else {
2777 __ CmpLtD(FTMP, rhs, lhs);
2778 }
2779 __ Bc1nez(FTMP, label);
2780 break;
2781 case kCondGE:
2782 if (gt_bias) {
2783 __ CmpUleD(FTMP, rhs, lhs);
2784 } else {
2785 __ CmpLeD(FTMP, rhs, lhs);
2786 }
2787 __ Bc1nez(FTMP, label);
2788 break;
2789 default:
2790 LOG(FATAL) << "Unexpected non-floating-point condition";
2791 }
2792 }
2793}
2794
2795void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
2796 size_t condition_input_index,
2797 Mips64Label* true_target,
2798 Mips64Label* false_target) {
2799 HInstruction* cond = instruction->InputAt(condition_input_index);
2800
2801 if (true_target == nullptr && false_target == nullptr) {
2802 // Nothing to do. The code always falls through.
2803 return;
2804 } else if (cond->IsIntConstant()) {
2805 // Constant condition, statically compared against "true" (integer value 1).
2806 if (cond->AsIntConstant()->IsTrue()) {
2807 if (true_target != nullptr) {
2808 __ Bc(true_target);
2809 }
2810 } else {
2811 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
2812 if (false_target != nullptr) {
2813 __ Bc(false_target);
2814 }
2815 }
2816 return;
2817 }
2818
2819 // The following code generates these patterns:
2820 // (1) true_target == nullptr && false_target != nullptr
2821 // - opposite condition true => branch to false_target
2822 // (2) true_target != nullptr && false_target == nullptr
2823 // - condition true => branch to true_target
2824 // (3) true_target != nullptr && false_target != nullptr
2825 // - condition true => branch to true_target
2826 // - branch to false_target
2827 if (IsBooleanValueOrMaterializedCondition(cond)) {
2828 // The condition instruction has been materialized, compare the output to 0.
2829 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
2830 DCHECK(cond_val.IsRegister());
2831 if (true_target == nullptr) {
2832 __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
2833 } else {
2834 __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
2835 }
2836 } else {
2837 // The condition instruction has not been materialized, use its inputs as
2838 // the comparison and its condition as the branch condition.
2839 HCondition* condition = cond->AsCondition();
2840 Primitive::Type type = condition->InputAt(0)->GetType();
2841 LocationSummary* locations = cond->GetLocations();
2842 IfCondition if_cond = condition->GetCondition();
2843 Mips64Label* branch_target = true_target;
2844
2845 if (true_target == nullptr) {
2846 if_cond = condition->GetOppositeCondition();
2847 branch_target = false_target;
2848 }
2849
2850 switch (type) {
2851 default:
2852 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
2853 break;
2854 case Primitive::kPrimLong:
2855 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
2856 break;
2857 case Primitive::kPrimFloat:
2858 case Primitive::kPrimDouble:
2859 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
2860 break;
2861 }
2862 }
2863
2864 // If neither branch falls through (case 3), the conditional branch to `true_target`
2865 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2866 if (true_target != nullptr && false_target != nullptr) {
2867 __ Bc(false_target);
2868 }
2869}
2870
2871void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
2872 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
2873 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
2874 locations->SetInAt(0, Location::RequiresRegister());
2875 }
2876}
2877
2878void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
2879 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2880 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2881 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2882 nullptr : codegen_->GetLabelOf(true_successor);
2883 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2884 nullptr : codegen_->GetLabelOf(false_successor);
2885 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
2886}
2887
2888void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
2889 LocationSummary* locations = new (GetGraph()->GetArena())
2890 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
2891 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2892 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
2893 locations->SetInAt(0, Location::RequiresRegister());
2894 }
2895}
2896
2897void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
2898 SlowPathCodeMIPS64* slow_path =
2899 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
2900 GenerateTestAndBranch(deoptimize,
2901 /* condition_input_index */ 0,
2902 slow_path->GetEntryLabel(),
2903 /* false_target */ nullptr);
2904}
2905
2906void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2907 LocationSummary* locations = new (GetGraph()->GetArena())
2908 LocationSummary(flag, LocationSummary::kNoCall);
2909 locations->SetOut(Location::RequiresRegister());
2910}
2911
2912void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2913 __ LoadFromOffset(kLoadWord,
2914 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
2915 SP,
2916 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
2917}
2918
2919void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
2920 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
2921 if (Primitive::IsFloatingPointType(select->GetType())) {
2922 locations->SetInAt(0, Location::RequiresFpuRegister());
2923 locations->SetInAt(1, Location::RequiresFpuRegister());
2924 } else {
2925 locations->SetInAt(0, Location::RequiresRegister());
2926 locations->SetInAt(1, Location::RequiresRegister());
2927 }
2928 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2929 locations->SetInAt(2, Location::RequiresRegister());
2930 }
2931 locations->SetOut(Location::SameAsFirstInput());
2932}
2933
2934void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
2935 LocationSummary* locations = select->GetLocations();
2936 Mips64Label false_target;
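  // The output is the same register as the first input, so only the "condition true"
  // case needs a move: branch over the move when the condition is false.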
2937 GenerateTestAndBranch(select,
2938 /* condition_input_index */ 2,
2939 /* true_target */ nullptr,
2940 &false_target);
2941 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
2942 __ Bind(&false_target);
2943}
2944
2945void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
2946 new (GetGraph()->GetArena()) LocationSummary(info);
2947}
2948
2949void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
2950 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
2951}
2952
2953void CodeGeneratorMIPS64::GenerateNop() {
2954 __ Nop();
2955}
2956
2957void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
2958 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2959 LocationSummary* locations =
2960 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2961 locations->SetInAt(0, Location::RequiresRegister());
2962 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2963 locations->SetOut(Location::RequiresFpuRegister());
2964 } else {
2965 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2966 }
2967}
2968
2969void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
2970 const FieldInfo& field_info) {
2971 Primitive::Type type = field_info.GetFieldType();
2972 LocationSummary* locations = instruction->GetLocations();
2973 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2974 LoadOperandType load_type = kLoadUnsignedByte;
2975 switch (type) {
2976 case Primitive::kPrimBoolean:
2977 load_type = kLoadUnsignedByte;
2978 break;
2979 case Primitive::kPrimByte:
2980 load_type = kLoadSignedByte;
2981 break;
2982 case Primitive::kPrimShort:
2983 load_type = kLoadSignedHalfword;
2984 break;
2985 case Primitive::kPrimChar:
2986 load_type = kLoadUnsignedHalfword;
2987 break;
2988 case Primitive::kPrimInt:
2989 case Primitive::kPrimFloat:
2990 load_type = kLoadWord;
2991 break;
2992 case Primitive::kPrimLong:
2993 case Primitive::kPrimDouble:
2994 load_type = kLoadDoubleword;
2995 break;
2996 case Primitive::kPrimNot:
2997 load_type = kLoadUnsignedWord;
2998 break;
2999 case Primitive::kPrimVoid:
3000 LOG(FATAL) << "Unreachable type " << type;
3001 UNREACHABLE();
3002 }
3003 if (!Primitive::IsFloatingPointType(type)) {
3004 DCHECK(locations->Out().IsRegister());
3005 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3006 __ LoadFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
3007 } else {
3008 DCHECK(locations->Out().IsFpuRegister());
3009 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3010 __ LoadFpuFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
3011 }
3012
3013 codegen_->MaybeRecordImplicitNullCheck(instruction);
3014 // TODO: memory barrier?
3015}
3016
3017void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
3018 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
3019 LocationSummary* locations =
3020 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3021 locations->SetInAt(0, Location::RequiresRegister());
3022 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
3023 locations->SetInAt(1, Location::RequiresFpuRegister());
3024 } else {
3025 locations->SetInAt(1, Location::RequiresRegister());
3026 }
3027}
3028
3029void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
3030 const FieldInfo& field_info,
3031 bool value_can_be_null) {
3032 Primitive::Type type = field_info.GetFieldType();
3033 LocationSummary* locations = instruction->GetLocations();
3034 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
3035 StoreOperandType store_type = kStoreByte;
3036 switch (type) {
3037 case Primitive::kPrimBoolean:
3038 case Primitive::kPrimByte:
3039 store_type = kStoreByte;
3040 break;
3041 case Primitive::kPrimShort:
3042 case Primitive::kPrimChar:
3043 store_type = kStoreHalfword;
3044 break;
3045 case Primitive::kPrimInt:
3046 case Primitive::kPrimFloat:
3047 case Primitive::kPrimNot:
3048 store_type = kStoreWord;
3049 break;
3050 case Primitive::kPrimLong:
3051 case Primitive::kPrimDouble:
3052 store_type = kStoreDoubleword;
3053 break;
3054 case Primitive::kPrimVoid:
3055 LOG(FATAL) << "Unreachable type " << type;
3056 UNREACHABLE();
3057 }
3058 if (!Primitive::IsFloatingPointType(type)) {
3059 DCHECK(locations->InAt(1).IsRegister());
3060 GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
3061 __ StoreToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
3062 } else {
3063 DCHECK(locations->InAt(1).IsFpuRegister());
3064 FpuRegister src = locations->InAt(1).AsFpuRegister<FpuRegister>();
3065 __ StoreFpuToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
3066 }
3067
3068 codegen_->MaybeRecordImplicitNullCheck(instruction);
3069 // TODO: memory barriers?
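  // A reference store needs a card-table mark so the GC can find objects whose fields
  // were updated; value_can_be_null lets MarkGCCard skip the mark for a null store.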
3070 if (CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1))) {
3071 DCHECK(locations->InAt(1).IsRegister());
3072 GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
3073 codegen_->MarkGCCard(obj, src, value_can_be_null);
3074 }
3075}
3076
3077void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3078 HandleFieldGet(instruction, instruction->GetFieldInfo());
3079}
3080
3081void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3082 HandleFieldGet(instruction, instruction->GetFieldInfo());
3083}
3084
3085void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3086 HandleFieldSet(instruction, instruction->GetFieldInfo());
3087}
3088
3089void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3090 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3091}
3092
3093void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(
3094 HInstruction* instruction ATTRIBUTE_UNUSED,
3095 Location root,
3096 GpuRegister obj,
3097 uint32_t offset) {
3098 // When handling HLoadClass::LoadKind::kDexCachePcRelative, the caller calls
3099 // EmitPcRelativeAddressPlaceholderHigh() and then GenerateGcRootFieldLoad().
3100 // The relative patcher expects the two methods to emit the following patchable
3101 // sequence of instructions in this case:
3102 // auipc reg1, 0x1234 // 0x1234 is a placeholder for offset_high.
3103 // lwu reg2, 0x5678(reg1) // 0x5678 is a placeholder for offset_low.
3104 // TODO: Adjust GenerateGcRootFieldLoad() and its caller when this method is
3105 // extended (e.g. for read barriers) so as not to break the relative patcher.
3106 GpuRegister root_reg = root.AsRegister<GpuRegister>();
3107 if (kEmitCompilerReadBarrier) {
3108 UNIMPLEMENTED(FATAL) << "for read barrier";
3109 } else {
3110 // Plain GC root load with no read barrier.
3111 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
3112 __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
3113 // Note that GC roots are not affected by heap poisoning, thus we
3114 // do not have to unpoison `root_reg` here.
3115 }
3116}
3117
3118void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
3119 LocationSummary::CallKind call_kind =
3120 instruction->IsExactCheck() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
3121 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3122 locations->SetInAt(0, Location::RequiresRegister());
3123 locations->SetInAt(1, Location::RequiresRegister());
3124 // The output does overlap inputs.
3125 // Note that TypeCheckSlowPathMIPS64 uses this register too.
3126 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3127}
3128
3129void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
3130 LocationSummary* locations = instruction->GetLocations();
3131 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
3132 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
3133 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3134
3135 Mips64Label done;
3136
3137 // Return 0 if `obj` is null.
3138 // TODO: Avoid this check if we know `obj` is not null.
3139 __ Move(out, ZERO);
3140 __ Beqzc(obj, &done);
3141
3142 // Compare the class of `obj` with `cls`.
3143 __ LoadFromOffset(kLoadUnsignedWord, out, obj, mirror::Object::ClassOffset().Int32Value());
3144 if (instruction->IsExactCheck()) {
3145 // Classes must be equal for the instanceof to succeed.
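      // out = (obj->klass_ == cls): Xor yields zero only when the classes match, and
      // Sltiu with immediate 1 turns zero into 1 and any non-zero value into 0.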
3146 __ Xor(out, out, cls);
3147 __ Sltiu(out, out, 1);
3148 } else {
3149 // If the classes are not equal, we go into a slow path.
3150 DCHECK(locations->OnlyCallsOnSlowPath());
3151 SlowPathCodeMIPS64* slow_path =
3152 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
3153 codegen_->AddSlowPath(slow_path);
3154 __ Bnec(out, cls, slow_path->GetEntryLabel());
3155 __ LoadConst32(out, 1);
3156 __ Bind(slow_path->GetExitLabel());
3157 }
3158
3159 __ Bind(&done);
3160}
3161
3162void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
3163 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3164 locations->SetOut(Location::ConstantLocation(constant));
3165}
3166
3167void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
3168 // Will be generated at use site.
3169}
3170
3171void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
3172 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3173 locations->SetOut(Location::ConstantLocation(constant));
3174}
3175
3176void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
3177 // Will be generated at use site.
3178}
3179
3180void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3181 // The trampoline uses the same calling convention as dex calling conventions,
3182 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3183 // the method_idx.
3184 HandleInvoke(invoke);
3185}
3186
3187void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3188 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3189}
3190
3191void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
3192 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
3193 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
3194}
3195
3196void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
3197 HandleInvoke(invoke);
3198 // The register T0 is required to be used for the hidden argument in
3199 // art_quick_imt_conflict_trampoline, so add the hidden argument.
3200 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
3201}
3202
3203void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
3204 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
3205 GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
3206 Location receiver = invoke->GetLocations()->InAt(0);
3207 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3208 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
3209
3210 // Set the hidden argument.
3211 __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
3212 invoke->GetDexMethodIndex());
3213
3214 // temp = object->GetClass();
3215 if (receiver.IsStackSlot()) {
3216 __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
3217 __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
3218 } else {
3219 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
3220 }
3221 codegen_->MaybeRecordImplicitNullCheck(invoke);
3222 __ LoadFromOffset(kLoadDoubleword, temp, temp,
3223 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
3224 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
3225 invoke->GetImtIndex(), kMips64PointerSize));
3226 // temp = temp->GetImtEntryAt(method_offset);
3227 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
3228 // T9 = temp->GetEntryPoint();
3229 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
3230 // T9();
3231 __ Jalr(T9);
3232 __ Nop();
3233 DCHECK(!codegen_->IsLeafMethod());
3234 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3235}
3236
3237void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
3238 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3239 if (intrinsic.TryDispatch(invoke)) {
3240 return;
3241 }
3242
3243 HandleInvoke(invoke);
3244}
3245
3246void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003247 // Explicit clinit checks triggered by static invokes must have been pruned by
3248 // art::PrepareForRegisterAllocation.
3249 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003250
Chris Larsen3039e382015-08-26 07:54:08 -07003251 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3252 if (intrinsic.TryDispatch(invoke)) {
3253 return;
3254 }
3255
Alexey Frunze4dda3372015-06-01 18:31:49 -07003256 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003257}
3258
Chris Larsen3039e382015-08-26 07:54:08 -07003259static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003260 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07003261 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
3262 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003263 return true;
3264 }
3265 return false;
3266}
3267
3268HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
3269 HLoadString::LoadKind desired_string_load_kind) {
3270 if (kEmitCompilerReadBarrier) {
3271 UNIMPLEMENTED(FATAL) << "for read barrier";
3272 }
3273 bool fallback_load = false;
3274 switch (desired_string_load_kind) {
3275 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3276 DCHECK(!GetCompilerOptions().GetCompilePic());
3277 break;
3278 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
3279 DCHECK(GetCompilerOptions().GetCompilePic());
3280 break;
3281 case HLoadString::LoadKind::kBootImageAddress:
3282 break;
3283 case HLoadString::LoadKind::kBssEntry:
3284 DCHECK(!Runtime::Current()->UseJitCompilation());
3285 break;
3286 case HLoadString::LoadKind::kDexCacheViaMethod:
3287 break;
3288 case HLoadString::LoadKind::kJitTableAddress:
3289 DCHECK(Runtime::Current()->UseJitCompilation());
3290 // TODO: implement.
3291 fallback_load = true;
3292 break;
3293 }
3294 if (fallback_load) {
3295 desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
3296 }
3297 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003298}
3299
3300HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
3301 HLoadClass::LoadKind desired_class_load_kind) {
3302 if (kEmitCompilerReadBarrier) {
3303 UNIMPLEMENTED(FATAL) << "for read barrier";
3304 }
3305 bool fallback_load = false;
3306 switch (desired_class_load_kind) {
3307 case HLoadClass::LoadKind::kReferrersClass:
3308 break;
3309 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3310 DCHECK(!GetCompilerOptions().GetCompilePic());
3311 break;
3312 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3313 DCHECK(GetCompilerOptions().GetCompilePic());
3314 break;
3315 case HLoadClass::LoadKind::kBootImageAddress:
3316 break;
3317 case HLoadClass::LoadKind::kJitTableAddress:
3318 DCHECK(Runtime::Current()->UseJitCompilation());
3319 // TODO: implement.
3320 fallback_load = true;
3321 break;
3322 case HLoadClass::LoadKind::kDexCachePcRelative:
3323 DCHECK(!Runtime::Current()->UseJitCompilation());
3324 break;
3325 case HLoadClass::LoadKind::kDexCacheViaMethod:
3326 break;
3327 }
3328 if (fallback_load) {
3329 desired_class_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
3330 }
3331 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003332}
3333
3334HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
3335 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3336 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
3337 // On MIPS64 we support all dispatch types.
3338 return desired_dispatch_info;
3339}
3340
3341void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
3342 // All registers are assumed to be correctly set up per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003343 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Alexey Frunze19f6c692016-11-30 19:19:55 -08003344 HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
3345 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
3346
Alexey Frunze19f6c692016-11-30 19:19:55 -08003347 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003348 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00003349 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003350 uint32_t offset =
3351 GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00003352 __ LoadFromOffset(kLoadDoubleword,
3353 temp.AsRegister<GpuRegister>(),
3354 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003355 offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003356 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003357 }
Vladimir Marko58155012015-08-19 12:49:41 +00003358 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003359 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003360 break;
3361 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
Alexey Frunze19f6c692016-11-30 19:19:55 -08003362 __ LoadLiteral(temp.AsRegister<GpuRegister>(),
3363 kLoadDoubleword,
3364 DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003365 break;
Alexey Frunze19f6c692016-11-30 19:19:55 -08003366 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
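      // As in GenerateGcRootFieldLoad() above, 0x5678 is only a placeholder: the
      // auipc/ld pair is expected to be rewritten by the relative patcher with the
      // real PC-relative offset of the dex cache array entry.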
3367 uint32_t offset = invoke->GetDexCacheArrayOffset();
3368 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3369 NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset);
3370 EmitPcRelativeAddressPlaceholderHigh(info, AT);
3371 __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
3372 break;
3373 }
Vladimir Marko58155012015-08-19 12:49:41 +00003374 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003375 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003376 GpuRegister reg = temp.AsRegister<GpuRegister>();
3377 GpuRegister method_reg;
3378 if (current_method.IsRegister()) {
3379 method_reg = current_method.AsRegister<GpuRegister>();
3380 } else {
3381 // TODO: use the appropriate DCHECK() here if possible.
3382 // DCHECK(invoke->GetLocations()->Intrinsified());
3383 DCHECK(!current_method.IsValid());
3384 method_reg = reg;
3385 __ Ld(reg, SP, kCurrentMethodStackOffset);
3386 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003387
Vladimir Marko58155012015-08-19 12:49:41 +00003388 // temp = temp->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003389 __ LoadFromOffset(kLoadDoubleword,
Vladimir Marko58155012015-08-19 12:49:41 +00003390 reg,
3391 method_reg,
Vladimir Marko05792b92015-08-03 11:56:49 +01003392 ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value());
Vladimir Marko40ecb122016-04-06 17:33:41 +01003393 // temp = temp[index_in_cache];
3394 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3395 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003396 __ LoadFromOffset(kLoadDoubleword,
3397 reg,
3398 reg,
3399 CodeGenerator::GetCachePointerOffset(index_in_cache));
3400 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003401 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003402 }
3403
Alexey Frunze19f6c692016-11-30 19:19:55 -08003404 switch (code_ptr_location) {
Vladimir Marko58155012015-08-19 12:49:41 +00003405 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunze19f6c692016-11-30 19:19:55 -08003406 __ Balc(&frame_entry_label_);
Vladimir Marko58155012015-08-19 12:49:41 +00003407 break;
Vladimir Marko58155012015-08-19 12:49:41 +00003408 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3409 // T9 = callee_method->entry_point_from_quick_compiled_code_;
3410 __ LoadFromOffset(kLoadDoubleword,
3411 T9,
3412 callee_method.AsRegister<GpuRegister>(),
3413 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07003414 kMips64PointerSize).Int32Value());
Vladimir Marko58155012015-08-19 12:49:41 +00003415 // T9()
3416 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003417 __ Nop();
Vladimir Marko58155012015-08-19 12:49:41 +00003418 break;
3419 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003420 DCHECK(!IsLeafMethod());
3421}
3422
3423void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003424 // Explicit clinit checks triggered by static invokes must have been pruned by
3425 // art::PrepareForRegisterAllocation.
3426 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003427
3428 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3429 return;
3430 }
3431
3432 LocationSummary* locations = invoke->GetLocations();
3433 codegen_->GenerateStaticOrDirectCall(invoke,
3434 locations->HasTemps()
3435 ? locations->GetTemp(0)
3436 : Location::NoLocation());
3437 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3438}
3439
3440void CodeGeneratorMIPS64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
3441 // Use the calling convention instead of the location of the receiver, as
3442 // intrinsics may have put the receiver in a different register. In the intrinsics
3443 // slow path, the arguments have been moved to the right place, so here we are
3444 // guaranteed that the receiver is the first register of the calling convention.
3445 InvokeDexCallingConvention calling_convention;
3446 GpuRegister receiver = calling_convention.GetRegisterAt(0);
3447
Alexey Frunze53afca12015-11-05 16:34:23 -08003448 GpuRegister temp = temp_location.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003449 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3450 invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
3451 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07003452 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003453
3454 // temp = object->GetClass();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003455 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
Alexey Frunze53afca12015-11-05 16:34:23 -08003456 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003457 // temp = temp->GetMethodAt(method_offset);
3458 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
3459 // T9 = temp->GetEntryPoint();
3460 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
3461 // T9();
3462 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003463 __ Nop();
Alexey Frunze53afca12015-11-05 16:34:23 -08003464}
3465
3466void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
3467 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3468 return;
3469 }
3470
3471 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003472 DCHECK(!codegen_->IsLeafMethod());
3473 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3474}
3475
3476void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
3477 if (cls->NeedsAccessCheck()) {
3478 InvokeRuntimeCallingConvention calling_convention;
3479 CodeGenerator::CreateLoadClassLocationSummary(
3480 cls,
3481 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
3482 calling_convention.GetReturnLocation(Primitive::kPrimNot),
3483 /* code_generator_supports_read_barrier */ false);
3484 return;
3485 }
3486
3487 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3488 ? LocationSummary::kCallOnSlowPath
3489 : LocationSummary::kNoCall;
3490 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3491 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3492 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3493 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3494 locations->SetInAt(0, Location::RequiresRegister());
3495 }
3496 locations->SetOut(Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003497}
3498
3499void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
3500 LocationSummary* locations = cls->GetLocations();
3501 if (cls->NeedsAccessCheck()) {
3502 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
3503 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
3504 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
3505 return;
3506 }
3507
3508 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3509 Location out_loc = locations->Out();
3510 GpuRegister out = out_loc.AsRegister<GpuRegister>();
3511 GpuRegister current_method_reg = ZERO;
3512 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3513 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3514 current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
3515 }
3516
3517 bool generate_null_check = false;
3518 switch (load_kind) {
3519 case HLoadClass::LoadKind::kReferrersClass:
3520 DCHECK(!cls->CanCallRuntime());
3521 DCHECK(!cls->MustGenerateClinitCheck());
3522 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3523 GenerateGcRootFieldLoad(cls,
3524 out_loc,
3525 current_method_reg,
3526 ArtMethod::DeclaringClassOffset().Int32Value());
3527 break;
3528 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3529 DCHECK(!kEmitCompilerReadBarrier);
3530 __ LoadLiteral(out,
3531 kLoadUnsignedWord,
3532 codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
3533 cls->GetTypeIndex()));
3534 break;
3535 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
3536 DCHECK(!kEmitCompilerReadBarrier);
3537 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3538 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
3539 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3540 __ Daddiu(out, AT, /* placeholder */ 0x5678);
3541 break;
3542 }
3543 case HLoadClass::LoadKind::kBootImageAddress: {
3544 DCHECK(!kEmitCompilerReadBarrier);
3545 DCHECK_NE(cls->GetAddress(), 0u);
3546 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
3547 __ LoadLiteral(out,
3548 kLoadUnsignedWord,
3549 codegen_->DeduplicateBootImageAddressLiteral(address));
3550 break;
3551 }
3552 case HLoadClass::LoadKind::kJitTableAddress: {
3553 LOG(FATAL) << "Unimplemented";
3554 break;
3555 }
3556 case HLoadClass::LoadKind::kDexCachePcRelative: {
3557 uint32_t element_offset = cls->GetDexCacheElementOffset();
3558 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3559 codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), element_offset);
3560 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3561 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
3562 GenerateGcRootFieldLoad(cls, out_loc, AT, /* placeholder */ 0x5678);
3563 generate_null_check = !cls->IsInDexCache();
3564 break;
3565 }
3566 case HLoadClass::LoadKind::kDexCacheViaMethod: {
3567 // /* GcRoot<mirror::Class>[] */ out =
3568 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
3569 __ LoadFromOffset(kLoadDoubleword,
3570 out,
3571 current_method_reg,
3572 ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value());
3573 // /* GcRoot<mirror::Class> */ out = out[type_index]
3574 size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
3575 GenerateGcRootFieldLoad(cls, out_loc, out, offset);
3576 generate_null_check = !cls->IsInDexCache();
3577 }
3578 }
3579
3580 if (generate_null_check || cls->MustGenerateClinitCheck()) {
3581 DCHECK(cls->CanCallRuntime());
3582 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
3583 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3584 codegen_->AddSlowPath(slow_path);
3585 if (generate_null_check) {
3586 __ Beqzc(out, slow_path->GetEntryLabel());
3587 }
3588 if (cls->MustGenerateClinitCheck()) {
3589 GenerateClassInitializationCheck(slow_path, out);
3590 } else {
3591 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003592 }
3593 }
3594}
3595
3596static int32_t GetExceptionTlsOffset() {
3597 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
3598}
3599
3600void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
3601 LocationSummary* locations =
3602 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3603 locations->SetOut(Location::RequiresRegister());
3604}
3605
3606void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
3607 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
3608 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
3609}
3610
3611void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
3612 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
3613}
3614
3615void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
3616 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003617}
3618
3619void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
3620 HLoadString::LoadKind load_kind = load->GetLoadKind();
3621 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
3622 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
3623 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
3624 InvokeRuntimeCallingConvention calling_convention;
3625 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
3626 } else {
3627 locations->SetOut(Location::RequiresRegister());
3628 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003629}
3630
3631void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) {
3632 HLoadString::LoadKind load_kind = load->GetLoadKind();
3633 LocationSummary* locations = load->GetLocations();
3634 Location out_loc = locations->Out();
3635 GpuRegister out = out_loc.AsRegister<GpuRegister>();
3636
3637 switch (load_kind) {
3638 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3639 __ LoadLiteral(out,
3640 kLoadUnsignedWord,
3641 codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
3642 load->GetStringIndex()));
3643 return; // No dex cache slow path.
3644 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
3645 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
3646 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3647 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
3648 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3649 __ Daddiu(out, AT, /* placeholder */ 0x5678);
3650 return; // No dex cache slow path.
3651 }
3652 case HLoadString::LoadKind::kBootImageAddress: {
3653 DCHECK_NE(load->GetAddress(), 0u);
3654 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
3655 __ LoadLiteral(out,
3656 kLoadUnsignedWord,
3657 codegen_->DeduplicateBootImageAddressLiteral(address));
3658 return; // No dex cache slow path.
3659 }
3660 case HLoadString::LoadKind::kBssEntry: {
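      // The string's .bss slot is initially zero; when the Lwu below reads zero, the
      // slow path resolves the string (and is expected to fill the slot for later loads).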
3661 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
3662 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3663 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
3664 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3665 __ Lwu(out, AT, /* placeholder */ 0x5678);
3666 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
3667 codegen_->AddSlowPath(slow_path);
3668 __ Beqzc(out, slow_path->GetEntryLabel());
3669 __ Bind(slow_path->GetExitLabel());
3670 return;
3671 }
3672 default:
3673 break;
3674 }
3675
3676 // TODO: Re-add the compiler code to do string dex cache lookup again.
3677 DCHECK(load_kind == HLoadString::LoadKind::kDexCacheViaMethod);
3678 InvokeRuntimeCallingConvention calling_convention;
3679 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
3680 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
3681 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003682}
3683
Alexey Frunze4dda3372015-06-01 18:31:49 -07003684void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
3685 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3686 locations->SetOut(Location::ConstantLocation(constant));
3687}
3688
3689void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
3690 // Will be generated at use site.
3691}
3692
3693void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
3694 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003695 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003696 InvokeRuntimeCallingConvention calling_convention;
3697 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3698}
3699
3700void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
3701 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
3702 instruction,
3703 instruction->GetDexPc());
3704 if (instruction->IsEnter()) {
3705 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
3706 } else {
3707 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
3708 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003709}
3710
3711void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
3712 LocationSummary* locations =
3713 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3714 switch (mul->GetResultType()) {
3715 case Primitive::kPrimInt:
3716 case Primitive::kPrimLong:
3717 locations->SetInAt(0, Location::RequiresRegister());
3718 locations->SetInAt(1, Location::RequiresRegister());
3719 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3720 break;
3721
3722 case Primitive::kPrimFloat:
3723 case Primitive::kPrimDouble:
3724 locations->SetInAt(0, Location::RequiresFpuRegister());
3725 locations->SetInAt(1, Location::RequiresFpuRegister());
3726 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3727 break;
3728
3729 default:
3730 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3731 }
3732}
3733
3734void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
3735 Primitive::Type type = instruction->GetType();
3736 LocationSummary* locations = instruction->GetLocations();
3737
3738 switch (type) {
3739 case Primitive::kPrimInt:
3740 case Primitive::kPrimLong: {
3741 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3742 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3743 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
3744 if (type == Primitive::kPrimInt)
3745 __ MulR6(dst, lhs, rhs);
3746 else
3747 __ Dmul(dst, lhs, rhs);
3748 break;
3749 }
3750 case Primitive::kPrimFloat:
3751 case Primitive::kPrimDouble: {
3752 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3753 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3754 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3755 if (type == Primitive::kPrimFloat)
3756 __ MulS(dst, lhs, rhs);
3757 else
3758 __ MulD(dst, lhs, rhs);
3759 break;
3760 }
3761 default:
3762 LOG(FATAL) << "Unexpected mul type " << type;
3763 }
3764}
3765
3766void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
3767 LocationSummary* locations =
3768 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
3769 switch (neg->GetResultType()) {
3770 case Primitive::kPrimInt:
3771 case Primitive::kPrimLong:
3772 locations->SetInAt(0, Location::RequiresRegister());
3773 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3774 break;
3775
3776 case Primitive::kPrimFloat:
3777 case Primitive::kPrimDouble:
3778 locations->SetInAt(0, Location::RequiresFpuRegister());
3779 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3780 break;
3781
3782 default:
3783 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3784 }
3785}
3786
3787void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
3788 Primitive::Type type = instruction->GetType();
3789 LocationSummary* locations = instruction->GetLocations();
3790
3791 switch (type) {
3792 case Primitive::kPrimInt:
3793 case Primitive::kPrimLong: {
3794 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3795 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
3796 if (type == Primitive::kPrimInt)
3797 __ Subu(dst, ZERO, src);
3798 else
3799 __ Dsubu(dst, ZERO, src);
3800 break;
3801 }
3802 case Primitive::kPrimFloat:
3803 case Primitive::kPrimDouble: {
3804 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3805 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
3806 if (type == Primitive::kPrimFloat)
3807 __ NegS(dst, src);
3808 else
3809 __ NegD(dst, src);
3810 break;
3811 }
3812 default:
3813 LOG(FATAL) << "Unexpected neg type " << type;
3814 }
3815}
3816
3817void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
3818 LocationSummary* locations =
3819 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
3820 InvokeRuntimeCallingConvention calling_convention;
3821 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3822 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
3823 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3824 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3825}
3826
3827void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
3828 LocationSummary* locations = instruction->GetLocations();
3829 // Move a uint16_t value to a register.
3830 __ LoadConst32(locations->GetTemp(0).AsRegister<GpuRegister>(),
3831 instruction->GetTypeIndex().index_);
Serban Constantinescufc734082016-07-19 17:18:07 +01003832 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003833 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
3834}
3835
3836void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
3837 LocationSummary* locations =
3838 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
3839 InvokeRuntimeCallingConvention calling_convention;
3840 if (instruction->IsStringAlloc()) {
3841 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3842 } else {
3843 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3844 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3845 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003846 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
3847}
3848
3849void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
3850   if (instruction->IsStringAlloc()) {
3851 // String is allocated through StringFactory. Call NewEmptyString entry point.
3852 GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
3853     MemberOffset code_offset =
3854         ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
3855     __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
3856 __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
3857 __ Jalr(T9);
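    // The NOP fills the branch delay slot of the JALR above.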
3858 __ Nop();
3859 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3860 } else {
3861     codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
3862     CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3863   }
3864}
3865
3866void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
3867 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3868 locations->SetInAt(0, Location::RequiresRegister());
3869 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3870}
3871
3872void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
3873 Primitive::Type type = instruction->GetType();
3874 LocationSummary* locations = instruction->GetLocations();
3875
3876 switch (type) {
3877 case Primitive::kPrimInt:
3878 case Primitive::kPrimLong: {
3879 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3880 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
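      // Bitwise not: nor(src, 0) computes ~src.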
3881 __ Nor(dst, src, ZERO);
3882 break;
3883 }
3884
3885 default:
3886 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
3887 }
3888}
3889
3890void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3891 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3892 locations->SetInAt(0, Location::RequiresRegister());
3893 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3894}
3895
3896void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3897 LocationSummary* locations = instruction->GetLocations();
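  // Booleans are materialized as 0 or 1, so flipping the least significant bit negates the value.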
3898 __ Xori(locations->Out().AsRegister<GpuRegister>(),
3899 locations->InAt(0).AsRegister<GpuRegister>(),
3900 1);
3901}
3902
3903void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
3904   LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
3905   locations->SetInAt(0, Location::RequiresRegister());
3906}
3907
3908void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
3909   if (CanMoveNullCheckToUser(instruction)) {
3910     return;
3911 }
3912 Location obj = instruction->GetLocations()->InAt(0);
3913
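  // Load from the object into the ZERO register (the value is discarded). If the reference is
  // null, the load faults and the fault handler turns the fault into a NullPointerException.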
3914 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
3915   RecordPcInfo(instruction, instruction->GetDexPc());
3916}
3917
3918void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
3919   SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
3920   AddSlowPath(slow_path);
3921
3922 Location obj = instruction->GetLocations()->InAt(0);
3923
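  // If the object is null, branch to the slow path, which throws NullPointerException.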
3924 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3925}
3926
3927void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
3928   codegen_->GenerateNullCheck(instruction);
3929}
3930
3931void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
3932 HandleBinaryOp(instruction);
3933}
3934
3935void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
3936 HandleBinaryOp(instruction);
3937}
3938
3939void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
3940 LOG(FATAL) << "Unreachable";
3941}
3942
3943void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
3944 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3945}
3946
3947void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
3948 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3949 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
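  // Stack-passed arguments live in the caller's frame; rebase their offsets onto the current
  // frame by adding this method's frame size.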
3950 if (location.IsStackSlot()) {
3951 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3952 } else if (location.IsDoubleStackSlot()) {
3953 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3954 }
3955 locations->SetOut(location);
3956}
3957
3958void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
3959 ATTRIBUTE_UNUSED) {
3960 // Nothing to do, the parameter is already at its location.
3961}
3962
3963void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
3964 LocationSummary* locations =
3965 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3966 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3967}
3968
3969void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
3970 ATTRIBUTE_UNUSED) {
3971 // Nothing to do, the method is already at its location.
3972}
3973
3974void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
3975 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3976   for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
3977     locations->SetInAt(i, Location::Any());
3978 }
3979 locations->SetOut(Location::Any());
3980}
3981
3982void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
3983 LOG(FATAL) << "Unreachable";
3984}
3985
3986void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
3987 Primitive::Type type = rem->GetResultType();
3988 LocationSummary::CallKind call_kind =
3989       Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
3990                                            : LocationSummary::kNoCall;
3991   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
3992
3993 switch (type) {
3994 case Primitive::kPrimInt:
3995 case Primitive::kPrimLong:
3996 locations->SetInAt(0, Location::RequiresRegister());
3997       locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
3998       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3999 break;
4000
4001 case Primitive::kPrimFloat:
4002 case Primitive::kPrimDouble: {
4003 InvokeRuntimeCallingConvention calling_convention;
4004 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
4005 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
4006 locations->SetOut(calling_convention.GetReturnLocation(type));
4007 break;
4008 }
4009
4010 default:
4011 LOG(FATAL) << "Unexpected rem type " << type;
4012 }
4013}
4014
4015void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
4016 Primitive::Type type = instruction->GetType();
4017
4018 switch (type) {
4019 case Primitive::kPrimInt:
4020     case Primitive::kPrimLong:
4021       GenerateDivRemIntegral(instruction);
4022       break;
4023
4024 case Primitive::kPrimFloat:
4025 case Primitive::kPrimDouble: {
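      // The floating-point remainder is computed by the fmodf/fmod runtime entry points.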
4026       QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
4027       codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
4028       if (type == Primitive::kPrimFloat) {
4029 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4030 } else {
4031 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4032 }
4033       break;
4034 }
4035 default:
4036 LOG(FATAL) << "Unexpected rem type " << type;
4037 }
4038}
4039
4040void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4041 memory_barrier->SetLocations(nullptr);
4042}
4043
4044void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4045 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
4046}
4047
4048void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
4049 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
4050 Primitive::Type return_type = ret->InputAt(0)->GetType();
4051 locations->SetInAt(0, Mips64ReturnLocation(return_type));
4052}
4053
4054void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
4055 codegen_->GenerateFrameExit();
4056}
4057
4058void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
4059 ret->SetLocations(nullptr);
4060}
4061
4062void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
4063 codegen_->GenerateFrameExit();
4064}
4065
4066void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
4067  HandleShift(ror);
4068}
4069
4070void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
4071  HandleShift(ror);
4072}
4073
4074void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
4075 HandleShift(shl);
4076}
4077
4078void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
4079 HandleShift(shl);
4080}
4081
4082void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
4083 HandleShift(shr);
4084}
4085
4086void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
4087 HandleShift(shr);
4088}
4089
4090void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
4091 HandleBinaryOp(instruction);
4092}
4093
4094void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
4095 HandleBinaryOp(instruction);
4096}
4097
4098void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4099 HandleFieldGet(instruction, instruction->GetFieldInfo());
4100}
4101
4102void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4103 HandleFieldGet(instruction, instruction->GetFieldInfo());
4104}
4105
4106void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4107 HandleFieldSet(instruction, instruction->GetFieldInfo());
4108}
4109
4110void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4111  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
4112}
4113
4114void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
4115 HUnresolvedInstanceFieldGet* instruction) {
4116 FieldAccessCallingConventionMIPS64 calling_convention;
4117 codegen_->CreateUnresolvedFieldLocationSummary(
4118 instruction, instruction->GetFieldType(), calling_convention);
4119}
4120
4121void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
4122 HUnresolvedInstanceFieldGet* instruction) {
4123 FieldAccessCallingConventionMIPS64 calling_convention;
4124 codegen_->GenerateUnresolvedFieldAccess(instruction,
4125 instruction->GetFieldType(),
4126 instruction->GetFieldIndex(),
4127 instruction->GetDexPc(),
4128 calling_convention);
4129}
4130
4131void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
4132 HUnresolvedInstanceFieldSet* instruction) {
4133 FieldAccessCallingConventionMIPS64 calling_convention;
4134 codegen_->CreateUnresolvedFieldLocationSummary(
4135 instruction, instruction->GetFieldType(), calling_convention);
4136}
4137
4138void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
4139 HUnresolvedInstanceFieldSet* instruction) {
4140 FieldAccessCallingConventionMIPS64 calling_convention;
4141 codegen_->GenerateUnresolvedFieldAccess(instruction,
4142 instruction->GetFieldType(),
4143 instruction->GetFieldIndex(),
4144 instruction->GetDexPc(),
4145 calling_convention);
4146}
4147
4148void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
4149 HUnresolvedStaticFieldGet* instruction) {
4150 FieldAccessCallingConventionMIPS64 calling_convention;
4151 codegen_->CreateUnresolvedFieldLocationSummary(
4152 instruction, instruction->GetFieldType(), calling_convention);
4153}
4154
4155void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
4156 HUnresolvedStaticFieldGet* instruction) {
4157 FieldAccessCallingConventionMIPS64 calling_convention;
4158 codegen_->GenerateUnresolvedFieldAccess(instruction,
4159 instruction->GetFieldType(),
4160 instruction->GetFieldIndex(),
4161 instruction->GetDexPc(),
4162 calling_convention);
4163}
4164
4165void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
4166 HUnresolvedStaticFieldSet* instruction) {
4167 FieldAccessCallingConventionMIPS64 calling_convention;
4168 codegen_->CreateUnresolvedFieldLocationSummary(
4169 instruction, instruction->GetFieldType(), calling_convention);
4170}
4171
4172void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
4173 HUnresolvedStaticFieldSet* instruction) {
4174 FieldAccessCallingConventionMIPS64 calling_convention;
4175 codegen_->GenerateUnresolvedFieldAccess(instruction,
4176 instruction->GetFieldType(),
4177 instruction->GetFieldIndex(),
4178 instruction->GetDexPc(),
4179 calling_convention);
4180}
4181
4182void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
4183  LocationSummary* locations =
4184      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4185  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
4186}
4187
4188void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
4189 HBasicBlock* block = instruction->GetBlock();
4190 if (block->GetLoopInformation() != nullptr) {
4191 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4192 // The back edge will generate the suspend check.
4193 return;
4194 }
4195 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4196 // The goto will generate the suspend check.
4197 return;
4198 }
4199 GenerateSuspendCheck(instruction, nullptr);
4200}
4201
4202void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
4203  LocationSummary* locations =
4204      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
4205  InvokeRuntimeCallingConvention calling_convention;
4206 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4207}
4208
4209void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
4210  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
4211  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
4212}
4213
4214void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
4215 Primitive::Type input_type = conversion->GetInputType();
4216 Primitive::Type result_type = conversion->GetResultType();
4217 DCHECK_NE(input_type, result_type);
4218
4219 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4220 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4221 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4222 }
4223
4224  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
4225
4226 if (Primitive::IsFloatingPointType(input_type)) {
4227 locations->SetInAt(0, Location::RequiresFpuRegister());
4228 } else {
4229 locations->SetInAt(0, Location::RequiresRegister());
4230  }
4231
4232  if (Primitive::IsFloatingPointType(result_type)) {
4233    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4234  } else {
4235    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4236  }
4237}
4238
4239void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
4240 LocationSummary* locations = conversion->GetLocations();
4241 Primitive::Type result_type = conversion->GetResultType();
4242 Primitive::Type input_type = conversion->GetInputType();
4243
4244 DCHECK_NE(input_type, result_type);
4245
4246 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
4247 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
4248 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
4249
4250 switch (result_type) {
4251 case Primitive::kPrimChar:
4252 __ Andi(dst, src, 0xFFFF);
4253 break;
4254 case Primitive::kPrimByte:
4255        if (input_type == Primitive::kPrimLong) {
4256 // Type conversion from long to types narrower than int is a result of code
4257 // transformations. To avoid unpredictable results for SEB and SEH, we first
4258 // need to sign-extend the low 32-bit value into bits 32 through 63.
4259 __ Sll(dst, src, 0);
4260 __ Seb(dst, dst);
4261 } else {
4262 __ Seb(dst, src);
4263 }
4264        break;
4265 case Primitive::kPrimShort:
4266        if (input_type == Primitive::kPrimLong) {
4267 // Type conversion from long to types narrower than int is a result of code
4268 // transformations. To avoid unpredictable results for SEB and SEH, we first
4269 // need to sign-extend the low 32-bit value into bits 32 through 63.
4270 __ Sll(dst, src, 0);
4271 __ Seh(dst, dst);
4272 } else {
4273 __ Seh(dst, src);
4274 }
4275        break;
4276 case Primitive::kPrimInt:
4277 case Primitive::kPrimLong:
4278        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
4279 // conversions, except when the input and output registers are the same and we are not
4280 // converting longs to shorter types. In these cases, do nothing.
4281 if ((input_type == Primitive::kPrimLong) || (dst != src)) {
4282 __ Sll(dst, src, 0);
4283 }
4284        break;
4285
4286 default:
4287 LOG(FATAL) << "Unexpected type conversion from " << input_type
4288 << " to " << result_type;
4289 }
4290 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
4291    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
4292 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
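    // Move the integer bits into the FPU, then convert from the appropriate integer width.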
4293 if (input_type == Primitive::kPrimLong) {
4294 __ Dmtc1(src, FTMP);
4295 if (result_type == Primitive::kPrimFloat) {
4296 __ Cvtsl(dst, FTMP);
4297 } else {
4298 __ Cvtdl(dst, FTMP);
4299 }
4300 } else {
4301      __ Mtc1(src, FTMP);
4302 if (result_type == Primitive::kPrimFloat) {
4303 __ Cvtsw(dst, FTMP);
4304 } else {
4305 __ Cvtdw(dst, FTMP);
4306 }
4307    }
4308 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
4309 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4310    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
4311 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
4312 Mips64Label truncate;
4313 Mips64Label done;
4314
4315 // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
4316 // value when the input is either a NaN or is outside of the range of the output type
4317 // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
4318 // the same result.
4319 //
4320 // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
4321 // value of the output type if the input is outside of the range after the truncation or
4322 // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
4323 // results. This matches the desired float/double-to-int/long conversion exactly.
4324 //
4325 // So, NAN2008 affects handling of negative values and NaNs by the truncate instruction.
4326 //
4327 // The following code supports both NAN2008=0 and NAN2008=1 behaviors of the truncate
4328 // instruction, the reason being that the emulator implements NAN2008=0 on MIPS64R6,
4329 // even though it must be NAN2008=1 on R6.
4330 //
4331 // The code takes care of the different behaviors by first comparing the input to the
4332    // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
4333    // If the input is greater than or equal to the minimum, it proceeds to the truncate
4334 // instruction, which will handle such an input the same way irrespective of NAN2008.
4335 // Otherwise the input is compared to itself to determine whether it is a NaN or not
4336 // in order to return either zero or the minimum value.
4337 //
4338 // TODO: simplify this when the emulator correctly implements NAN2008=1 behavior of the
4339 // truncate instruction for MIPS64R6.
4340 if (input_type == Primitive::kPrimFloat) {
4341 uint32_t min_val = (result_type == Primitive::kPrimLong)
4342 ? bit_cast<uint32_t, float>(std::numeric_limits<int64_t>::min())
4343 : bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
4344 __ LoadConst32(TMP, min_val);
4345 __ Mtc1(TMP, FTMP);
4346 __ CmpLeS(FTMP, FTMP, src);
4347    } else {
4348      uint64_t min_val = (result_type == Primitive::kPrimLong)
4349 ? bit_cast<uint64_t, double>(std::numeric_limits<int64_t>::min())
4350 : bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
4351 __ LoadConst64(TMP, min_val);
4352 __ Dmtc1(TMP, FTMP);
4353 __ CmpLeD(FTMP, FTMP, src);
4354    }
4355
4356 __ Bc1nez(FTMP, &truncate);
4357
4358 if (input_type == Primitive::kPrimFloat) {
4359 __ CmpEqS(FTMP, src, src);
4360 } else {
4361 __ CmpEqD(FTMP, src, src);
4362 }
4363 if (result_type == Primitive::kPrimLong) {
4364 __ LoadConst64(dst, std::numeric_limits<int64_t>::min());
4365 } else {
4366 __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
4367 }
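    // The self-comparison above set FTMP to all ones for a non-NaN input and to zero for a NaN,
    // so the AND below keeps either the minimum value or zero in dst.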
4368 __ Mfc1(TMP, FTMP);
4369 __ And(dst, dst, TMP);
4370
4371 __ Bc(&done);
4372
4373 __ Bind(&truncate);
4374
4375 if (result_type == Primitive::kPrimLong) {
4376      if (input_type == Primitive::kPrimFloat) {
4377        __ TruncLS(FTMP, src);
4378      } else {
4379        __ TruncLD(FTMP, src);
4380      }
4381      __ Dmfc1(dst, FTMP);
4382    } else {
4383      if (input_type == Primitive::kPrimFloat) {
4384        __ TruncWS(FTMP, src);
4385      } else {
4386        __ TruncWD(FTMP, src);
4387      }
4388      __ Mfc1(dst, FTMP);
4389    }
4390
4391    __ Bind(&done);
4392  } else if (Primitive::IsFloatingPointType(result_type) &&
4393 Primitive::IsFloatingPointType(input_type)) {
4394 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
4395 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
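    // Convert between single and double precision.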
4396 if (result_type == Primitive::kPrimFloat) {
4397 __ Cvtsd(dst, src);
4398 } else {
4399 __ Cvtds(dst, src);
4400 }
4401 } else {
4402 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4403 << " to " << result_type;
4404 }
4405}
4406
4407void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
4408 HandleShift(ushr);
4409}
4410
4411void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
4412 HandleShift(ushr);
4413}
4414
4415void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
4416 HandleBinaryOp(instruction);
4417}
4418
4419void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
4420 HandleBinaryOp(instruction);
4421}
4422
4423void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
4424 // Nothing to do, this should be removed during prepare for register allocator.
4425 LOG(FATAL) << "Unreachable";
4426}
4427
4428void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
4429 // Nothing to do, this should be removed during prepare for register allocator.
4430 LOG(FATAL) << "Unreachable";
4431}
4432
4433void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
4434  HandleCondition(comp);
4435}
4436
4437void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
4438  HandleCondition(comp);
4439}
4440
4441void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
4442  HandleCondition(comp);
4443}
4444
4445void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
4446  HandleCondition(comp);
4447}
4448
4449void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
4450  HandleCondition(comp);
4451}
4452
4453void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
4454  HandleCondition(comp);
4455}
4456
4457void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
4458  HandleCondition(comp);
4459}
4460
4461void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
4462  HandleCondition(comp);
4463}
4464
4465void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
4466  HandleCondition(comp);
4467}
4468
4469void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
4470  HandleCondition(comp);
4471}
4472
4473void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
4474  HandleCondition(comp);
4475}
4476
4477void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
4478  HandleCondition(comp);
4479}
4480
4481void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
4482  HandleCondition(comp);
4483}
4484
4485void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
4486  HandleCondition(comp);
4487}
4488
4489void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
4490  HandleCondition(comp);
4491}
4492
4493void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
4494  HandleCondition(comp);
4495}
4496
4497void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
4498  HandleCondition(comp);
4499}
4500
4501void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
4502  HandleCondition(comp);
4503}
4504
4505void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
4506  HandleCondition(comp);
4507}
4508
4509void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
4510  HandleCondition(comp);
4511}
4512
4513// Simple implementation of packed switch - generate cascaded compare/jumps.
4514void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4515 LocationSummary* locations =
4516 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4517 locations->SetInAt(0, Location::RequiresRegister());
4518}
4519
4520void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
4521 int32_t lower_bound,
4522 uint32_t num_entries,
4523 HBasicBlock* switch_block,
4524 HBasicBlock* default_block) {
4525  // Create a set of compare/jumps.
4526  GpuRegister temp_reg = TMP;
4527  __ Addiu32(temp_reg, value_reg, -lower_bound);
4528  // Jump to the default block if the index is negative.
4529  // Note: We don't check the case where the index is positive while value < lower_bound; in that
4530  // case index >= num_entries must be true, so the default is still taken and one branch is saved.
4531  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));
4532
4533  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
4534  // Jump to successors[0] if value == lower_bound.
4535 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
4536 int32_t last_index = 0;
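  // Handle the remaining cases two at a time.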
4537 for (; num_entries - last_index > 2; last_index += 2) {
4538 __ Addiu(temp_reg, temp_reg, -2);
4539 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4540 __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
4541 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4542 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
4543 }
4544 if (num_entries - last_index == 2) {
4545 // The last missing case_value.
4546 __ Addiu(temp_reg, temp_reg, -1);
4547 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
4548  }
4549
4550 // And the default for any other value.
4551  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
4552    __ Bc(codegen_->GetLabelOf(default_block));
4553  }
4554}
4555
4556void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
4557 int32_t lower_bound,
4558 uint32_t num_entries,
4559 HBasicBlock* switch_block,
4560 HBasicBlock* default_block) {
4561 // Create a jump table.
4562 std::vector<Mips64Label*> labels(num_entries);
4563 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
4564 for (uint32_t i = 0; i < num_entries; i++) {
4565 labels[i] = codegen_->GetLabelOf(successors[i]);
4566 }
4567 JumpTable* table = __ CreateJumpTable(std::move(labels));
4568
4569 // Is the value in range?
4570 __ Addiu32(TMP, value_reg, -lower_bound);
4571 __ LoadConst32(AT, num_entries);
4572 __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));
4573
4574 // We are in the range of the table.
4575 // Load the target address from the jump table, indexing by the value.
4576 __ LoadLabelAddress(AT, table->GetLabel());
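  // Each jump table entry is a 4-byte offset, so scale the index by 4.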
4577 __ Sll(TMP, TMP, 2);
4578 __ Daddu(TMP, TMP, AT);
4579 __ Lw(TMP, TMP, 0);
4580 // Compute the absolute target address by adding the table start address
4581 // (the table contains offsets to targets relative to its start).
4582 __ Daddu(TMP, TMP, AT);
4583 // And jump.
4584 __ Jr(TMP);
4585 __ Nop();
4586}
4587
4588void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4589 int32_t lower_bound = switch_instr->GetStartValue();
4590 uint32_t num_entries = switch_instr->GetNumEntries();
4591 LocationSummary* locations = switch_instr->GetLocations();
4592 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
4593 HBasicBlock* switch_block = switch_instr->GetBlock();
4594 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4595
4596 if (num_entries > kPackedSwitchJumpTableThreshold) {
4597 GenTableBasedPackedSwitch(value_reg,
4598 lower_bound,
4599 num_entries,
4600 switch_block,
4601 default_block);
4602 } else {
4603 GenPackedSwitchWithCompares(value_reg,
4604 lower_bound,
4605 num_entries,
4606 switch_block,
4607 default_block);
4608 }
4609}
4610
4611void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet*) {
4612 UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
4613}
4614
4615void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet*) {
4616 UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
4617}
4618
4619}  // namespace mips64
4620} // namespace art