blob: 0d3cb3b8caa1287bc27e1c92b11392c4ffe010a6 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010027#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070028#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070029#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070030#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010031#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070032#include "mirror/array-inl.h"
33#include "mirror/class-inl.h"
34#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010035#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070037#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070038#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070039#include "utils/stack_checks.h"
40
41namespace art {
42namespace mips64 {
43
// Offset of the ArtMethod* within the frame; the current method is spilled at SP + 0.
static constexpr int kCurrentMethodStackOffset = 0;
// Register holding the ArtMethod* on entry (first GPR argument, A0).
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
51
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010052Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070053 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010054 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010055 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010056 case DataType::Type::kInt8:
57 case DataType::Type::kUint16:
58 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -080059 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010060 case DataType::Type::kInt32:
61 case DataType::Type::kReference:
Aart Bik66c158e2018-01-31 12:55:04 -080062 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010063 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070064 return Location::RegisterLocation(V0);
65
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010066 case DataType::Type::kFloat32:
67 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070068 return Location::FpuRegisterLocation(F0);
69
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010070 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070071 return Location();
72 }
73 UNREACHABLE();
74}
75
// Returns the ABI location for a Java-call return value; delegates to Mips64ReturnLocation.
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
  return Mips64ReturnLocation(type);
}
79
// The callee receives its ArtMethod* in A0 (kMethodRegisterArgument).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
83
// Assigns the location of the next managed-call argument of type `type`.
// FP arguments use FPU argument registers while available; integral/reference
// arguments use GPR argument registers while available; everything else spills
// to the stack. Note that GPR and FPU indices advance in lock-step: using an
// FPU register also consumes the corresponding GPR slot and vice versa.
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    // Keep the GPR cursor in sync with the FPU cursor.
    gp_index_++;
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    // Keep the FPU cursor in sync with the GPR cursor.
    float_index_++;
  } else {
    // Out of argument registers: place the argument on the stack.
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;

  return next_location;
}
110
// Runtime calls use the same return locations as managed calls.
Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
  return Mips64ReturnLocation(type);
}
114
Vladimir Marko3232dbb2018-07-25 15:42:46 +0100115static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
116 InvokeRuntimeCallingConvention calling_convention;
117 RegisterSet caller_saves = RegisterSet::Empty();
118 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
119 // The reference is returned in the same register. This differs from the standard return location.
120 return caller_saves;
121}
122
// Shorthand for emitting instructions through the current codegen's assembler.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
// Byte offset of a quick entrypoint within the MIPS64 Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700126
// Slow path throwing ArrayIndexOutOfBoundsException (or StringIndexOutOfBoundsException
// for String.charAt) when a bounds check fails. Fatal: control never returns here.
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    // String.charAt range failures throw StringIndexOutOfBoundsException instead.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};
163
// Slow path throwing ArithmeticException when an integer division by zero is
// detected. Fatal: the runtime call never returns.
class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    // The entrypoint takes no arguments; it throws unconditionally.
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};
183
// Slow path for HLoadClass/HClinitCheck: resolves the type and/or runs its
// static initializer via the runtime, then moves the resulting class into the
// instruction's output register.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `at` is the instruction this slow path belongs to (the HLoadClass itself,
  // or an HClinitCheck whose input is `cls`).
  LoadClassSlowPathMIPS64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeMIPS64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), mips64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
      mips64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // The class is already resolved; move it into the first argument register.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   source,
                                   cls_->GetType());
    }
    if (must_do_clinit) {
      mips64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
244
// Slow path for HLoadString with a .bss entry load kind: calls the runtime to
// resolve the string and moves the result into the instruction's output.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    DCHECK(instruction_->IsLoadString());
    // Only the kBssEntry load kind requires this slow path.
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the string index to the resolution entrypoint.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // The resolved string is returned in the first argument register; move it out.
    DataType::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
282
// Slow path throwing NullPointerException when an explicit null check fails.
// Fatal: the runtime call never returns.
class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};
308
// Slow path calling the kQuickTestSuspend entrypoint when the thread's suspend
// flag is set, then branching back either to the check's return label or to a
// designated successor block.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ Bc(GetReturnLabel());
    } else {
      // Continue at the explicit successor block.
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  // Valid only when there is no explicit successor block.
  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathMIPS64"; }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
349
// Slow path shared by HInstanceOf and HCheckCast: calls the corresponding
// runtime entrypoint. For a fatal check-cast (one that cannot throw into a
// catch block) register state is not preserved since control never returns.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // instanceof produces a value; move the runtime result to the output.
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
404
// Slow path for HDeoptimize: passes the deoptimization kind to the runtime,
// which transfers execution back to the interpreter (the call does not return
// here).
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // First argument: the DeoptimizationKind as a 32-bit constant.
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};
427
// Slow path for HArraySet of an object element that needs a runtime type
// check: marshals (array, index, value) into the runtime calling convention
// and calls the kQuickAputObject entrypoint.
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Three arguments may overlap the target registers, so resolve them as one
    // parallel move.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};
468
469// Slow path marking an object reference `ref` during a read
470// barrier. The field `obj.field` in the object `obj` holding this
471// reference does not get updated by this slow path after marking (see
472// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
473//
474// This means that after the execution of this slow path, `ref` will
475// always be up-to-date, but `obj.field` may not; i.e., after the
476// flip, `ref` will be a to-space reference, but `obj.field` will
477// probably still be a from-space reference (unless it gets updated by
478// another thread, or if another thread installed another object
479// reference (different from `ref`) in `obj.field`).
480//
481// If `entrypoint` is a valid location it is assumed to already be
482// holding the entrypoint. The case where the entrypoint is passed in
483// is for the GcRoot read barrier.
484class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
485 public:
486 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
487 Location ref,
488 Location entrypoint = Location::NoLocation())
489 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
490 DCHECK(kEmitCompilerReadBarrier);
491 }
492
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100493 const char* GetDescription() const override { return "ReadBarrierMarkSlowPathMIPS"; }
Alexey Frunze15958152017-02-09 19:08:30 -0800494
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100495 void EmitNativeCode(CodeGenerator* codegen) override {
Alexey Frunze15958152017-02-09 19:08:30 -0800496 LocationSummary* locations = instruction_->GetLocations();
497 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
498 DCHECK(locations->CanCall());
499 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
500 DCHECK(instruction_->IsInstanceFieldGet() ||
501 instruction_->IsStaticFieldGet() ||
502 instruction_->IsArrayGet() ||
503 instruction_->IsArraySet() ||
504 instruction_->IsLoadClass() ||
505 instruction_->IsLoadString() ||
506 instruction_->IsInstanceOf() ||
507 instruction_->IsCheckCast() ||
508 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
509 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
510 << "Unexpected instruction in read barrier marking slow path: "
511 << instruction_->DebugName();
512
513 __ Bind(GetEntryLabel());
514 // No need to save live registers; it's taken care of by the
515 // entrypoint. Also, there is no need to update the stack mask,
516 // as this runtime call will not trigger a garbage collection.
517 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
518 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
519 (S2 <= ref_reg && ref_reg <= S7) ||
520 (ref_reg == S8)) << ref_reg;
521 // "Compact" slow path, saving two moves.
522 //
523 // Instead of using the standard runtime calling convention (input
524 // and output in A0 and V0 respectively):
525 //
526 // A0 <- ref
527 // V0 <- ReadBarrierMark(A0)
528 // ref <- V0
529 //
530 // we just use rX (the register containing `ref`) as input and output
531 // of a dedicated entrypoint:
532 //
533 // rX <- ReadBarrierMarkRegX(rX)
534 //
535 if (entrypoint_.IsValid()) {
536 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
537 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
538 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
539 __ Nop();
540 } else {
541 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100542 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800543 // This runtime call does not require a stack map.
544 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
545 instruction_,
546 this);
547 }
548 __ Bc(GetExitLabel());
549 }
550
551 private:
552 // The location (register) of the marked object reference.
553 const Location ref_;
554
555 // The location of the entrypoint if already loaded.
556 const Location entrypoint_;
557
558 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
559};
560
561// Slow path marking an object reference `ref` during a read barrier,
562// and if needed, atomically updating the field `obj.field` in the
563// object `obj` holding this reference after marking (contrary to
564// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
565// `obj.field`).
566//
567// This means that after the execution of this slow path, both `ref`
568// and `obj.field` will be up-to-date; i.e., after the flip, both will
569// hold the same to-space reference (unless another thread installed
570// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `ref` is the location (register) of the reference to mark; `obj` is the
  // register holding the object containing the reference field;
  // `field_offset` is a register location holding the byte offset of that
  // field within `obj`; `temp1` is a scratch register used to keep the
  // pre-marking value of `ref` alive across the entrypoint call.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // One marking entrypoint per register, indexed by `ref_reg - 1`
    // (presumably because ZERO cannot hold a reference — see
    // Thread::ReadBarrierMarkEntryPointsOffset).
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    // Per MIPS LL/SC semantics, SC leaves 1 in `tmp` on success and
    // 0 on failure; retry the loop on failure.
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Scratch register preserving the pre-marking reference value across the call.
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
717
718// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` is where the barrier's result is placed; `ref` is the reference
  // that was loaded; `obj` is the holder object; `offset`/`index` together
  // describe where in `obj` the reference was loaded from (index is used
  // for array accesses and the Unsafe get-object intrinsics).
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      // No index: pass the constant offset as the third argument instead.
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    // Move the runtime's result (the marked reference) into `out_`.
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register distinct from `ref_` and `obj_`
  // that is neither callee-save nor blocked; aborts if none exists.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
903
904// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the barrier's result; `root` is the GC root being read.
  ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // GC roots are only read by HLoadClass and HLoadString here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    // Pass the root as the first runtime-call argument.
    mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 root_,
                                 DataType::Type::kReference);
    mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // Move the runtime's result (the marked root) into `out_`.
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathMIPS64"; }

 private:
  // The output location of the barrier's result.
  const Location out_;
  // The location of the GC root to be read.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
};
949
// Constructs the MIPS64 code generator. All side tables (literal dedupe maps,
// patch-info lists, JIT patch maps) are allocated from the graph's arena.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs= */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(),
                 compiler_options.GetInstructionSetFeatures()->AsMips64InstructionSetFeatures()),
      // Deduplication maps for 32-/64-bit literal constants.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      // Patch-info lists for boot-image and .bss entry references.
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      // JIT-time patch maps for strings and classes.
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
987
988#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100989// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
990#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700991#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700992
993void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700994 // Ensure that we fix up branches.
995 __ FinalizeCode();
996
997 // Adjust native pc offsets in stack maps.
Vladimir Marko174b2e22017-10-12 13:34:49 +0100998 StackMapStream* stack_map_stream = GetStackMapStream();
999 for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
David Srbeckyd02b23f2018-05-29 23:27:22 +01001000 uint32_t old_position = stack_map_stream->GetStackMapNativePcOffset(i);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001001 uint32_t new_position = __ GetAdjustedPosition(old_position);
1002 DCHECK_GE(new_position, old_position);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001003 stack_map_stream->SetStackMapNativePcOffset(i, new_position);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001004 }
1005
1006 // Adjust pc offsets for the disassembly information.
1007 if (disasm_info_ != nullptr) {
1008 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1009 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1010 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1011 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1012 it.second.start = __ GetAdjustedPosition(it.second.start);
1013 it.second.end = __ GetAdjustedPosition(it.second.end);
1014 }
1015 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1016 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1017 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1018 }
1019 }
1020
Alexey Frunze4dda3372015-06-01 18:31:49 -07001021 CodeGenerator::Finalize(allocator);
1022}
1023
// Parallel-move code is emitted through the owning code generator's assembler.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1027
1028void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001029 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001030 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1031}
1032
1033void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001034 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001035 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1036}
1037
1038void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
1039 // Pop reg
1040 __ Ld(GpuRegister(reg), SP, 0);
Lazar Trsicd9672662015-09-03 17:33:01 +02001041 __ DecreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001042}
1043
1044void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
1045 // Push reg
Lazar Trsicd9672662015-09-03 17:33:01 +02001046 __ IncreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001047 __ Sd(GpuRegister(reg), SP, 0);
1048}
1049
// Swaps the contents of two stack slots (`index1`/`index2`, SP-relative byte
// offsets). `double_slot` selects 64-bit vs 32-bit slot width.
void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}
1073
// Swaps two quadword (128-bit) stack slots using the two FPU scratch
// registers FTMP/FTMP2; both loads precede both stores.
void ParallelMoveResolverMIPS64::ExchangeQuadSlots(int index1, int index2) {
  __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, index1);
  __ LoadFpuFromOffset(kLoadQuadword, FTMP2, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP2, SP, index1);
}
1080
// Maps a MIPS64 core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
1084
// Maps a MIPS64 floating-point register to its DWARF register number for
// CFI emission.
static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001088
// Emits the method prologue: optional hotness counting, stack-overflow
// probe, frame allocation, callee-save spills, and bookkeeping stores.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    // Bump the method's 16-bit hotness counter (load-increment-store).
    __ Lhu(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
    __ Addiu(TMP, TMP, 1);
    __ Sh(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe below SP: a load into ZERO at the reserved-bytes boundary
    // faults if the stack would overflow; the pc is recorded so the fault
    // can be attributed to this method.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
        << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves are stored top-down from the high end of the frame.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow below the core ones.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1155
// Emits the method epilogue: restores callee-saved registers (mirroring
// GenerateFrameEntry's layout), releases the frame, and returns via RA.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: jump to the address held in RA.
  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1190
1191void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
1192 __ Bind(GetLabelOf(block));
1193}
1194
// Moves a value between two arbitrary locations (GPR, FPR, SIMD/stack slot,
// or constant). `dst_type` selects 32- vs 64-bit moves. A kVoid
// ("unspecified") type is rejected by the DCHECK below, so the
// `unspecified_type` branches are effectively dead and kept only for parity
// with other backends.
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (DataType::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsSIMDStackSlot()) {
      // 128-bit vector slot; always a quadword FPU load.
      __ LoadFpuFromOffset(kLoadQuadword,
                           destination.AsFpuRegister<FpuRegister>(),
                           SP,
                           source.GetStackIndex());
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant.
      // The constant is first materialized in a GPR (AT when the final
      // destination is an FPR), then transferred with (D)MTC1 if needed.
      GpuRegister gpr = AT;
      if (!DataType::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        // A zero FP constant can be transferred straight from the ZERO
        // register, saving the constant-load instruction.
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == DataType::Type::kFloat32) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == DataType::Type::kFloat64) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // When the graph uses SIMD, FPRs may hold vectors; copy the whole
          // 128-bit register.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          // Move to FPR from FPR
          if (dst_type == DataType::Type::kFloat32) {
            __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          } else {
            DCHECK_EQ(dst_type, DataType::Type::kFloat64);
            __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          }
        }
      } else {
        DCHECK(destination.IsRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ StoreFpuToOffset(kStoreQuadword,
                          source.AsFpuRegister<FpuRegister>(),
                          SP,
                          destination.GetStackIndex());
    } else {
      DCHECK(source.IsSIMDStackSlot());
      // Stack-to-stack vector copy goes through the scratch FPU register.
      __ LoadFpuFromOffset(kLoadQuadword,
                           FTMP,
                           SP,
                           source.GetStackIndex());
      __ StoreFpuToOffset(kStoreQuadword,
                          FTMP,
                          SP,
                          destination.GetStackIndex());
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      // Zero constants are stored straight from the ZERO register; anything
      // else is materialized in the TMP scratch register first.
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack (via the TMP scratch register).
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
1385
// Swaps the contents of two locations (used by the parallel move resolver).
// Neither location may be a constant. `type` distinguishes kFloat32 from
// kFloat64 for the FPR/FPR case; other cases infer widths from the locations
// themselves. TMP/FTMP serve as the scratch registers.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_simd1 = loc1.IsSIMDStackSlot();
  bool is_simd2 = loc2.IsSIMDStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    if (GetGraph()->HasSIMD()) {
      // FPRs may hold vectors; swap the full 128-bit registers via FTMP.
      __ MoveV(static_cast<VectorRegister>(FTMP), VectorRegisterFrom(loc1));
      __ MoveV(VectorRegisterFrom(loc1), VectorRegisterFrom(loc2));
      __ MoveV(VectorRegisterFrom(loc2), static_cast<VectorRegister>(FTMP));
    } else {
      FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
      FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32) {
        __ MovS(FTMP, r1);
        __ MovS(r1, r2);
        __ MovS(r2, FTMP);
      } else {
        DCHECK_EQ(type, DataType::Type::kFloat64);
        __ MovD(FTMP, r1);
        __ MovD(r1, r2);
        __ MovD(r2, FTMP);
      }
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot.
    // The slot value is held in TMP while the register value is stored into
    // the slot, then TMP is transferred back into the register.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack-to-stack swaps are delegated to the move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else if (is_simd1 && is_simd2) {
    move_resolver_.ExchangeQuadSlots(loc1.GetStackIndex(), loc2.GetStackIndex());
  } else if ((is_fp_reg1 && is_simd2) || (is_fp_reg2 && is_simd1)) {
    // Swap a vector register with a 128-bit stack slot via FTMP.
    Location fp_reg_loc = is_fp_reg1 ? loc1 : loc2;
    Location mem_loc = is_fp_reg1 ? loc2 : loc1;
    __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, mem_loc.GetStackIndex());
    __ StoreFpuToOffset(kStoreQuadword,
                        fp_reg_loc.AsFpuRegister<FpuRegister>(),
                        SP,
                        mem_loc.GetStackIndex());
    __ MoveV(VectorRegisterFrom(fp_reg_loc), static_cast<VectorRegister>(FTMP));
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1470
Calin Juravle175dc732015-08-25 15:42:32 +01001471void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1472 DCHECK(location.IsRegister());
1473 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1474}
1475
Calin Juravlee460d1d2015-09-29 04:52:17 +01001476void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1477 if (location.IsRegister()) {
1478 locations->AddTemp(location);
1479 } else {
1480 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1481 }
1482}
1483
// Marks the GC card table entry covering `object` after a reference `value`
// has been stored into one of its fields. When `value_can_be_null` is true,
// a null store skips the marking entirely. Clobbers the AT and TMP scratch
// registers.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // Load the address of the card table into `card`.
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // Calculate the address of the card corresponding to `object`.
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the SB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1519
// Converts the recorded PC-relative patch infos of one flavor into
// linker::LinkerPatch entries using the given `Factory` function.
// Patches come in high/low pairs: the low-half patch anchors its PC-relative
// offset on its paired high-half (AUIPC) instruction, while a high-half
// patch anchors on itself.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile* dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    // All labels must have been bound during code emission by now.
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, dex_file, pc_rel_offset, offset_or_index));
  }
}
1534
// Adapts a 3-argument linker-patch factory (for patch kinds that carry no
// dex file, e.g. intrinsic references and .data.bimg.rel.ro entries) to the
// 4-argument shape expected by EmitPcRelativeLinkerPatches().
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1543
// Collects all recorded linker patches into `linker_patches`.
// When compiling the boot image, boot-image patches are direct references;
// otherwise `boot_image_method_patches_` is reused for .data.bimg.rel.ro
// entries and the other boot-image patch lists must be empty. The final
// DCHECK verifies the reserve() accounting matched what was emitted.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_intrinsic_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_intrinsic_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
    DCHECK(boot_image_intrinsic_patches_.empty());
  }
  // .bss entry patches are emitted in both compilation modes.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1579
Vladimir Marko6fd16062018-06-26 11:02:04 +01001580CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageIntrinsicPatch(
1581 uint32_t intrinsic_data,
1582 const PcRelativePatchInfo* info_high) {
1583 return NewPcRelativePatch(
Andreas Gampe3db70682018-12-26 15:12:03 -08001584 /* dex_file= */ nullptr, intrinsic_data, info_high, &boot_image_intrinsic_patches_);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001585}
1586
Vladimir Markob066d432018-01-03 13:14:37 +00001587CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageRelRoPatch(
1588 uint32_t boot_image_offset,
1589 const PcRelativePatchInfo* info_high) {
1590 return NewPcRelativePatch(
Andreas Gampe3db70682018-12-26 15:12:03 -08001591 /* dex_file= */ nullptr, boot_image_offset, info_high, &boot_image_method_patches_);
Vladimir Markob066d432018-01-03 13:14:37 +00001592}
1593
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001594CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001595 MethodReference target_method,
1596 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001597 return NewPcRelativePatch(
1598 target_method.dex_file, target_method.index, info_high, &boot_image_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001599}
1600
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001601CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001602 MethodReference target_method,
1603 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001604 return NewPcRelativePatch(
1605 target_method.dex_file, target_method.index, info_high, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001606}
1607
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001608CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001609 const DexFile& dex_file,
1610 dex::TypeIndex type_index,
1611 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001612 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &boot_image_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001613}
1614
Vladimir Marko1998cd02017-01-13 13:02:58 +00001615CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001616 const DexFile& dex_file,
1617 dex::TypeIndex type_index,
1618 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001619 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001620}
1621
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001622CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001623 const DexFile& dex_file,
1624 dex::StringIndex string_index,
1625 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001626 return NewPcRelativePatch(
1627 &dex_file, string_index.index_, info_high, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001628}
1629
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001630CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1631 const DexFile& dex_file,
1632 dex::StringIndex string_index,
1633 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001634 return NewPcRelativePatch(&dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001635}
1636
Alexey Frunze19f6c692016-11-30 19:19:55 -08001637CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001638 const DexFile* dex_file,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001639 uint32_t offset_or_index,
1640 const PcRelativePatchInfo* info_high,
1641 ArenaDeque<PcRelativePatchInfo>* patches) {
1642 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001643 return &patches->back();
1644}
1645
Alexey Frunzef63f5692016-12-13 17:43:11 -08001646Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1647 return map->GetOrCreate(
1648 value,
1649 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1650}
1651
Alexey Frunze19f6c692016-11-30 19:19:55 -08001652Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1653 return uint64_literals_.GetOrCreate(
1654 value,
1655 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1656}
1657
Alexey Frunzef63f5692016-12-13 17:43:11 -08001658Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001659 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001660}
1661
// Emits the high half of a PC-relative address computation into `out` and
// binds the labels the linker will later patch. The 0x1234 immediate is a
// placeholder; the linker rewrites it (and the paired low-half immediate)
// with the real 32-bit offset.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  // `info_high` must itself be a high-half record, not a low half.
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* imm16= */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    // Bind the low-half label to the very next instruction the caller emits.
    __ Bind(&info_low->label);
  }
}
1676
// Loads the address of a boot image object identified by `boot_image_reference`
// into `reg`. Three modes:
//  - boot image compilation: PC-relative address with linker patches
//    (the 0x5678 immediate is a placeholder the linker rewrites);
//  - PIC AOT against an existing boot image: 32-bit load from the
//    .data.bimg.rel.ro entry holding the address;
//  - JIT: the boot image is mapped, so compute the absolute address now and
//    pool it as a literal.
// Clobbers AT in the first two modes.
void CodeGeneratorMIPS64::LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    PcRelativePatchInfo* info_high = NewBootImageIntrinsicPatch(boot_image_reference);
    PcRelativePatchInfo* info_low = NewBootImageIntrinsicPatch(boot_image_reference, info_high);
    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
    __ Daddiu(reg, AT, /* imm16= */ 0x5678);
  } else if (GetCompilerOptions().GetCompilePic()) {
    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_reference);
    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_reference, info_high);
    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
    // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
    __ Lwu(reg, AT, /* imm16= */ 0x5678);
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    uintptr_t address =
        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
    __ LoadLiteral(reg, kLoadDoubleword, DeduplicateBootImageAddressLiteral(address));
  }
}
1698
Vladimir Marko6fd16062018-06-26 11:02:04 +01001699void CodeGeneratorMIPS64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1700 uint32_t boot_image_offset) {
1701 DCHECK(invoke->IsStatic());
1702 InvokeRuntimeCallingConvention calling_convention;
1703 GpuRegister argument = calling_convention.GetRegisterAt(0);
1704 if (GetCompilerOptions().IsBootImage()) {
1705 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1706 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1707 MethodReference target_method = invoke->GetTargetMethod();
1708 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1709 PcRelativePatchInfo* info_high = NewBootImageTypePatch(*target_method.dex_file, type_idx);
1710 PcRelativePatchInfo* info_low =
1711 NewBootImageTypePatch(*target_method.dex_file, type_idx, info_high);
1712 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Andreas Gampe3db70682018-12-26 15:12:03 -08001713 __ Daddiu(argument, AT, /* imm16= */ 0x5678);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001714 } else {
1715 LoadBootImageAddress(argument, boot_image_offset);
1716 }
1717 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1718 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1719}
1720
Alexey Frunze627c1a02017-01-30 19:28:14 -08001721Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1722 dex::StringIndex string_index,
1723 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001724 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001725 return jit_string_patches_.GetOrCreate(
1726 StringReference(&dex_file, string_index),
Andreas Gampe3db70682018-12-26 15:12:03 -08001727 [this]() { return __ NewLiteral<uint32_t>(/* value= */ 0u); });
Alexey Frunze627c1a02017-01-30 19:28:14 -08001728}
1729
1730Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
1731 dex::TypeIndex type_index,
1732 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001733 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001734 return jit_class_patches_.GetOrCreate(
1735 TypeReference(&dex_file, type_index),
Andreas Gampe3db70682018-12-26 15:12:03 -08001736 [this]() { return __ NewLiteral<uint32_t>(/* value= */ 0u); });
Alexey Frunze627c1a02017-01-30 19:28:14 -08001737}
1738
1739void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1740 const uint8_t* roots_data,
1741 const Literal* literal,
1742 uint64_t index_in_table) const {
1743 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1744 uintptr_t address =
1745 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1746 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1747}
1748
1749void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1750 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001751 const StringReference& string_reference = entry.first;
1752 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001753 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001754 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001755 }
1756 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001757 const TypeReference& type_reference = entry.first;
1758 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001759 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001760 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001761 }
1762}
1763
David Brazdil58282f42016-01-14 12:45:10 +00001764void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001765 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1766 blocked_core_registers_[ZERO] = true;
1767 blocked_core_registers_[K0] = true;
1768 blocked_core_registers_[K1] = true;
1769 blocked_core_registers_[GP] = true;
1770 blocked_core_registers_[SP] = true;
1771 blocked_core_registers_[RA] = true;
1772
Lazar Trsicd9672662015-09-03 17:33:01 +02001773 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
1774 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -07001775 blocked_core_registers_[AT] = true;
1776 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +02001777 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001778 blocked_fpu_registers_[FTMP] = true;
1779
Goran Jakovljevice7de5ec2017-12-14 10:25:20 +01001780 if (GetInstructionSetFeatures().HasMsa()) {
1781 // To be used just for MSA instructions.
1782 blocked_fpu_registers_[FTMP2] = true;
1783 }
1784
Alexey Frunze4dda3372015-06-01 18:31:49 -07001785 // Reserve suspend and thread registers.
1786 blocked_core_registers_[S0] = true;
1787 blocked_core_registers_[TR] = true;
1788
1789 // Reserve T9 for function calls
1790 blocked_core_registers_[T9] = true;
1791
Goran Jakovljevic782be112016-06-21 12:39:04 +02001792 if (GetGraph()->IsDebuggable()) {
1793 // Stubs do not save callee-save floating point registers. If the graph
1794 // is debuggable, we need to deal with these registers differently. For
1795 // now, just block them.
1796 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1797 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1798 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001799 }
1800}
1801
// Spills core register `reg_id` to the stack slot at `stack_index` and
// returns the slot size consumed (one 64-bit doubleword).
size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1806
// Reloads core register `reg_id` from the stack slot at `stack_index` and
// returns the slot size consumed (one 64-bit doubleword).
size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1811
1812size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001813 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1814 FpuRegister(reg_id),
1815 SP,
1816 stack_index);
1817 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001818}
1819
1820size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001821 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1822 FpuRegister(reg_id),
1823 SP,
1824 stack_index);
1825 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001826}
1827
// Prints the name of core register `reg` (used by codegen debug dumps).
void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}
1831
// Prints the name of floating-point register `reg` (used by codegen debug dumps).
void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}
1835
// Returns the MIPS64-specific view of the instruction set features carried
// by the compiler options.
const Mips64InstructionSetFeatures& CodeGeneratorMIPS64::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsMips64InstructionSetFeatures();
}
1839
// Calls the given quick runtime entrypoint. When the entrypoint requires a
// stack map, the PC info for `dex_pc` is recorded right after the call so
// the return address can be mapped back to the dex instruction.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1850
// Calls a runtime entrypoint at a raw thread offset without recording any
// PC info; only valid for entrypoints that do not require a stack map
// (validated below).
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1857
// Emits the actual runtime call: loads the entrypoint pointer from the
// thread register at `entry_point_offset`, calls through T9 (the register
// reserved above for function calls), and fills the branch delay slot.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1863
// Branches to `slow_path` if the class in `class_reg` is not initialized.
// The class status is packed above the subtype-check bits in the status
// word, so only the byte containing the status needs to be examined.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  // Bit position of the status within the 32-bit status word.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  // Byte of the status word that holds the status bits.
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  // kInitialized shifted to its in-byte position, for an unsigned compare.
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  __ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
  // TMP = (status byte < kInitialized) ? 1 : 0; non-zero means "not initialized yet".
  __ Sltiu(TMP, TMP, shifted_initialized_value);
  __ Bnezc(TMP, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1879
// Compares the bitstring bits of the class status word in `temp` against the
// expected path-to-root of `check`. On exit, `temp` is zero iff the bits
// selected by the mask equal the expected path (callers branch on `temp`).
void InstructionCodeGeneratorMIPS64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       GpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  // The mask must cover a contiguous low-bit range (2^k - 1).
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Load only the bitstring part of the status word.
    __ LoadFromOffset(
        kLoadUnsignedHalfword, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR.
    __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ LoadFromOffset(kLoadWord, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR.
    if (IsUint<16>(path_to_root)) {
      // The expected value fits in Xori's zero-extended 16-bit immediate.
      __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
    } else {
      // Materialize the expected value first; XOR with a register operand.
      __ LoadConst32(TMP, path_to_root);
      __ Xor(temp, temp, TMP);
    }
    // Shift out bits that do not contribute to the comparison.
    __ Sll(temp, temp, 32 - mask_bits);
  }
}
1907
// Emits a memory barrier. Only SYNC stype 0 (a full barrier) is emitted, so
// the requested barrier kind is ignored.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1911
// Emits a suspend check: tests the thread flags and diverts to a (shared,
// cached-on-the-instruction) slow path when any flag is set.
// With a null `successor` the check falls through after the slow path; with
// a non-null `successor` (which must be a loop header) the fast path
// branches straight to it.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path already attached to this instruction, if any.
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must agree on where to resume.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // TMP = thread flags; non-zero means a suspend (or similar) request is pending.
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1942
// Caches the assembler and code generator pointers used by the Visit*
// implementations below.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1948
// Sets up register/constant locations for Add/Sub/And/Or/Xor.
// The second operand may stay a constant when the code generator can encode
// it as an immediate (possibly via a short multi-instruction sequence).
void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  DataType::Type type = instruction->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // Andi/Ori/Xori zero-extend their 16-bit immediates.
          can_use_imm = IsUint<16>(imm);
        } else {
          DCHECK(instruction->IsAdd() || instruction->IsSub());
          bool single_use = right->GetUses().HasExactlyOneElement();
          if (instruction->IsSub()) {
            // Sub is handled as Add of the negated immediate; the 32-bit
            // INT32_MIN is left as-is (its two's-complement negation wraps
            // back to the same value).
            if (!(type == DataType::Type::kInt32 && imm == INT32_MIN)) {
              imm = -imm;
            }
          }
          // Allow an immediate when it fits one instruction (Addiu/Daddiu,
          // or Aui/Daui for a pure high half), or when the constant has no
          // other use — then an immediate sequence beats materializing it
          // in a register first.
          if (type == DataType::Type::kInt32) {
            can_use_imm = IsInt<16>(imm) || (Low16Bits(imm) == 0) || single_use;
          } else {
            can_use_imm = IsInt<16>(imm) || (IsInt<32>(imm) && (Low16Bits(imm) == 0)) || single_use;
          }
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP operands and result always live in FPU registers.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1997
// Emits code for Add/Sub/And/Or/Xor. Integer immediates that do not fit a
// single instruction are synthesized with Aui/Daui (high halfword) plus
// Addiu/Daddiu (low halfword), and for full 64-bit constants additionally
// Dahi/Dati, with carry corrections whenever a lower halfword is negative.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd() || instruction->IsSub()) {
        // Subtraction of an immediate is addition of its negation.
        if (instruction->IsSub()) {
          rhs_imm = -rhs_imm;
        }
        if (type == DataType::Type::kInt32) {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Addiu(dst, lhs, rhs_imm);
            } else {
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              // Addiu sign-extends the low half, so a negative low half
              // borrows 1 from the high half; pre-compensate.
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
              }
              __ Aui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Addiu(dst, dst, rhs_imm_low);
              }
            }
          } else {
            if (instruction->IsAdd()) {
              __ Addu(dst, lhs, rhs_reg);
            } else {
              DCHECK(instruction->IsSub());
              __ Subu(dst, lhs, rhs_reg);
            }
          }
        } else {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Daddiu(dst, lhs, rhs_imm);
            } else if (IsInt<32>(rhs_imm)) {
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              bool overflow_hi16 = false;
              // Compensate the sign extension of the low half; if that
              // wraps the high half around, fix bits 32..47 with Dahi.
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
                overflow_hi16 = (rhs_imm_high == -32768);
              }
              __ Daui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, dst, rhs_imm_low);
              }
              if (overflow_hi16) {
                __ Dahi(dst, 1);
              }
            } else {
              // Full 64-bit immediate: propagate the borrow caused by each
              // sign-extended halfword into the next-higher halfword.
              int16_t rhs_imm_low = Low16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_low < 0) {
                rhs_imm += (INT64_C(1) << 16);
              }
              int16_t rhs_imm_upper = High16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_upper < 0) {
                rhs_imm += (INT64_C(1) << 32);
              }
              int16_t rhs_imm_high = Low16Bits(High32Bits(rhs_imm));
              if (rhs_imm_high < 0) {
                rhs_imm += (INT64_C(1) << 48);
              }
              int16_t rhs_imm_top = High16Bits(High32Bits(rhs_imm));
              GpuRegister tmp = lhs;
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, tmp, rhs_imm_low);
                tmp = dst;
              }
              // Dahi and Dati must use the same input and output register, so we have to initialize
              // the dst register using Daddiu or Daui, even when the intermediate value is zero:
              // Daui(dst, lhs, 0).
              if ((rhs_imm_upper != 0) || (rhs_imm_low == 0)) {
                __ Daui(dst, tmp, rhs_imm_upper);
              }
              if (rhs_imm_high != 0) {
                __ Dahi(dst, rhs_imm_high);
              }
              if (rhs_imm_top != 0) {
                __ Dati(dst, rhs_imm_top);
              }
            }
          } else if (instruction->IsAdd()) {
            __ Daddu(dst, lhs, rhs_reg);
          } else {
            DCHECK(instruction->IsSub());
            __ Dsubu(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2145
2146void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002147 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002148
Vladimir Markoca6fff82017-10-03 14:49:14 +01002149 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002150 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002151 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002152 case DataType::Type::kInt32:
2153 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002154 locations->SetInAt(0, Location::RequiresRegister());
2155 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002156 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002157 break;
2158 }
2159 default:
2160 LOG(FATAL) << "Unexpected shift type " << type;
2161 }
2162}
2163
// Emits code for Shl/Shr/UShr/Ror. Constant shift distances are masked to
// the type width (5 bits for int, 6 for long); 64-bit shifts by 32..63 use
// the dedicated "32" instruction forms with the distance reduced by 32.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the distance like the hardware does for register operands.
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain copy (skipped when src == dst).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // Distances 32..63 are encoded as (distance - 32) in the
            // Dsll32/Dsra32/Dsrl32/Drotr32 forms.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2256
// Add shares location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2260
// Add shares code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2264
// And shares location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2268
// And shares code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2272
// Sets up locations for an array element load. Object-array loads under read
// barriers may need a slow path call, an overlapping output, and (for the
// non-thunk Baker configuration) a temp register.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: array reference; input 1: index (register or constant).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // With the thunk-based Baker read barriers the temp is not needed.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2309
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002310static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2311 auto null_checker = [codegen, instruction]() {
2312 codegen->MaybeRecordImplicitNullCheck(instruction);
2313 };
2314 return null_checker;
2315}
2316
Alexey Frunze4dda3372015-06-01 18:31:49 -07002317void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
2318 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002319 Location obj_loc = locations->InAt(0);
2320 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
2321 Location out_loc = locations->Out();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002322 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002323 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002324 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002325
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002326 DataType::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002327 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2328 instruction->IsStringCharAt();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002329 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002330 case DataType::Type::kBool:
2331 case DataType::Type::kUint8: {
Alexey Frunze15958152017-02-09 19:08:30 -08002332 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002333 if (index.IsConstant()) {
2334 size_t offset =
2335 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002336 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002337 } else {
2338 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002339 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002340 }
2341 break;
2342 }
2343
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002344 case DataType::Type::kInt8: {
Alexey Frunze15958152017-02-09 19:08:30 -08002345 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002346 if (index.IsConstant()) {
2347 size_t offset =
2348 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002349 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002350 } else {
2351 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002352 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002353 }
2354 break;
2355 }
2356
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002357 case DataType::Type::kUint16: {
Alexey Frunze15958152017-02-09 19:08:30 -08002358 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002359 if (maybe_compressed_char_at) {
2360 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002361 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002362 __ Dext(TMP, TMP, 0, 1);
2363 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2364 "Expecting 0=compressed, 1=uncompressed");
2365 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002366 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002367 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2368 if (maybe_compressed_char_at) {
2369 Mips64Label uncompressed_load, done;
2370 __ Bnezc(TMP, &uncompressed_load);
2371 __ LoadFromOffset(kLoadUnsignedByte,
2372 out,
2373 obj,
2374 data_offset + (const_index << TIMES_1));
2375 __ Bc(&done);
2376 __ Bind(&uncompressed_load);
2377 __ LoadFromOffset(kLoadUnsignedHalfword,
2378 out,
2379 obj,
2380 data_offset + (const_index << TIMES_2));
2381 __ Bind(&done);
2382 } else {
2383 __ LoadFromOffset(kLoadUnsignedHalfword,
2384 out,
2385 obj,
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002386 data_offset + (const_index << TIMES_2),
2387 null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002388 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002389 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002390 GpuRegister index_reg = index.AsRegister<GpuRegister>();
2391 if (maybe_compressed_char_at) {
2392 Mips64Label uncompressed_load, done;
2393 __ Bnezc(TMP, &uncompressed_load);
2394 __ Daddu(TMP, obj, index_reg);
2395 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2396 __ Bc(&done);
2397 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002398 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002399 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2400 __ Bind(&done);
2401 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002402 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002403 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002404 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002405 }
2406 break;
2407 }
2408
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002409 case DataType::Type::kInt16: {
2410 GpuRegister out = out_loc.AsRegister<GpuRegister>();
2411 if (index.IsConstant()) {
2412 size_t offset =
2413 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2414 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
2415 } else {
2416 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
2417 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
2418 }
2419 break;
2420 }
2421
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002422 case DataType::Type::kInt32: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002423 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002424 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002425 LoadOperandType load_type =
2426 (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002427 if (index.IsConstant()) {
2428 size_t offset =
2429 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002430 __ LoadFromOffset(load_type, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002431 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002432 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002433 __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002434 }
2435 break;
2436 }
2437
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002438 case DataType::Type::kReference: {
Alexey Frunze15958152017-02-09 19:08:30 -08002439 static_assert(
2440 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2441 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2442 // /* HeapReference<Object> */ out =
2443 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2444 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002445 bool temp_needed = index.IsConstant()
2446 ? !kBakerReadBarrierThunksEnableForFields
2447 : !kBakerReadBarrierThunksEnableForArrays;
2448 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002449 // Note that a potential implicit null check is handled in this
2450 // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002451 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2452 if (index.IsConstant()) {
2453 // Array load with a constant index can be treated as a field load.
2454 size_t offset =
2455 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2456 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2457 out_loc,
2458 obj,
2459 offset,
2460 temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08002461 /* needs_null_check= */ false);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002462 } else {
2463 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2464 out_loc,
2465 obj,
2466 data_offset,
2467 index,
2468 temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08002469 /* needs_null_check= */ false);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002470 }
Alexey Frunze15958152017-02-09 19:08:30 -08002471 } else {
2472 GpuRegister out = out_loc.AsRegister<GpuRegister>();
2473 if (index.IsConstant()) {
2474 size_t offset =
2475 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2476 __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
2477 // If read barriers are enabled, emit read barriers other than
2478 // Baker's using a slow path (and also unpoison the loaded
2479 // reference, if heap poisoning is enabled).
2480 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2481 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002482 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002483 __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
2484 // If read barriers are enabled, emit read barriers other than
2485 // Baker's using a slow path (and also unpoison the loaded
2486 // reference, if heap poisoning is enabled).
2487 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2488 out_loc,
2489 out_loc,
2490 obj_loc,
2491 data_offset,
2492 index);
2493 }
2494 }
2495 break;
2496 }
2497
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002498 case DataType::Type::kInt64: {
Alexey Frunze15958152017-02-09 19:08:30 -08002499 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002500 if (index.IsConstant()) {
2501 size_t offset =
2502 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002503 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002504 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002505 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002506 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002507 }
2508 break;
2509 }
2510
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002511 case DataType::Type::kFloat32: {
Alexey Frunze15958152017-02-09 19:08:30 -08002512 FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002513 if (index.IsConstant()) {
2514 size_t offset =
2515 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002516 __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002517 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002518 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002519 __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002520 }
2521 break;
2522 }
2523
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002524 case DataType::Type::kFloat64: {
Alexey Frunze15958152017-02-09 19:08:30 -08002525 FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002526 if (index.IsConstant()) {
2527 size_t offset =
2528 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002529 __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002530 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002531 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002532 __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002533 }
2534 break;
2535 }
2536
Aart Bik66c158e2018-01-31 12:55:04 -08002537 case DataType::Type::kUint32:
2538 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002539 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002540 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2541 UNREACHABLE();
2542 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002543}
2544
2545void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002546 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002547 locations->SetInAt(0, Location::RequiresRegister());
2548 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2549}
2550
2551void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
2552 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002553 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002554 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2555 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2556 __ LoadFromOffset(kLoadWord, out, obj, offset);
2557 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002558 // Mask out compression flag from String's array length.
2559 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2560 __ Srl(out, out, 1u);
2561 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002562}
2563
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002564Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2565 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2566 ? Location::ConstantLocation(instruction->AsConstant())
2567 : Location::RequiresRegister();
2568}
2569
2570Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2571 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2572 // We can store a non-zero float or double constant without first loading it into the FPU,
2573 // but we should only prefer this if the constant has a single use.
2574 if (instruction->IsConstant() &&
2575 (instruction->AsConstant()->IsZeroBitPattern() ||
2576 instruction->GetUses().HasExactlyOneElement())) {
2577 return Location::ConstantLocation(instruction->AsConstant());
2578 // Otherwise fall through and require an FPU register for the constant.
2579 }
2580 return Location::RequiresFpuRegister();
2581}
2582
Alexey Frunze4dda3372015-06-01 18:31:49 -07002583void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002584 DataType::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002585
2586 bool needs_write_barrier =
2587 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2588 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2589
Vladimir Markoca6fff82017-10-03 14:49:14 +01002590 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze4dda3372015-06-01 18:31:49 -07002591 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002592 may_need_runtime_call_for_type_check ?
2593 LocationSummary::kCallOnSlowPath :
2594 LocationSummary::kNoCall);
2595
2596 locations->SetInAt(0, Location::RequiresRegister());
2597 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002598 if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
Alexey Frunze15958152017-02-09 19:08:30 -08002599 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002600 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002601 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2602 }
2603 if (needs_write_barrier) {
2604 // Temporary register for the write barrier.
2605 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002606 }
2607}
2608
// Generates code for HArraySet: stores `value` (input 2) into the array `obj`
// (input 0) at `index` (input 1), switching on the array's component type.
// Primitive stores go straight to memory; reference stores may additionally
// need a runtime type check (via a slow path) and a GC card mark.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the element address is obj + folded offset; with a
  // register index the address is computed below into TMP.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        // Fold the scaled index into the data offset (scale 1 for byte elements).
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        // TMP is passed as the scratch register for materializing the constant.
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // Dlsa computes base_reg = (index << TIMES_2) + obj in one instruction.
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null: no type check and no write barrier are needed.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null never fails the type check: store it directly and
          // skip the class comparison below.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Fast path: the value's class equals the component type, or the
          // component type's superclass is null (i.e. it is java.lang.Object).
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // Without a slow path, this store is the first access and carries the
        // implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        // Constant float stored via its bit pattern from a core register.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        // Constant double stored via its bit pattern from a core register.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2849
2850void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002851 RegisterSet caller_saves = RegisterSet::Empty();
2852 InvokeRuntimeCallingConvention calling_convention;
2853 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2854 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2855 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002856
2857 HInstruction* index = instruction->InputAt(0);
2858 HInstruction* length = instruction->InputAt(1);
2859
2860 bool const_index = false;
2861 bool const_length = false;
2862
2863 if (index->IsConstant()) {
2864 if (length->IsConstant()) {
2865 const_index = true;
2866 const_length = true;
2867 } else {
2868 int32_t index_value = index->AsIntConstant()->GetValue();
2869 if (index_value < 0 || IsInt<16>(index_value + 1)) {
2870 const_index = true;
2871 }
2872 }
2873 } else if (length->IsConstant()) {
2874 int32_t length_value = length->AsIntConstant()->GetValue();
2875 if (IsUint<15>(length_value)) {
2876 const_length = true;
2877 }
2878 }
2879
2880 locations->SetInAt(0, const_index
2881 ? Location::ConstantLocation(index->AsConstant())
2882 : Location::RequiresRegister());
2883 locations->SetInAt(1, const_length
2884 ? Location::ConstantLocation(length->AsConstant())
2885 : Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002886}
2887
// Generates the bounds check for an array access, picking the cheapest branch
// sequence based on which operands are compile-time constants. Out-of-bounds
// control flow jumps to a BoundsCheckSlowPathMIPS64.
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = length_loc.GetConstant()->AsIntConstant()->GetValue();
    if (index_loc.IsConstant()) {
      // Both operands constant: the check is fully resolved here; emit either
      // an unconditional jump to the slow path or nothing at all.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0 || index >= length) {
        BoundsCheckSlowPathMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        __ Bc(slow_path->GetEntryLabel());
      } else {
        // Nothing to be done.
      }
      return;
    }

    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    GpuRegister index = index_loc.AsRegister<GpuRegister>();
    if (length == 0) {
      // Every index is out of bounds for a zero-length array.
      __ Bc(slow_path->GetEntryLabel());
    } else if (length == 1) {
      // Only index 0 is in bounds.
      __ Bnezc(index, slow_path->GetEntryLabel());
    } else {
      // Unsigned compare folds the negative-index case into index >= length.
      // The length must fit Sltiu's immediate; guaranteed by the locations
      // builder, which only keeps lengths satisfying IsUint<15> as constants.
      DCHECK(IsUint<15>(length)) << length;
      __ Sltiu(TMP, index, length);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
    }
  } else {
    GpuRegister length = length_loc.AsRegister<GpuRegister>();
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    if (index_loc.IsConstant()) {
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0) {
        // A negative constant index is always out of bounds.
        __ Bc(slow_path->GetEntryLabel());
      } else if (index == 0) {
        // Index 0 is out of bounds iff length <= 0.
        __ Blezc(length, slow_path->GetEntryLabel());
      } else {
        // TMP = (length < index + 1), i.e. length <= index -> out of bounds.
        // index + 1 must fit Sltiu's immediate; guaranteed by the locations
        // builder, which only keeps such indices as constants.
        DCHECK(IsInt<16>(index + 1)) << index;
        __ Sltiu(TMP, length, index + 1);
        __ Bnezc(TMP, slow_path->GetEntryLabel());
      }
    } else {
      // General case: branch if index >= length (unsigned, so a negative
      // index also takes the slow path).
      __ Bgeuc(index, length, slow_path->GetEntryLabel());
    }
  }
}
2943
Alexey Frunze15958152017-02-09 19:08:30 -08002944// Temp is used for read barrier.
2945static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2946 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002947 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002948 (kUseBakerReadBarrier ||
2949 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2950 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2951 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2952 return 1;
2953 }
2954 return 0;
2955}
2956
2957// Extra temp is used for read barrier.
2958static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2959 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2960}
2961
Alexey Frunze4dda3372015-06-01 18:31:49 -07002962void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002963 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002964 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002965 LocationSummary* locations =
2966 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002967 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00002968 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
2969 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2970 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2971 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
2972 } else {
2973 locations->SetInAt(1, Location::RequiresRegister());
2974 }
Alexey Frunze15958152017-02-09 19:08:30 -08002975 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002976}
2977
// Emits the code for HCheckCast: verifies that the object in input 0 is assignable
// to the class/bitstring described by the remaining inputs, branching to a
// TypeCheckSlowPathMIPS64 on failure. When MustDoNullCheck() is set, a null object
// skips the check entirely (a null reference passes any cast).
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // Second temp exists only when a read barrier needs it (see NumberOfCheckCastTemps).
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  // Field/array offsets inside the runtime's mirror:: object layouts.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls.AsRegister<GpuRegister>(), &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // A non-zero primitive type means a primitive array: fail the cast.
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      Mips64Label loop;
      __ Bind(&loop);
      // TMP holds the remaining entry count; zero means no interface matched.
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      // Each iftable entry occupies two heap references, hence the stride of 2.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls.AsRegister<GpuRegister>(), &loop);
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // A non-zero comparison result in `temp` means the bitstring did not match.
      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
3167
3168void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
3169 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003170 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003171 locations->SetInAt(0, Location::RequiresRegister());
3172 if (check->HasUses()) {
3173 locations->SetOut(Location::SameAsFirstInput());
3174 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01003175 // Rely on the type initialization to save everything we need.
3176 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003177}
3178
3179void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
3180 // We assume the class is not null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01003181 SlowPathCodeMIPS64* slow_path =
3182 new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(check->GetLoadClass(), check);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003183 codegen_->AddSlowPath(slow_path);
3184 GenerateClassInitializationCheck(slow_path,
3185 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
3186}
3187
3188void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003189 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003190
Vladimir Markoca6fff82017-10-03 14:49:14 +01003191 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003192
3193 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003194 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003195 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003196 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003197 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003198 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003199 case DataType::Type::kInt32:
3200 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003201 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07003202 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003203 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3204 break;
3205
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003206 case DataType::Type::kFloat32:
3207 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003208 locations->SetInAt(0, Location::RequiresFpuRegister());
3209 locations->SetInAt(1, Location::RequiresFpuRegister());
3210 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003211 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003212
3213 default:
3214 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3215 }
3216}
3217
// Emits code for HCompare, producing -1/0/1 in the output register.
// For floating point, the result for unordered (NaN) operands depends on the
// instruction's gt-bias flag (see the per-branch notes below).
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;
      if (use_imm) {
        // Materialize a non-zero constant RHS into AT; a zero constant uses
        // the ZERO register directly with no load.
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (lhs > rhs) - (lhs < rhs), i.e. the sign of the comparison.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal operands yield 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // gt-bias: lhs < rhs yields -1; everything else (including NaN) yields 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // lt-bias: rhs < lhs yields 1; everything else (including NaN) yields -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal operands yield 0.
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // gt-bias: lhs < rhs yields -1; everything else (including NaN) yields 1.
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // lt-bias: rhs < lhs yields 1; everything else (including NaN) yields -1.
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3309
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003310void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003311 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003312 switch (instruction->InputAt(0)->GetType()) {
3313 default:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003314 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003315 locations->SetInAt(0, Location::RequiresRegister());
3316 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3317 break;
3318
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003319 case DataType::Type::kFloat32:
3320 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003321 locations->SetInAt(0, Location::RequiresFpuRegister());
3322 locations->SetInAt(1, Location::RequiresFpuRegister());
3323 break;
3324 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003325 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003326 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3327 }
3328}
3329
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003330void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003331 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003332 return;
3333 }
3334
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003335 DataType::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003336 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003337 switch (type) {
3338 default:
3339 // Integer case.
Andreas Gampe3db70682018-12-26 15:12:03 -08003340 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit= */ false, locations);
Alexey Frunze299a9392015-12-08 16:08:02 -08003341 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003342 case DataType::Type::kInt64:
Andreas Gampe3db70682018-12-26 15:12:03 -08003343 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit= */ true, locations);
Alexey Frunze299a9392015-12-08 16:08:02 -08003344 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003345 case DataType::Type::kFloat32:
3346 case DataType::Type::kFloat64:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003347 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3348 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003349 }
3350}
3351
Alexey Frunzec857c742015-09-23 15:12:39 -07003352void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3353 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003354 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003355
3356 LocationSummary* locations = instruction->GetLocations();
3357 Location second = locations->InAt(1);
3358 DCHECK(second.IsConstant());
3359
3360 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3361 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3362 int64_t imm = Int64FromConstant(second.GetConstant());
3363 DCHECK(imm == 1 || imm == -1);
3364
3365 if (instruction->IsRem()) {
3366 __ Move(out, ZERO);
3367 } else {
3368 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003369 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003370 __ Subu(out, ZERO, dividend);
3371 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003372 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003373 __ Dsubu(out, ZERO, dividend);
3374 }
3375 } else if (out != dividend) {
3376 __ Move(out, dividend);
3377 }
3378 }
3379}
3380
// Emits div/rem by a (positive or negative) power-of-two constant using shifts.
// Signed division truncates toward zero, so negative dividends are biased by
// abs(divisor)-1 before the arithmetic shift.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin keeps INT64_MIN as-is; its magnitude is still a power of two.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);  // log2 of the divisor's magnitude.

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);  // Bias is just the sign bit.
      } else {
        // TMP = abs_imm - 1 when dividend is negative, else 0.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        // TMP = abs_imm - 1 when dividend is negative, else 0.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      // Dsra handles shift amounts < 32; Dsra32 covers 32..63.
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // Bias, keep the low ctz_imm bits, then remove the bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        __ Ins(out, ZERO, ctz_imm, 32 - ctz_imm);  // Clear bits above ctz_imm.
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        // Bias, keep the low ctz_imm bits, then remove the bias.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        __ DblIns(out, ZERO, ctz_imm, 64 - ctz_imm);  // Clear bits above ctz_imm.
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3469
// Emits div/rem by an arbitrary constant (|imm| >= 2, not a power of two) using a
// "magic number" multiply-high sequence instead of a hardware divide.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);  // TMP = high 32 bits of dividend * magic.

    // Correct when the magic constant's sign disagrees with the divisor's.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // out = TMP - (TMP >> 31): adds one when TMP is negative (round toward zero).
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Remainder: AT = quotient, then out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);  // TMP = high 64 bits of dividend * magic.

    // Correct when the magic constant's sign disagrees with the divisor's.
    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // Dsra32 covers shift amounts >= 32; Dsra the rest.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // out = TMP - (TMP >> 63): adds one when TMP is negative (round toward zero).
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      // Remainder: AT = quotient, then out = dividend - quotient * imm.
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3543
3544void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3545 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003546 DataType::Type type = instruction->GetResultType();
3547 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Alexey Frunzec857c742015-09-23 15:12:39 -07003548
3549 LocationSummary* locations = instruction->GetLocations();
3550 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3551 Location second = locations->InAt(1);
3552
3553 if (second.IsConstant()) {
3554 int64_t imm = Int64FromConstant(second.GetConstant());
3555 if (imm == 0) {
3556 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3557 } else if (imm == 1 || imm == -1) {
3558 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003559 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003560 DivRemByPowerOfTwo(instruction);
3561 } else {
3562 DCHECK(imm <= -2 || imm >= 2);
3563 GenerateDivRemWithAnyConstant(instruction);
3564 }
3565 } else {
3566 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3567 GpuRegister divisor = second.AsRegister<GpuRegister>();
3568 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003569 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003570 __ DivR6(out, dividend, divisor);
3571 else
3572 __ Ddiv(out, dividend, divisor);
3573 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003574 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003575 __ ModR6(out, dividend, divisor);
3576 else
3577 __ Dmod(out, dividend, divisor);
3578 }
3579 }
3580}
3581
Alexey Frunze4dda3372015-06-01 18:31:49 -07003582void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3583 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003584 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003585 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003586 case DataType::Type::kInt32:
3587 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003588 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003589 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003590 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3591 break;
3592
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003593 case DataType::Type::kFloat32:
3594 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003595 locations->SetInAt(0, Location::RequiresFpuRegister());
3596 locations->SetInAt(1, Location::RequiresFpuRegister());
3597 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3598 break;
3599
3600 default:
3601 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3602 }
3603}
3604
3605void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003606 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003607 LocationSummary* locations = instruction->GetLocations();
3608
3609 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003610 case DataType::Type::kInt32:
3611 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07003612 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003613 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003614 case DataType::Type::kFloat32:
3615 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003616 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3617 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3618 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003619 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07003620 __ DivS(dst, lhs, rhs);
3621 else
3622 __ DivD(dst, lhs, rhs);
3623 break;
3624 }
3625 default:
3626 LOG(FATAL) << "Unexpected div type " << type;
3627 }
3628}
3629
3630void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003631 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003632 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003633}
3634
// Emits the divide-by-zero check: branches to a slow path that throws
// ArithmeticException when the divisor is zero. Only integral types are valid.
void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    UNREACHABLE();
  }

  if (value.IsConstant()) {
    // Constant divisor: the check is resolved at compile time.
    int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
    if (divisor == 0) {
      // Unconditionally take the slow path.
      __ Bc(slow_path->GetEntryLabel());
    } else {
      // A division by a non-null constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    // Register divisor: branch to the slow path when it is zero.
    __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
3660
3661void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3662 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003663 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003664 locations->SetOut(Location::ConstantLocation(constant));
3665}
3666
// No code is emitted for a double constant here; it is generated at its use site.
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3670
// The exit instruction needs no locations.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3674
// The exit block emits no code of its own.
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3677
3678void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3679 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003680 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003681 locations->SetOut(Location::ConstantLocation(constant));
3682}
3683
// No code is emitted for a float constant here; it is generated at its use site.
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3687
David Brazdilfc6a86a2015-06-26 10:33:45 +00003688void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003689 if (successor->IsExitBlock()) {
3690 DCHECK(got->GetPrevious()->AlwaysThrows());
3691 return; // no code needed
3692 }
3693
Alexey Frunze4dda3372015-06-01 18:31:49 -07003694 HBasicBlock* block = got->GetBlock();
3695 HInstruction* previous = got->GetPrevious();
3696 HLoopInformation* info = block->GetLoopInformation();
3697
3698 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Goran Jakovljevicfeec1672018-02-08 10:20:14 +01003699 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
3700 __ Ld(AT, SP, kCurrentMethodStackOffset);
3701 __ Lhu(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
3702 __ Addiu(TMP, TMP, 1);
3703 __ Sh(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
3704 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003705 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3706 return;
3707 }
3708 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3709 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3710 }
3711 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003712 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003713 }
3714}
3715
David Brazdilfc6a86a2015-06-26 10:33:45 +00003716void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
3717 got->SetLocations(nullptr);
3718}
3719
3720void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
3721 HandleGoto(got, got->GetSuccessor());
3722}
3723
// A try boundary needs no locations.
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3727
3728void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3729 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3730 if (!successor->IsExitBlock()) {
3731 HandleGoto(try_boundary, successor);
3732 }
3733}
3734
Alexey Frunze299a9392015-12-08 16:08:02 -08003735void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
3736 bool is64bit,
3737 LocationSummary* locations) {
3738 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3739 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3740 Location rhs_location = locations->InAt(1);
3741 GpuRegister rhs_reg = ZERO;
3742 int64_t rhs_imm = 0;
3743 bool use_imm = rhs_location.IsConstant();
3744 if (use_imm) {
3745 if (is64bit) {
3746 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3747 } else {
3748 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3749 }
3750 } else {
3751 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3752 }
3753 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3754
3755 switch (cond) {
3756 case kCondEQ:
3757 case kCondNE:
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003758 if (use_imm && IsInt<16>(-rhs_imm)) {
3759 if (rhs_imm == 0) {
3760 if (cond == kCondEQ) {
3761 __ Sltiu(dst, lhs, 1);
3762 } else {
3763 __ Sltu(dst, ZERO, lhs);
3764 }
3765 } else {
3766 if (is64bit) {
3767 __ Daddiu(dst, lhs, -rhs_imm);
3768 } else {
3769 __ Addiu(dst, lhs, -rhs_imm);
3770 }
3771 if (cond == kCondEQ) {
3772 __ Sltiu(dst, dst, 1);
3773 } else {
3774 __ Sltu(dst, ZERO, dst);
3775 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003776 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003777 } else {
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003778 if (use_imm && IsUint<16>(rhs_imm)) {
3779 __ Xori(dst, lhs, rhs_imm);
3780 } else {
3781 if (use_imm) {
3782 rhs_reg = TMP;
3783 __ LoadConst64(rhs_reg, rhs_imm);
3784 }
3785 __ Xor(dst, lhs, rhs_reg);
3786 }
3787 if (cond == kCondEQ) {
3788 __ Sltiu(dst, dst, 1);
3789 } else {
3790 __ Sltu(dst, ZERO, dst);
3791 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003792 }
3793 break;
3794
3795 case kCondLT:
3796 case kCondGE:
3797 if (use_imm && IsInt<16>(rhs_imm)) {
3798 __ Slti(dst, lhs, rhs_imm);
3799 } else {
3800 if (use_imm) {
3801 rhs_reg = TMP;
3802 __ LoadConst64(rhs_reg, rhs_imm);
3803 }
3804 __ Slt(dst, lhs, rhs_reg);
3805 }
3806 if (cond == kCondGE) {
3807 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3808 // only the slt instruction but no sge.
3809 __ Xori(dst, dst, 1);
3810 }
3811 break;
3812
3813 case kCondLE:
3814 case kCondGT:
3815 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3816 // Simulate lhs <= rhs via lhs < rhs + 1.
3817 __ Slti(dst, lhs, rhs_imm_plus_one);
3818 if (cond == kCondGT) {
3819 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3820 // only the slti instruction but no sgti.
3821 __ Xori(dst, dst, 1);
3822 }
3823 } else {
3824 if (use_imm) {
3825 rhs_reg = TMP;
3826 __ LoadConst64(rhs_reg, rhs_imm);
3827 }
3828 __ Slt(dst, rhs_reg, lhs);
3829 if (cond == kCondLE) {
3830 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3831 // only the slt instruction but no sle.
3832 __ Xori(dst, dst, 1);
3833 }
3834 }
3835 break;
3836
3837 case kCondB:
3838 case kCondAE:
3839 if (use_imm && IsInt<16>(rhs_imm)) {
3840 // Sltiu sign-extends its 16-bit immediate operand before
3841 // the comparison and thus lets us compare directly with
3842 // unsigned values in the ranges [0, 0x7fff] and
3843 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3844 __ Sltiu(dst, lhs, rhs_imm);
3845 } else {
3846 if (use_imm) {
3847 rhs_reg = TMP;
3848 __ LoadConst64(rhs_reg, rhs_imm);
3849 }
3850 __ Sltu(dst, lhs, rhs_reg);
3851 }
3852 if (cond == kCondAE) {
3853 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3854 // only the sltu instruction but no sgeu.
3855 __ Xori(dst, dst, 1);
3856 }
3857 break;
3858
3859 case kCondBE:
3860 case kCondA:
3861 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3862 // Simulate lhs <= rhs via lhs < rhs + 1.
3863 // Note that this only works if rhs + 1 does not overflow
3864 // to 0, hence the check above.
3865 // Sltiu sign-extends its 16-bit immediate operand before
3866 // the comparison and thus lets us compare directly with
3867 // unsigned values in the ranges [0, 0x7fff] and
3868 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3869 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3870 if (cond == kCondA) {
3871 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3872 // only the sltiu instruction but no sgtiu.
3873 __ Xori(dst, dst, 1);
3874 }
3875 } else {
3876 if (use_imm) {
3877 rhs_reg = TMP;
3878 __ LoadConst64(rhs_reg, rhs_imm);
3879 }
3880 __ Sltu(dst, rhs_reg, lhs);
3881 if (cond == kCondBE) {
3882 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3883 // only the sltu instruction but no sleu.
3884 __ Xori(dst, dst, 1);
3885 }
3886 }
3887 break;
3888 }
3889}
3890
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02003891bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
3892 bool is64bit,
3893 LocationSummary* input_locations,
3894 GpuRegister dst) {
3895 GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
3896 Location rhs_location = input_locations->InAt(1);
3897 GpuRegister rhs_reg = ZERO;
3898 int64_t rhs_imm = 0;
3899 bool use_imm = rhs_location.IsConstant();
3900 if (use_imm) {
3901 if (is64bit) {
3902 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3903 } else {
3904 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3905 }
3906 } else {
3907 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3908 }
3909 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3910
3911 switch (cond) {
3912 case kCondEQ:
3913 case kCondNE:
3914 if (use_imm && IsInt<16>(-rhs_imm)) {
3915 if (is64bit) {
3916 __ Daddiu(dst, lhs, -rhs_imm);
3917 } else {
3918 __ Addiu(dst, lhs, -rhs_imm);
3919 }
3920 } else if (use_imm && IsUint<16>(rhs_imm)) {
3921 __ Xori(dst, lhs, rhs_imm);
3922 } else {
3923 if (use_imm) {
3924 rhs_reg = TMP;
3925 __ LoadConst64(rhs_reg, rhs_imm);
3926 }
3927 __ Xor(dst, lhs, rhs_reg);
3928 }
3929 return (cond == kCondEQ);
3930
3931 case kCondLT:
3932 case kCondGE:
3933 if (use_imm && IsInt<16>(rhs_imm)) {
3934 __ Slti(dst, lhs, rhs_imm);
3935 } else {
3936 if (use_imm) {
3937 rhs_reg = TMP;
3938 __ LoadConst64(rhs_reg, rhs_imm);
3939 }
3940 __ Slt(dst, lhs, rhs_reg);
3941 }
3942 return (cond == kCondGE);
3943
3944 case kCondLE:
3945 case kCondGT:
3946 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3947 // Simulate lhs <= rhs via lhs < rhs + 1.
3948 __ Slti(dst, lhs, rhs_imm_plus_one);
3949 return (cond == kCondGT);
3950 } else {
3951 if (use_imm) {
3952 rhs_reg = TMP;
3953 __ LoadConst64(rhs_reg, rhs_imm);
3954 }
3955 __ Slt(dst, rhs_reg, lhs);
3956 return (cond == kCondLE);
3957 }
3958
3959 case kCondB:
3960 case kCondAE:
3961 if (use_imm && IsInt<16>(rhs_imm)) {
3962 // Sltiu sign-extends its 16-bit immediate operand before
3963 // the comparison and thus lets us compare directly with
3964 // unsigned values in the ranges [0, 0x7fff] and
3965 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3966 __ Sltiu(dst, lhs, rhs_imm);
3967 } else {
3968 if (use_imm) {
3969 rhs_reg = TMP;
3970 __ LoadConst64(rhs_reg, rhs_imm);
3971 }
3972 __ Sltu(dst, lhs, rhs_reg);
3973 }
3974 return (cond == kCondAE);
3975
3976 case kCondBE:
3977 case kCondA:
3978 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3979 // Simulate lhs <= rhs via lhs < rhs + 1.
3980 // Note that this only works if rhs + 1 does not overflow
3981 // to 0, hence the check above.
3982 // Sltiu sign-extends its 16-bit immediate operand before
3983 // the comparison and thus lets us compare directly with
3984 // unsigned values in the ranges [0, 0x7fff] and
3985 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3986 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3987 return (cond == kCondA);
3988 } else {
3989 if (use_imm) {
3990 rhs_reg = TMP;
3991 __ LoadConst64(rhs_reg, rhs_imm);
3992 }
3993 __ Sltu(dst, rhs_reg, lhs);
3994 return (cond == kCondBE);
3995 }
3996 }
3997}
3998
Alexey Frunze299a9392015-12-08 16:08:02 -08003999void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
4000 bool is64bit,
4001 LocationSummary* locations,
4002 Mips64Label* label) {
4003 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
4004 Location rhs_location = locations->InAt(1);
4005 GpuRegister rhs_reg = ZERO;
4006 int64_t rhs_imm = 0;
4007 bool use_imm = rhs_location.IsConstant();
4008 if (use_imm) {
4009 if (is64bit) {
4010 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
4011 } else {
4012 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
4013 }
4014 } else {
4015 rhs_reg = rhs_location.AsRegister<GpuRegister>();
4016 }
4017
4018 if (use_imm && rhs_imm == 0) {
4019 switch (cond) {
4020 case kCondEQ:
4021 case kCondBE: // <= 0 if zero
4022 __ Beqzc(lhs, label);
4023 break;
4024 case kCondNE:
4025 case kCondA: // > 0 if non-zero
4026 __ Bnezc(lhs, label);
4027 break;
4028 case kCondLT:
4029 __ Bltzc(lhs, label);
4030 break;
4031 case kCondGE:
4032 __ Bgezc(lhs, label);
4033 break;
4034 case kCondLE:
4035 __ Blezc(lhs, label);
4036 break;
4037 case kCondGT:
4038 __ Bgtzc(lhs, label);
4039 break;
4040 case kCondB: // always false
4041 break;
4042 case kCondAE: // always true
4043 __ Bc(label);
4044 break;
4045 }
4046 } else {
4047 if (use_imm) {
4048 rhs_reg = TMP;
4049 __ LoadConst64(rhs_reg, rhs_imm);
4050 }
4051 switch (cond) {
4052 case kCondEQ:
4053 __ Beqc(lhs, rhs_reg, label);
4054 break;
4055 case kCondNE:
4056 __ Bnec(lhs, rhs_reg, label);
4057 break;
4058 case kCondLT:
4059 __ Bltc(lhs, rhs_reg, label);
4060 break;
4061 case kCondGE:
4062 __ Bgec(lhs, rhs_reg, label);
4063 break;
4064 case kCondLE:
4065 __ Bgec(rhs_reg, lhs, label);
4066 break;
4067 case kCondGT:
4068 __ Bltc(rhs_reg, lhs, label);
4069 break;
4070 case kCondB:
4071 __ Bltuc(lhs, rhs_reg, label);
4072 break;
4073 case kCondAE:
4074 __ Bgeuc(lhs, rhs_reg, label);
4075 break;
4076 case kCondBE:
4077 __ Bgeuc(rhs_reg, lhs, label);
4078 break;
4079 case kCondA:
4080 __ Bltuc(rhs_reg, lhs, label);
4081 break;
4082 }
4083 }
4084}
4085
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004086void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
4087 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004088 DataType::Type type,
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004089 LocationSummary* locations) {
4090 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
4091 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
4092 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004093 if (type == DataType::Type::kFloat32) {
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004094 switch (cond) {
4095 case kCondEQ:
4096 __ CmpEqS(FTMP, lhs, rhs);
4097 __ Mfc1(dst, FTMP);
4098 __ Andi(dst, dst, 1);
4099 break;
4100 case kCondNE:
4101 __ CmpEqS(FTMP, lhs, rhs);
4102 __ Mfc1(dst, FTMP);
4103 __ Addiu(dst, dst, 1);
4104 break;
4105 case kCondLT:
4106 if (gt_bias) {
4107 __ CmpLtS(FTMP, lhs, rhs);
4108 } else {
4109 __ CmpUltS(FTMP, lhs, rhs);
4110 }
4111 __ Mfc1(dst, FTMP);
4112 __ Andi(dst, dst, 1);
4113 break;
4114 case kCondLE:
4115 if (gt_bias) {
4116 __ CmpLeS(FTMP, lhs, rhs);
4117 } else {
4118 __ CmpUleS(FTMP, lhs, rhs);
4119 }
4120 __ Mfc1(dst, FTMP);
4121 __ Andi(dst, dst, 1);
4122 break;
4123 case kCondGT:
4124 if (gt_bias) {
4125 __ CmpUltS(FTMP, rhs, lhs);
4126 } else {
4127 __ CmpLtS(FTMP, rhs, lhs);
4128 }
4129 __ Mfc1(dst, FTMP);
4130 __ Andi(dst, dst, 1);
4131 break;
4132 case kCondGE:
4133 if (gt_bias) {
4134 __ CmpUleS(FTMP, rhs, lhs);
4135 } else {
4136 __ CmpLeS(FTMP, rhs, lhs);
4137 }
4138 __ Mfc1(dst, FTMP);
4139 __ Andi(dst, dst, 1);
4140 break;
4141 default:
4142 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4143 UNREACHABLE();
4144 }
4145 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004146 DCHECK_EQ(type, DataType::Type::kFloat64);
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004147 switch (cond) {
4148 case kCondEQ:
4149 __ CmpEqD(FTMP, lhs, rhs);
4150 __ Mfc1(dst, FTMP);
4151 __ Andi(dst, dst, 1);
4152 break;
4153 case kCondNE:
4154 __ CmpEqD(FTMP, lhs, rhs);
4155 __ Mfc1(dst, FTMP);
4156 __ Addiu(dst, dst, 1);
4157 break;
4158 case kCondLT:
4159 if (gt_bias) {
4160 __ CmpLtD(FTMP, lhs, rhs);
4161 } else {
4162 __ CmpUltD(FTMP, lhs, rhs);
4163 }
4164 __ Mfc1(dst, FTMP);
4165 __ Andi(dst, dst, 1);
4166 break;
4167 case kCondLE:
4168 if (gt_bias) {
4169 __ CmpLeD(FTMP, lhs, rhs);
4170 } else {
4171 __ CmpUleD(FTMP, lhs, rhs);
4172 }
4173 __ Mfc1(dst, FTMP);
4174 __ Andi(dst, dst, 1);
4175 break;
4176 case kCondGT:
4177 if (gt_bias) {
4178 __ CmpUltD(FTMP, rhs, lhs);
4179 } else {
4180 __ CmpLtD(FTMP, rhs, lhs);
4181 }
4182 __ Mfc1(dst, FTMP);
4183 __ Andi(dst, dst, 1);
4184 break;
4185 case kCondGE:
4186 if (gt_bias) {
4187 __ CmpUleD(FTMP, rhs, lhs);
4188 } else {
4189 __ CmpLeD(FTMP, rhs, lhs);
4190 }
4191 __ Mfc1(dst, FTMP);
4192 __ Andi(dst, dst, 1);
4193 break;
4194 default:
4195 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4196 UNREACHABLE();
4197 }
4198 }
4199}
4200
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004201bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
4202 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004203 DataType::Type type,
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004204 LocationSummary* input_locations,
4205 FpuRegister dst) {
4206 FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
4207 FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004208 if (type == DataType::Type::kFloat32) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004209 switch (cond) {
4210 case kCondEQ:
4211 __ CmpEqS(dst, lhs, rhs);
4212 return false;
4213 case kCondNE:
4214 __ CmpEqS(dst, lhs, rhs);
4215 return true;
4216 case kCondLT:
4217 if (gt_bias) {
4218 __ CmpLtS(dst, lhs, rhs);
4219 } else {
4220 __ CmpUltS(dst, lhs, rhs);
4221 }
4222 return false;
4223 case kCondLE:
4224 if (gt_bias) {
4225 __ CmpLeS(dst, lhs, rhs);
4226 } else {
4227 __ CmpUleS(dst, lhs, rhs);
4228 }
4229 return false;
4230 case kCondGT:
4231 if (gt_bias) {
4232 __ CmpUltS(dst, rhs, lhs);
4233 } else {
4234 __ CmpLtS(dst, rhs, lhs);
4235 }
4236 return false;
4237 case kCondGE:
4238 if (gt_bias) {
4239 __ CmpUleS(dst, rhs, lhs);
4240 } else {
4241 __ CmpLeS(dst, rhs, lhs);
4242 }
4243 return false;
4244 default:
4245 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4246 UNREACHABLE();
4247 }
4248 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004249 DCHECK_EQ(type, DataType::Type::kFloat64);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004250 switch (cond) {
4251 case kCondEQ:
4252 __ CmpEqD(dst, lhs, rhs);
4253 return false;
4254 case kCondNE:
4255 __ CmpEqD(dst, lhs, rhs);
4256 return true;
4257 case kCondLT:
4258 if (gt_bias) {
4259 __ CmpLtD(dst, lhs, rhs);
4260 } else {
4261 __ CmpUltD(dst, lhs, rhs);
4262 }
4263 return false;
4264 case kCondLE:
4265 if (gt_bias) {
4266 __ CmpLeD(dst, lhs, rhs);
4267 } else {
4268 __ CmpUleD(dst, lhs, rhs);
4269 }
4270 return false;
4271 case kCondGT:
4272 if (gt_bias) {
4273 __ CmpUltD(dst, rhs, lhs);
4274 } else {
4275 __ CmpLtD(dst, rhs, lhs);
4276 }
4277 return false;
4278 case kCondGE:
4279 if (gt_bias) {
4280 __ CmpUleD(dst, rhs, lhs);
4281 } else {
4282 __ CmpLeD(dst, rhs, lhs);
4283 }
4284 return false;
4285 default:
4286 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4287 UNREACHABLE();
4288 }
4289 }
4290}
4291
Alexey Frunze299a9392015-12-08 16:08:02 -08004292void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
4293 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004294 DataType::Type type,
Alexey Frunze299a9392015-12-08 16:08:02 -08004295 LocationSummary* locations,
4296 Mips64Label* label) {
4297 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
4298 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004299 if (type == DataType::Type::kFloat32) {
Alexey Frunze299a9392015-12-08 16:08:02 -08004300 switch (cond) {
4301 case kCondEQ:
4302 __ CmpEqS(FTMP, lhs, rhs);
4303 __ Bc1nez(FTMP, label);
4304 break;
4305 case kCondNE:
4306 __ CmpEqS(FTMP, lhs, rhs);
4307 __ Bc1eqz(FTMP, label);
4308 break;
4309 case kCondLT:
4310 if (gt_bias) {
4311 __ CmpLtS(FTMP, lhs, rhs);
4312 } else {
4313 __ CmpUltS(FTMP, lhs, rhs);
4314 }
4315 __ Bc1nez(FTMP, label);
4316 break;
4317 case kCondLE:
4318 if (gt_bias) {
4319 __ CmpLeS(FTMP, lhs, rhs);
4320 } else {
4321 __ CmpUleS(FTMP, lhs, rhs);
4322 }
4323 __ Bc1nez(FTMP, label);
4324 break;
4325 case kCondGT:
4326 if (gt_bias) {
4327 __ CmpUltS(FTMP, rhs, lhs);
4328 } else {
4329 __ CmpLtS(FTMP, rhs, lhs);
4330 }
4331 __ Bc1nez(FTMP, label);
4332 break;
4333 case kCondGE:
4334 if (gt_bias) {
4335 __ CmpUleS(FTMP, rhs, lhs);
4336 } else {
4337 __ CmpLeS(FTMP, rhs, lhs);
4338 }
4339 __ Bc1nez(FTMP, label);
4340 break;
4341 default:
4342 LOG(FATAL) << "Unexpected non-floating-point condition";
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004343 UNREACHABLE();
Alexey Frunze299a9392015-12-08 16:08:02 -08004344 }
4345 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004346 DCHECK_EQ(type, DataType::Type::kFloat64);
Alexey Frunze299a9392015-12-08 16:08:02 -08004347 switch (cond) {
4348 case kCondEQ:
4349 __ CmpEqD(FTMP, lhs, rhs);
4350 __ Bc1nez(FTMP, label);
4351 break;
4352 case kCondNE:
4353 __ CmpEqD(FTMP, lhs, rhs);
4354 __ Bc1eqz(FTMP, label);
4355 break;
4356 case kCondLT:
4357 if (gt_bias) {
4358 __ CmpLtD(FTMP, lhs, rhs);
4359 } else {
4360 __ CmpUltD(FTMP, lhs, rhs);
4361 }
4362 __ Bc1nez(FTMP, label);
4363 break;
4364 case kCondLE:
4365 if (gt_bias) {
4366 __ CmpLeD(FTMP, lhs, rhs);
4367 } else {
4368 __ CmpUleD(FTMP, lhs, rhs);
4369 }
4370 __ Bc1nez(FTMP, label);
4371 break;
4372 case kCondGT:
4373 if (gt_bias) {
4374 __ CmpUltD(FTMP, rhs, lhs);
4375 } else {
4376 __ CmpLtD(FTMP, rhs, lhs);
4377 }
4378 __ Bc1nez(FTMP, label);
4379 break;
4380 case kCondGE:
4381 if (gt_bias) {
4382 __ CmpUleD(FTMP, rhs, lhs);
4383 } else {
4384 __ CmpLeD(FTMP, rhs, lhs);
4385 }
4386 __ Bc1nez(FTMP, label);
4387 break;
4388 default:
4389 LOG(FATAL) << "Unexpected non-floating-point condition";
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004390 UNREACHABLE();
Alexey Frunze299a9392015-12-08 16:08:02 -08004391 }
4392 }
4393}
4394
Alexey Frunze4dda3372015-06-01 18:31:49 -07004395void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00004396 size_t condition_input_index,
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004397 Mips64Label* true_target,
4398 Mips64Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00004399 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004400
David Brazdil0debae72015-11-12 18:37:00 +00004401 if (true_target == nullptr && false_target == nullptr) {
4402 // Nothing to do. The code always falls through.
4403 return;
4404 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00004405 // Constant condition, statically compared against "true" (integer value 1).
4406 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00004407 if (true_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004408 __ Bc(true_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004409 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004410 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00004411 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00004412 if (false_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004413 __ Bc(false_target);
David Brazdil0debae72015-11-12 18:37:00 +00004414 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004415 }
David Brazdil0debae72015-11-12 18:37:00 +00004416 return;
4417 }
4418
4419 // The following code generates these patterns:
4420 // (1) true_target == nullptr && false_target != nullptr
4421 // - opposite condition true => branch to false_target
4422 // (2) true_target != nullptr && false_target == nullptr
4423 // - condition true => branch to true_target
4424 // (3) true_target != nullptr && false_target != nullptr
4425 // - condition true => branch to true_target
4426 // - branch to false_target
4427 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004428 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00004429 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004430 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00004431 if (true_target == nullptr) {
4432 __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
4433 } else {
4434 __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
4435 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004436 } else {
4437 // The condition instruction has not been materialized, use its inputs as
4438 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00004439 HCondition* condition = cond->AsCondition();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004440 DataType::Type type = condition->InputAt(0)->GetType();
Alexey Frunze299a9392015-12-08 16:08:02 -08004441 LocationSummary* locations = cond->GetLocations();
4442 IfCondition if_cond = condition->GetCondition();
4443 Mips64Label* branch_target = true_target;
David Brazdil0debae72015-11-12 18:37:00 +00004444
David Brazdil0debae72015-11-12 18:37:00 +00004445 if (true_target == nullptr) {
4446 if_cond = condition->GetOppositeCondition();
Alexey Frunze299a9392015-12-08 16:08:02 -08004447 branch_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00004448 }
4449
Alexey Frunze299a9392015-12-08 16:08:02 -08004450 switch (type) {
4451 default:
Andreas Gampe3db70682018-12-26 15:12:03 -08004452 GenerateIntLongCompareAndBranch(if_cond, /* is64bit= */ false, locations, branch_target);
Alexey Frunze299a9392015-12-08 16:08:02 -08004453 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004454 case DataType::Type::kInt64:
Andreas Gampe3db70682018-12-26 15:12:03 -08004455 GenerateIntLongCompareAndBranch(if_cond, /* is64bit= */ true, locations, branch_target);
Alexey Frunze299a9392015-12-08 16:08:02 -08004456 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004457 case DataType::Type::kFloat32:
4458 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08004459 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
4460 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07004461 }
4462 }
David Brazdil0debae72015-11-12 18:37:00 +00004463
4464 // If neither branch falls through (case 3), the conditional branch to `true_target`
4465 // was already emitted (case 2) and we need to emit a jump to `false_target`.
4466 if (true_target != nullptr && false_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004467 __ Bc(false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004468 }
4469}
4470
4471void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004472 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004473 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004474 locations->SetInAt(0, Location::RequiresRegister());
4475 }
4476}
4477
4478void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004479 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4480 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004481 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004482 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004483 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004484 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08004485 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004486}
4487
// Deoptimization calls the runtime on a slow path; only the first runtime
// calling-convention register needs to be treated as caller-saved, and the
// condition needs a register only when it is boolean/materialized.
void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
4499
// Branches to the deoptimization slow path when the condition holds;
// otherwise falls through (false_target == nullptr).
void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index= */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target= */ nullptr);
}
4508
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004509// This function returns true if a conditional move can be generated for HSelect.
4510// Otherwise it returns false and HSelect must be implemented in terms of conditonal
4511// branches and regular moves.
4512//
4513// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
4514//
4515// While determining feasibility of a conditional move and setting inputs/outputs
4516// are two distinct tasks, this function does both because they share quite a bit
4517// of common logic.
4518static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
4519 bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
Andreas Gampe3db70682018-12-26 15:12:03 -08004520 HInstruction* cond = select->InputAt(/* i= */ 2);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004521 HCondition* condition = cond->AsCondition();
4522
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004523 DataType::Type cond_type =
4524 materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
4525 DataType::Type dst_type = select->GetType();
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004526
4527 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
4528 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
4529 bool is_true_value_zero_constant =
4530 (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
4531 bool is_false_value_zero_constant =
4532 (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());
4533
4534 bool can_move_conditionally = false;
4535 bool use_const_for_false_in = false;
4536 bool use_const_for_true_in = false;
4537
4538 if (!cond->IsConstant()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004539 if (!DataType::IsFloatingPointType(cond_type)) {
4540 if (!DataType::IsFloatingPointType(dst_type)) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004541 // Moving int/long on int/long condition.
4542 if (is_true_value_zero_constant) {
4543 // seleqz out_reg, false_reg, cond_reg
4544 can_move_conditionally = true;
4545 use_const_for_true_in = true;
4546 } else if (is_false_value_zero_constant) {
4547 // selnez out_reg, true_reg, cond_reg
4548 can_move_conditionally = true;
4549 use_const_for_false_in = true;
4550 } else if (materialized) {
4551 // Not materializing unmaterialized int conditions
4552 // to keep the instruction count low.
4553 // selnez AT, true_reg, cond_reg
4554 // seleqz TMP, false_reg, cond_reg
4555 // or out_reg, AT, TMP
4556 can_move_conditionally = true;
4557 }
4558 } else {
4559 // Moving float/double on int/long condition.
4560 if (materialized) {
4561 // Not materializing unmaterialized int conditions
4562 // to keep the instruction count low.
4563 can_move_conditionally = true;
4564 if (is_true_value_zero_constant) {
4565 // sltu TMP, ZERO, cond_reg
4566 // mtc1 TMP, temp_cond_reg
4567 // seleqz.fmt out_reg, false_reg, temp_cond_reg
4568 use_const_for_true_in = true;
4569 } else if (is_false_value_zero_constant) {
4570 // sltu TMP, ZERO, cond_reg
4571 // mtc1 TMP, temp_cond_reg
4572 // selnez.fmt out_reg, true_reg, temp_cond_reg
4573 use_const_for_false_in = true;
4574 } else {
4575 // sltu TMP, ZERO, cond_reg
4576 // mtc1 TMP, temp_cond_reg
4577 // sel.fmt temp_cond_reg, false_reg, true_reg
4578 // mov.fmt out_reg, temp_cond_reg
4579 }
4580 }
4581 }
4582 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004583 if (!DataType::IsFloatingPointType(dst_type)) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004584 // Moving int/long on float/double condition.
4585 can_move_conditionally = true;
4586 if (is_true_value_zero_constant) {
4587 // mfc1 TMP, temp_cond_reg
4588 // seleqz out_reg, false_reg, TMP
4589 use_const_for_true_in = true;
4590 } else if (is_false_value_zero_constant) {
4591 // mfc1 TMP, temp_cond_reg
4592 // selnez out_reg, true_reg, TMP
4593 use_const_for_false_in = true;
4594 } else {
4595 // mfc1 TMP, temp_cond_reg
4596 // selnez AT, true_reg, TMP
4597 // seleqz TMP, false_reg, TMP
4598 // or out_reg, AT, TMP
4599 }
4600 } else {
4601 // Moving float/double on float/double condition.
4602 can_move_conditionally = true;
4603 if (is_true_value_zero_constant) {
4604 // seleqz.fmt out_reg, false_reg, temp_cond_reg
4605 use_const_for_true_in = true;
4606 } else if (is_false_value_zero_constant) {
4607 // selnez.fmt out_reg, true_reg, temp_cond_reg
4608 use_const_for_false_in = true;
4609 } else {
4610 // sel.fmt temp_cond_reg, false_reg, true_reg
4611 // mov.fmt out_reg, temp_cond_reg
4612 }
4613 }
4614 }
4615 }
4616
4617 if (can_move_conditionally) {
4618 DCHECK(!use_const_for_false_in || !use_const_for_true_in);
4619 } else {
4620 DCHECK(!use_const_for_false_in);
4621 DCHECK(!use_const_for_true_in);
4622 }
4623
4624 if (locations_to_set != nullptr) {
4625 if (use_const_for_false_in) {
4626 locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
4627 } else {
4628 locations_to_set->SetInAt(0,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004629 DataType::IsFloatingPointType(dst_type)
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004630 ? Location::RequiresFpuRegister()
4631 : Location::RequiresRegister());
4632 }
4633 if (use_const_for_true_in) {
4634 locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
4635 } else {
4636 locations_to_set->SetInAt(1,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004637 DataType::IsFloatingPointType(dst_type)
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004638 ? Location::RequiresFpuRegister()
4639 : Location::RequiresRegister());
4640 }
4641 if (materialized) {
4642 locations_to_set->SetInAt(2, Location::RequiresRegister());
4643 }
4644
4645 if (can_move_conditionally) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004646 locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004647 ? Location::RequiresFpuRegister()
4648 : Location::RequiresRegister());
4649 } else {
4650 locations_to_set->SetOut(Location::SameAsFirstInput());
4651 }
4652 }
4653
4654 return can_move_conditionally;
4655}
4656
4657
// Emits a branchless conditional move for an HSelect that was approved by
// CanMoveConditionally(). The condition is materialized either into a core
// register (`cond_reg`, TMP by default) or an FPU register (`fcond_reg`, FTMP),
// and SELEQZ/SELNEZ (or their .fmt FPU variants) pick between the true and
// false inputs. `cond_inverted` records that the materialized value is the
// negation of the HSelect condition, which swaps the roles of the two inputs.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* i= */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already a 0/1 value in a register.
    cond_reg = locations->InAt(/* at= */ 2).AsRegister<GpuRegister>();
  } else {
    // Materialize the unmaterialized condition into cond_reg/fcond_reg.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        // All remaining (integral/reference) types compare as 32-bit values.
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit= */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit= */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // CanMoveConditionally() only leaves an input as a constant when it is a
  // zero bit pattern (SELEQZ/SELNEZ produce the zero themselves).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integral/reference destination.
      if (DataType::IsFloatingPointType(cond_type)) {
        // Move the FPU condition flag into the core condition register.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // General case: select each side into AT/TMP and OR them together.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SEL.S selects in place on fcond_reg, then the result is moved out.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SEL.D selects in place on fcond_reg, then the result is moved out.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4809
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004810void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004811 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004812 LocationSummary(flag, LocationSummary::kNoCall);
4813 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004814}
4815
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004816void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4817 __ LoadFromOffset(kLoadWord,
4818 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4819 SP,
4820 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004821}
4822
David Brazdil74eb1b22015-12-14 11:44:01 +00004823void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004824 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004825 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004826}
4827
4828void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Andreas Gampe3db70682018-12-26 15:12:03 -08004829 if (CanMoveConditionally(select, /* locations_to_set= */ nullptr)) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004830 GenConditionalMove(select);
4831 } else {
4832 LocationSummary* locations = select->GetLocations();
4833 Mips64Label false_target;
4834 GenerateTestAndBranch(select,
Andreas Gampe3db70682018-12-26 15:12:03 -08004835 /* condition_input_index= */ 2,
4836 /* true_target= */ nullptr,
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004837 &false_target);
4838 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4839 __ Bind(&false_target);
4840 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004841}
4842
David Srbecky0cf44932015-12-09 14:09:59 +00004843void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004844 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00004845}
4846
David Srbeckyd28f4a02016-03-14 17:14:24 +00004847void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
4848 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00004849}
4850
// Emits a single nop instruction.
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
4854
Alexey Frunze4dda3372015-06-01 18:31:49 -07004855void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08004856 const FieldInfo& field_info) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004857 DataType::Type field_type = field_info.GetFieldType();
Alexey Frunze15958152017-02-09 19:08:30 -08004858 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004859 kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004860 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze15958152017-02-09 19:08:30 -08004861 instruction,
4862 object_field_get_with_read_barrier
4863 ? LocationSummary::kCallOnSlowPath
4864 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07004865 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4866 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
4867 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004868 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004869 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004870 locations->SetOut(Location::RequiresFpuRegister());
4871 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08004872 // The output overlaps in the case of an object field get with
4873 // read barriers enabled: we do not want the move to overwrite the
4874 // object's location, as we need it to emit the read barrier.
4875 locations->SetOut(Location::RequiresRegister(),
4876 object_field_get_with_read_barrier
4877 ? Location::kOutputOverlap
4878 : Location::kNoOutputOverlap);
4879 }
4880 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4881 // We need a temporary register for the read barrier marking slow
4882 // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004883 if (!kBakerReadBarrierThunksEnableForFields) {
4884 locations->AddTemp(Location::RequiresRegister());
4885 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004886 }
4887}
4888
// Emits the code for an instance/static field get: picks the load width from
// the field type, performs the load (with an implicit null check obtained via
// GetImplicitNullChecker), and inserts read barriers for reference fields and
// load-acquire barriers for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness from the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // Compressed 32-bit heap reference, zero-extended.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With field thunks the Baker barrier needs no temp register.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check= */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4977
4978void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4979 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4980 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004981 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004982 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004983 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004984 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004985 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004986 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004987 }
4988}
4989
// Emits the code for an instance/static field set: picks the store width from
// the field type, wraps volatile stores in memory barriers, poisons reference
// values when heap poisoning is enabled, and marks the GC card for reference
// stores.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width from the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Volatile stores are bracketed by barriers: any-store before, any-any after.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Mark the GC card so the concurrent collector sees the reference store.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
5067
// Instance field get shares all location logic with static field get.
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5071
// Instance field get shares all code generation with static field get.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5075
// Instance field set shares all location logic with static field set.
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
5079
// Instance field set shares all code generation with static field set.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
5083
Alexey Frunze15958152017-02-09 19:08:30 -08005084void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
5085 HInstruction* instruction,
5086 Location out,
5087 uint32_t offset,
5088 Location maybe_temp,
5089 ReadBarrierOption read_barrier_option) {
5090 GpuRegister out_reg = out.AsRegister<GpuRegister>();
5091 if (read_barrier_option == kWithReadBarrier) {
5092 CHECK(kEmitCompilerReadBarrier);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005093 if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
5094 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
5095 }
Alexey Frunze15958152017-02-09 19:08:30 -08005096 if (kUseBakerReadBarrier) {
5097 // Load with fast path based Baker's read barrier.
5098 // /* HeapReference<Object> */ out = *(out + offset)
5099 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5100 out,
5101 out_reg,
5102 offset,
5103 maybe_temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08005104 /* needs_null_check= */ false);
Alexey Frunze15958152017-02-09 19:08:30 -08005105 } else {
5106 // Load with slow path based read barrier.
5107 // Save the value of `out` into `maybe_temp` before overwriting it
5108 // in the following move operation, as we will need it for the
5109 // read barrier below.
5110 __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
5111 // /* HeapReference<Object> */ out = *(out + offset)
5112 __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
5113 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5114 }
5115 } else {
5116 // Plain load with no read barrier.
5117 // /* HeapReference<Object> */ out = *(out + offset)
5118 __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
5119 __ MaybeUnpoisonHeapReference(out_reg);
5120 }
5121}
5122
// Loads a heap reference of the form `out = *(obj + offset)` into a register
// distinct from the base object register. `read_barrier_option` selects
// between a plain (unpoisoning) load and a read-barrier load; `maybe_temp` is
// required for the Baker fast path unless field thunks are enabled.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // No temp is needed when Baker read barrier field thunks are enabled.
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5159
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005160static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
5161 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
5162 if (reg >= V0 && reg <= T2) { // 13 consequtive regs.
5163 return reg - V0;
5164 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
5165 return 13 + (reg - S2);
5166 } else if (reg == S8) { // One more.
5167 return 19;
5168 }
5169 LOG(FATAL) << "Unexpected register " << reg;
5170 UNREACHABLE();
5171}
5172
5173static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
5174 int num = GetBakerMarkThunkNumber(reg) +
5175 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
5176 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
5177}
5178
5179static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
5180 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
5181 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
5182}
5183
// Emits code loading the GC root at `obj + offset` into `root`, applying the
// requested read barrier option (Baker thunk-based, Baker entrypoint-based,
// non-Baker slow path, or none). If `label_low` is non-null it is bound at the
// instruction containing the low 16 bits of the offset; the 0x5678 offset is
// then presumably a placeholder patched later by the linker — TODO confirm
// against the PC-relative load callers.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        // // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //   temp = &gc_root_thunk<root_reg>
        //   root = temp(root)
        // }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        // For large offsets, Daui below materializes the high bits into TMP.
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        // Bare (no scheduling) branch so the following load sits in its delay slot.
        __ Beqz(T9, &skip_call, /* is_bare= */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        // A non-null entrypoint means GC is marking: take the mark slow path.
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5308
// Emits a field reference load `ref = *(obj + offset)` guarded by a Baker
// read barrier. With thunks enabled, the fast path is a single branch over a
// thunk call; otherwise it falls back to the generic
// GenerateReferenceLoadWithBakerReadBarrier (which needs `temp`).
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // If the offset is too large to fit into the lw instruction, we
    //   // use an adjusted base register (TMP) here. This register
    //   // receives bits 16 ... 31 of the offset before the thunk invocation
    //   // and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    // gray_return_address:

    // The thunk path does not use a temporary register.
    DCHECK(temp.IsInvalid());
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch: the next instruction is a forbidden slot, not a delay slot.
      __ Beqzc(T9, &skip_call, /* is_bare= */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      __ Beqz(T9, &skip_call, /* is_bare= */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker path.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5398
// Emits an array element reference load `ref = data[index]` (4-byte scale)
// guarded by a Baker read barrier. With thunks enabled, the element address
// is pre-computed into TMP in the branch delay slot; otherwise it falls back
// to the generic GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // The element address is pre-calculated in the TMP register before the
    //   // thunk invocation and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    // gray_return_address:

    // The thunk path does not use a temporary register and requires a register index.
    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset= */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    // Bare branch so that Dlsa below lands in its delay slot.
    __ Beqz(T9, &skip_call, /* is_bare= */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker path.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5474
// Generic (non-thunk) Baker read barrier reference load: reads the holder's
// lock word first, performs the reference load, then branches to a mark slow
// path when the read-barrier state bit says the holder is gray.
// `always_update_field` selects the field-updating slow path used by
// UnsafeCASObject-style intrinsics (then `index` carries the field offset).
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        // Dlsa cannot encode a zero shift; use a plain add.
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetScopedAllocator())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset= */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5580
5581void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5582 Location out,
5583 Location ref,
5584 Location obj,
5585 uint32_t offset,
5586 Location index) {
5587 DCHECK(kEmitCompilerReadBarrier);
5588
5589 // Insert a slow path based read barrier *after* the reference load.
5590 //
5591 // If heap poisoning is enabled, the unpoisoning of the loaded
5592 // reference will be carried out by the runtime within the slow
5593 // path.
5594 //
5595 // Note that `ref` currently does not get unpoisoned (when heap
5596 // poisoning is enabled), which is alright as the `ref` argument is
5597 // not used by the artReadBarrierSlow entry point.
5598 //
5599 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005600 SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
Alexey Frunze15958152017-02-09 19:08:30 -08005601 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5602 AddSlowPath(slow_path);
5603
5604 __ Bc(slow_path->GetEntryLabel());
5605 __ Bind(slow_path->GetExitLabel());
5606}
5607
5608void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5609 Location out,
5610 Location ref,
5611 Location obj,
5612 uint32_t offset,
5613 Location index) {
5614 if (kEmitCompilerReadBarrier) {
5615 // Baker's read barriers shall be handled by the fast path
5616 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5617 DCHECK(!kUseBakerReadBarrier);
5618 // If heap poisoning is enabled, unpoisoning will be taken care of
5619 // by the runtime within the slow path.
5620 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5621 } else if (kPoisonHeapReferences) {
5622 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5623 }
5624}
5625
5626void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5627 Location out,
5628 Location root) {
5629 DCHECK(kEmitCompilerReadBarrier);
5630
5631 // Insert a slow path based read barrier *after* the GC root load.
5632 //
5633 // Note that GC roots are not affected by heap poisoning, so we do
5634 // not need to do anything special for this here.
5635 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005636 new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
Alexey Frunze15958152017-02-09 19:08:30 -08005637 AddSlowPath(slow_path);
5638
5639 __ Bc(slow_path->GetEntryLabel());
5640 __ Bind(slow_path->GetExitLabel());
5641}
5642
Alexey Frunze4dda3372015-06-01 18:31:49 -07005643void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005644 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5645 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005646 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005647 switch (type_check_kind) {
5648 case TypeCheckKind::kExactCheck:
5649 case TypeCheckKind::kAbstractClassCheck:
5650 case TypeCheckKind::kClassHierarchyCheck:
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005651 case TypeCheckKind::kArrayObjectCheck: {
5652 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
5653 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
5654 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005655 break;
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005656 }
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005657 case TypeCheckKind::kArrayCheck:
5658 case TypeCheckKind::kUnresolvedCheck:
5659 case TypeCheckKind::kInterfaceCheck:
5660 call_kind = LocationSummary::kCallOnSlowPath;
5661 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00005662 case TypeCheckKind::kBitstringCheck:
5663 break;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005664 }
5665
Vladimir Markoca6fff82017-10-03 14:49:14 +01005666 LocationSummary* locations =
5667 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005668 if (baker_read_barrier_slow_path) {
5669 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5670 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005671 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00005672 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
5673 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
5674 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
5675 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
5676 } else {
5677 locations->SetInAt(1, Location::RequiresRegister());
5678 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005679 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005680 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005681 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005682 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005683}
5684
5685void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005686 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005687 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08005688 Location obj_loc = locations->InAt(0);
5689 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
Vladimir Marko175e7862018-03-27 09:03:13 +00005690 Location cls = locations->InAt(1);
Alexey Frunze15958152017-02-09 19:08:30 -08005691 Location out_loc = locations->Out();
5692 GpuRegister out = out_loc.AsRegister<GpuRegister>();
5693 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
5694 DCHECK_LE(num_temps, 1u);
5695 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005696 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5697 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5698 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5699 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005700 Mips64Label done;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005701 SlowPathCodeMIPS64* slow_path = nullptr;
Alexey Frunze4dda3372015-06-01 18:31:49 -07005702
5703 // Return 0 if `obj` is null.
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005704 // Avoid this check if we know `obj` is not null.
5705 if (instruction->MustDoNullCheck()) {
5706 __ Move(out, ZERO);
5707 __ Beqzc(obj, &done);
5708 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005709
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005710 switch (type_check_kind) {
5711 case TypeCheckKind::kExactCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005712 ReadBarrierOption read_barrier_option =
5713 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005714 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005715 GenerateReferenceLoadTwoRegisters(instruction,
5716 out_loc,
5717 obj_loc,
5718 class_offset,
5719 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005720 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005721 // Classes must be equal for the instanceof to succeed.
Vladimir Marko175e7862018-03-27 09:03:13 +00005722 __ Xor(out, out, cls.AsRegister<GpuRegister>());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005723 __ Sltiu(out, out, 1);
5724 break;
5725 }
5726
5727 case TypeCheckKind::kAbstractClassCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005728 ReadBarrierOption read_barrier_option =
5729 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005730 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005731 GenerateReferenceLoadTwoRegisters(instruction,
5732 out_loc,
5733 obj_loc,
5734 class_offset,
5735 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005736 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005737 // If the class is abstract, we eagerly fetch the super class of the
5738 // object to avoid doing a comparison we know will fail.
5739 Mips64Label loop;
5740 __ Bind(&loop);
5741 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08005742 GenerateReferenceLoadOneRegister(instruction,
5743 out_loc,
5744 super_offset,
5745 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005746 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005747 // If `out` is null, we use it for the result, and jump to `done`.
5748 __ Beqzc(out, &done);
Vladimir Marko175e7862018-03-27 09:03:13 +00005749 __ Bnec(out, cls.AsRegister<GpuRegister>(), &loop);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005750 __ LoadConst32(out, 1);
5751 break;
5752 }
5753
5754 case TypeCheckKind::kClassHierarchyCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005755 ReadBarrierOption read_barrier_option =
5756 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005757 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005758 GenerateReferenceLoadTwoRegisters(instruction,
5759 out_loc,
5760 obj_loc,
5761 class_offset,
5762 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005763 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005764 // Walk over the class hierarchy to find a match.
5765 Mips64Label loop, success;
5766 __ Bind(&loop);
Vladimir Marko175e7862018-03-27 09:03:13 +00005767 __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005768 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08005769 GenerateReferenceLoadOneRegister(instruction,
5770 out_loc,
5771 super_offset,
5772 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005773 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005774 __ Bnezc(out, &loop);
5775 // If `out` is null, we use it for the result, and jump to `done`.
5776 __ Bc(&done);
5777 __ Bind(&success);
5778 __ LoadConst32(out, 1);
5779 break;
5780 }
5781
5782 case TypeCheckKind::kArrayObjectCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005783 ReadBarrierOption read_barrier_option =
5784 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005785 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005786 GenerateReferenceLoadTwoRegisters(instruction,
5787 out_loc,
5788 obj_loc,
5789 class_offset,
5790 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005791 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005792 // Do an exact check.
5793 Mips64Label success;
Vladimir Marko175e7862018-03-27 09:03:13 +00005794 __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005795 // Otherwise, we need to check that the object's class is a non-primitive array.
5796 // /* HeapReference<Class> */ out = out->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08005797 GenerateReferenceLoadOneRegister(instruction,
5798 out_loc,
5799 component_offset,
5800 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005801 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005802 // If `out` is null, we use it for the result, and jump to `done`.
5803 __ Beqzc(out, &done);
5804 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
5805 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
5806 __ Sltiu(out, out, 1);
5807 __ Bc(&done);
5808 __ Bind(&success);
5809 __ LoadConst32(out, 1);
5810 break;
5811 }
5812
5813 case TypeCheckKind::kArrayCheck: {
5814 // No read barrier since the slow path will retry upon failure.
5815 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005816 GenerateReferenceLoadTwoRegisters(instruction,
5817 out_loc,
5818 obj_loc,
5819 class_offset,
5820 maybe_temp_loc,
5821 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005822 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005823 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
Andreas Gampe3db70682018-12-26 15:12:03 -08005824 instruction, /* is_fatal= */ false);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005825 codegen_->AddSlowPath(slow_path);
Vladimir Marko175e7862018-03-27 09:03:13 +00005826 __ Bnec(out, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005827 __ LoadConst32(out, 1);
5828 break;
5829 }
5830
5831 case TypeCheckKind::kUnresolvedCheck:
5832 case TypeCheckKind::kInterfaceCheck: {
5833 // Note that we indeed only call on slow path, but we always go
5834 // into the slow path for the unresolved and interface check
5835 // cases.
5836 //
5837 // We cannot directly call the InstanceofNonTrivial runtime
5838 // entry point without resorting to a type checking slow path
5839 // here (i.e. by calling InvokeRuntime directly), as it would
5840 // require to assign fixed registers for the inputs of this
5841 // HInstanceOf instruction (following the runtime calling
5842 // convention), which might be cluttered by the potential first
5843 // read barrier emission at the beginning of this method.
5844 //
5845 // TODO: Introduce a new runtime entry point taking the object
5846 // to test (instead of its class) as argument, and let it deal
5847 // with the read barrier issues. This will let us refactor this
5848 // case of the `switch` code as it was previously (with a direct
5849 // call to the runtime not using a type checking slow path).
5850 // This should also be beneficial for the other cases above.
5851 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005852 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
Andreas Gampe3db70682018-12-26 15:12:03 -08005853 instruction, /* is_fatal= */ false);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005854 codegen_->AddSlowPath(slow_path);
5855 __ Bc(slow_path->GetEntryLabel());
5856 break;
5857 }
Vladimir Marko175e7862018-03-27 09:03:13 +00005858
5859 case TypeCheckKind::kBitstringCheck: {
5860 // /* HeapReference<Class> */ temp = obj->klass_
5861 GenerateReferenceLoadTwoRegisters(instruction,
5862 out_loc,
5863 obj_loc,
5864 class_offset,
5865 maybe_temp_loc,
5866 kWithoutReadBarrier);
5867
5868 GenerateBitstringTypeCheckCompare(instruction, out);
5869 __ Sltiu(out, out, 1);
5870 break;
5871 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005872 }
5873
5874 __ Bind(&done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005875
5876 if (slow_path != nullptr) {
5877 __ Bind(slow_path->GetExitLabel());
5878 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005879}
5880
5881void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005882 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005883 locations->SetOut(Location::ConstantLocation(constant));
5884}
5885
5886void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
5887 // Will be generated at use site.
5888}
5889
5890void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005891 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005892 locations->SetOut(Location::ConstantLocation(constant));
5893}
5894
5895void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
5896 // Will be generated at use site.
5897}
5898
Calin Juravle175dc732015-08-25 15:42:32 +01005899void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5900 // The trampoline uses the same calling convention as dex calling conventions,
5901 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
5902 // the method_idx.
5903 HandleInvoke(invoke);
5904}
5905
5906void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5907 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
5908}
5909
Alexey Frunze4dda3372015-06-01 18:31:49 -07005910void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5911 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5912 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5913}
5914
5915void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
5916 HandleInvoke(invoke);
5917 // The register T0 is required to be used for the hidden argument in
5918 // art_quick_imt_conflict_trampoline, so add the hidden argument.
5919 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
5920}
5921
// Emits the interface-dispatch sequence: load the receiver's class, index into
// its IMT, and make an indirect call through T9, with the interface method's
// dex index passed as the "hidden argument" for conflict resolution.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument (temp 1 was reserved for it in the locations
  // builder): the dex method index, consumed by the IMT conflict trampoline.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver spilled: reload it from the stack before reading its class.
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetImt();  (pointer to the interface method table)
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();  (Nop fills the Jalr delay slot.)
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint for this call site.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5963
5964void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005965 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5966 if (intrinsic.TryDispatch(invoke)) {
5967 return;
5968 }
5969
Alexey Frunze4dda3372015-06-01 18:31:49 -07005970 HandleInvoke(invoke);
5971}
5972
5973void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005974 // Explicit clinit checks triggered by static invokes must have been pruned by
5975 // art::PrepareForRegisterAllocation.
5976 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005977
Chris Larsen3039e382015-08-26 07:54:08 -07005978 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5979 if (intrinsic.TryDispatch(invoke)) {
5980 return;
5981 }
5982
Alexey Frunze4dda3372015-06-01 18:31:49 -07005983 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005984}
5985
Orion Hodsonac141392017-01-13 11:53:47 +00005986void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5987 HandleInvoke(invoke);
5988}
5989
5990void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5991 codegen_->GenerateInvokePolymorphicCall(invoke);
5992}
5993
Orion Hodson4c8e12e2018-05-18 08:33:20 +01005994void LocationsBuilderMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
5995 HandleInvoke(invoke);
5996}
5997
5998void InstructionCodeGeneratorMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
5999 codegen_->GenerateInvokeCustomCall(invoke);
6000}
6001
Chris Larsen3039e382015-08-26 07:54:08 -07006002static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006003 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07006004 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
6005 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006006 return true;
6007 }
6008 return false;
6009}
6010
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006011HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08006012 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006013 bool fallback_load = false;
6014 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006015 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006016 case HLoadString::LoadKind::kBootImageRelRo:
Alexey Frunzef63f5692016-12-13 17:43:11 -08006017 case HLoadString::LoadKind::kBssEntry:
6018 DCHECK(!Runtime::Current()->UseJitCompilation());
6019 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006020 case HLoadString::LoadKind::kJitBootImageAddress:
Alexey Frunzef63f5692016-12-13 17:43:11 -08006021 case HLoadString::LoadKind::kJitTableAddress:
6022 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006023 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006024 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01006025 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006026 }
6027 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006028 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006029 }
6030 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006031}
6032
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006033HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
6034 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006035 bool fallback_load = false;
6036 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006037 case HLoadClass::LoadKind::kInvalid:
6038 LOG(FATAL) << "UNREACHABLE";
6039 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006040 case HLoadClass::LoadKind::kReferrersClass:
6041 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006042 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006043 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006044 case HLoadClass::LoadKind::kBssEntry:
6045 DCHECK(!Runtime::Current()->UseJitCompilation());
6046 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006047 case HLoadClass::LoadKind::kJitBootImageAddress:
Alexey Frunzef63f5692016-12-13 17:43:11 -08006048 case HLoadClass::LoadKind::kJitTableAddress:
6049 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006050 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006051 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08006052 break;
6053 }
6054 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006055 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006056 }
6057 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006058}
6059
Vladimir Markodc151b22015-10-15 18:02:30 +01006060HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
6061 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffraybdb2ecc2018-09-18 14:33:55 +01006062 ArtMethod* method ATTRIBUTE_UNUSED) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08006063 // On MIPS64 we support all dispatch types.
6064 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01006065}
6066
// Emits a static/direct call: first materializes the callee ArtMethod* (or an
// entrypoint) per the invoke's MethodLoadKind, then transfers control per its
// CodePtrLocation, and records the safepoint. The 0x5678 immediates below are
// placeholders later fixed up via the associated PcRelativePatchInfo records.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: reuse the current method, passed as a special input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      // AOT boot image: PC-relative address of the target method, patched at
      // link time (auipc-style high part + Daddiu low part).
      DCHECK(GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewBootImageMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* imm16= */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // App AOT referencing the boot image: load the method pointer from the
      // .data.bimg.rel.ro entry at a PC-relative, patched address.
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_offset);
      PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_offset, info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ Lwu(temp.AsRegister<GpuRegister>(), AT, /* imm16= */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry for this method reference
      // (filled in at runtime on first resolution).
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* imm16= */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // JIT: the method's address is known now; load it from the literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch-and-link straight to our own frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()  (Nop fills the Jalr delay slot.)
      __ Jalr(T9);
      __ Nop();
      break;
  }
  // Record the safepoint for this call site.
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
6147
6148void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00006149 // Explicit clinit checks triggered by static invokes must have been pruned by
6150 // art::PrepareForRegisterAllocation.
6151 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006152
6153 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6154 return;
6155 }
6156
6157 LocationSummary* locations = invoke->GetLocations();
6158 codegen_->GenerateStaticOrDirectCall(invoke,
6159 locations->HasTemps()
6160 ? locations->GetTemp(0)
6161 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006162}
6163
// Emits a virtual dispatch: load the receiver's class, index into its embedded
// vtable, and call the method's quick entrypoint through T9.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // The class load above doubles as the implicit null check on the receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9()  (Nop fills the Jalr delay slot.)
  __ Jalr(T9);
  __ Nop();
  // Record the safepoint for this call site.
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
6199
6200void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
6201 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6202 return;
6203 }
6204
6205 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006206 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006207}
6208
// Builds the location summary for HLoadClass. kRuntimeCall kinds get the
// shared runtime-call summary; all other kinds get register in/out locations
// plus slow-path caller-save tuning for read barriers and kBssEntry.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Full runtime call: the type index goes in (and the class comes back in)
    // the first runtime-calling-convention register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Classes already in the boot image never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // kReferrersClass reads the declaring class out of the current method.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
6240
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the class load per the selected LoadKind; the 0x5678 immediates are
// placeholders fixed up via the associated PcRelativePatchInfo records. A slow
// path is appended when a .bss null check and/or a clinit check is required.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Everything is handled by the runtime entrypoint.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes never need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      // AOT boot image: PC-relative address of the class, patched at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* imm16= */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      // App AOT: 32-bit load of the class pointer from .data.bimg.rel.ro.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageRelRoPatch(boot_image_offset);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* imm16= */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root from the type's .bss entry; null until resolved, so a
      // null check (and slow path) is generated below.
      CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, out);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* offset= */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      // JIT referencing a boot-image class: its 32-bit address is known now.
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // JIT: load the GC root from the JIT class table literal.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                         cls->GetTypeIndex(),
                                                         cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled at the top; kInvalid must not reach codegen.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Unresolved .bss entry: branch to the slow path to resolve the class.
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6350
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006351void LocationsBuilderMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6352 InvokeRuntimeCallingConvention calling_convention;
6353 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6354 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, loc, loc);
6355}
6356
// Emits the runtime call that loads the MethodHandle; no inline fast path.
void InstructionCodeGeneratorMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6360
Orion Hodson18259d72018-04-12 11:18:23 +01006361void LocationsBuilderMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
6362 InvokeRuntimeCallingConvention calling_convention;
6363 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6364 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, loc, loc);
6365}
6366
// Emits the runtime call that loads the MethodType; no inline fast path.
void InstructionCodeGeneratorMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6370
David Brazdilcb1c0552015-08-04 16:22:25 +01006371static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006372 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006373}
6374
Alexey Frunze4dda3372015-06-01 18:31:49 -07006375void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6376 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006377 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006378 locations->SetOut(Location::RequiresRegister());
6379}
6380
6381void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6382 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006383 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6384}
6385
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  // No inputs or outputs; the store below only needs the ZERO register.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6389
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Clear the thread-local pending-exception slot by storing ZERO into it.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6393
Alexey Frunze4dda3372015-06-01 18:31:49 -07006394void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006395 HLoadString::LoadKind load_kind = load->GetLoadKind();
6396 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006397 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006398 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006399 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006400 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzef63f5692016-12-13 17:43:11 -08006401 } else {
6402 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006403 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6404 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6405 // Rely on the pResolveString and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006406 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006407 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006408 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07006409 }
6410 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08006411 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006412}
6413
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006414// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6415// move.
6416void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006417 HLoadString::LoadKind load_kind = load->GetLoadKind();
6418 LocationSummary* locations = load->GetLocations();
6419 Location out_loc = locations->Out();
6420 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6421
6422 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006423 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6424 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006425 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006426 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006427 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006428 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006429 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Andreas Gampe3db70682018-12-26 15:12:03 -08006430 __ Daddiu(out, AT, /* imm16= */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006431 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006432 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006433 case HLoadString::LoadKind::kBootImageRelRo: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006434 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006435 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006436 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006437 codegen_->NewBootImageRelRoPatch(boot_image_offset);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006438 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006439 codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006440 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Andreas Gampe3db70682018-12-26 15:12:03 -08006441 __ Lwu(out, AT, /* imm16= */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006442 return;
6443 }
6444 case HLoadString::LoadKind::kBssEntry: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006445 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6446 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6447 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6448 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006449 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, out);
Alexey Frunze15958152017-02-09 19:08:30 -08006450 GenerateGcRootFieldLoad(load,
6451 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006452 out,
Andreas Gampe3db70682018-12-26 15:12:03 -08006453 /* offset= */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006454 kCompilerReadBarrierOption,
6455 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006456 SlowPathCodeMIPS64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00006457 new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006458 codegen_->AddSlowPath(slow_path);
6459 __ Beqzc(out, slow_path->GetEntryLabel());
6460 __ Bind(slow_path->GetExitLabel());
6461 return;
6462 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006463 case HLoadString::LoadKind::kJitBootImageAddress: {
6464 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
6465 DCHECK_NE(address, 0u);
6466 __ LoadLiteral(out,
6467 kLoadUnsignedWord,
6468 codegen_->DeduplicateBootImageAddressLiteral(address));
6469 return;
6470 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006471 case HLoadString::LoadKind::kJitTableAddress:
6472 __ LoadLiteral(out,
6473 kLoadUnsignedWord,
6474 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6475 load->GetStringIndex(),
6476 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006477 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006478 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006479 default:
6480 break;
6481 }
6482
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006483 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006484 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006485 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006486 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006487 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6488 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6489 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006490}
6491
Alexey Frunze4dda3372015-06-01 18:31:49 -07006492void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006493 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006494 locations->SetOut(Location::ConstantLocation(constant));
6495}
6496
// No code emitted here: constants are materialized by their users.
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6500
6501void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006502 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6503 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006504 InvokeRuntimeCallingConvention calling_convention;
6505 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6506}
6507
6508void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006509 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006510 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006511 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006512 if (instruction->IsEnter()) {
6513 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6514 } else {
6515 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6516 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006517}
6518
6519void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6520 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006521 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006522 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006523 case DataType::Type::kInt32:
6524 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006525 locations->SetInAt(0, Location::RequiresRegister());
6526 locations->SetInAt(1, Location::RequiresRegister());
6527 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6528 break;
6529
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006530 case DataType::Type::kFloat32:
6531 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006532 locations->SetInAt(0, Location::RequiresFpuRegister());
6533 locations->SetInAt(1, Location::RequiresFpuRegister());
6534 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6535 break;
6536
6537 default:
6538 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6539 }
6540}
6541
6542void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006543 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006544 LocationSummary* locations = instruction->GetLocations();
6545
6546 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006547 case DataType::Type::kInt32:
6548 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006549 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6550 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6551 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006552 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006553 __ MulR6(dst, lhs, rhs);
6554 else
6555 __ Dmul(dst, lhs, rhs);
6556 break;
6557 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006558 case DataType::Type::kFloat32:
6559 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006560 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6561 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6562 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006563 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006564 __ MulS(dst, lhs, rhs);
6565 else
6566 __ MulD(dst, lhs, rhs);
6567 break;
6568 }
6569 default:
6570 LOG(FATAL) << "Unexpected mul type " << type;
6571 }
6572}
6573
6574void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6575 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006576 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006577 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006578 case DataType::Type::kInt32:
6579 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006580 locations->SetInAt(0, Location::RequiresRegister());
6581 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6582 break;
6583
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006584 case DataType::Type::kFloat32:
6585 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006586 locations->SetInAt(0, Location::RequiresFpuRegister());
6587 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6588 break;
6589
6590 default:
6591 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6592 }
6593}
6594
6595void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006596 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006597 LocationSummary* locations = instruction->GetLocations();
6598
6599 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006600 case DataType::Type::kInt32:
6601 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006602 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6603 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006604 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006605 __ Subu(dst, ZERO, src);
6606 else
6607 __ Dsubu(dst, ZERO, src);
6608 break;
6609 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006610 case DataType::Type::kFloat32:
6611 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006612 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6613 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006614 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006615 __ NegS(dst, src);
6616 else
6617 __ NegD(dst, src);
6618 break;
6619 }
6620 default:
6621 LOG(FATAL) << "Unexpected neg type " << type;
6622 }
6623}
6624
6625void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006626 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6627 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006628 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006629 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006630 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6631 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006632}
6633
6634void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markob5461632018-10-15 14:24:21 +01006635 // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
6636 QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
Goran Jakovljevic854df412017-06-27 14:41:39 +02006637 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006638 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006639 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006640}
6641
6642void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006643 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6644 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006645 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07006646 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006647 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006648}
6649
// Calls the allocation entrypoint stored on the instruction.
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
6654
6655void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006656 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006657 locations->SetInAt(0, Location::RequiresRegister());
6658 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6659}
6660
6661void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006662 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006663 LocationSummary* locations = instruction->GetLocations();
6664
6665 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006666 case DataType::Type::kInt32:
6667 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006668 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6669 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6670 __ Nor(dst, src, ZERO);
6671 break;
6672 }
6673
6674 default:
6675 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6676 }
6677}
6678
6679void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006680 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006681 locations->SetInAt(0, Location::RequiresRegister());
6682 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6683}
6684
6685void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6686 LocationSummary* locations = instruction->GetLocations();
6687 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6688 locations->InAt(0).AsRegister<GpuRegister>(),
6689 1);
6690}
6691
6692void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006693 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6694 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006695}
6696
Calin Juravle2ae48182016-03-16 14:05:09 +00006697void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
6698 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006699 return;
6700 }
6701 Location obj = instruction->GetLocations()->InAt(0);
6702
6703 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006704 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006705}
6706
Calin Juravle2ae48182016-03-16 14:05:09 +00006707void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006708 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006709 new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006710 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006711
6712 Location obj = instruction->GetLocations()->InAt(0);
6713
6714 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6715}
6716
// Delegates to the codegen helper, which picks the implicit or explicit
// null-check strategy.
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
6720
// Delegates to the shared binary-operation location logic.
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
6724
// Delegates to the shared binary-operation code emission.
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
6728
// Parallel moves never reach the locations builder; hitting this is a bug.
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6732
// Emits a parallel move via the move resolver. When the move immediately
// precedes a loop's suspend check, loop-phi spill slots are cleared from
// the suspend check's stack map first.
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6743
6744void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006745 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006746 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6747 if (location.IsStackSlot()) {
6748 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6749 } else if (location.IsDoubleStackSlot()) {
6750 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6751 }
6752 locations->SetOut(location);
6753}
6754
// No code: the calling convention already placed the parameter.
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
6759
6760void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
6761 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006762 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006763 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
6764}
6765
// No code: the method pointer is already in its dedicated register.
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
6770
6771void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006772 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006773 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006774 locations->SetInAt(i, Location::Any());
6775 }
6776 locations->SetOut(Location::Any());
6777}
6778
// Phis generate no code; reaching this visitor is a compiler bug.
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6782
6783void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006784 DataType::Type type = rem->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006785 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006786 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6787 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006788 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006789
6790 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006791 case DataType::Type::kInt32:
6792 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006793 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006794 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006795 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6796 break;
6797
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006798 case DataType::Type::kFloat32:
6799 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006800 InvokeRuntimeCallingConvention calling_convention;
6801 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6802 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6803 locations->SetOut(calling_convention.GetReturnLocation(type));
6804 break;
6805 }
6806
6807 default:
6808 LOG(FATAL) << "Unexpected rem type " << type;
6809 }
6810}
6811
6812void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006813 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006814
6815 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006816 case DataType::Type::kInt32:
6817 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07006818 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006819 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006820
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006821 case DataType::Type::kFloat32:
6822 case DataType::Type::kFloat64: {
6823 QuickEntrypointEnum entrypoint =
6824 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescufc734082016-07-19 17:18:07 +01006825 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006826 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00006827 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6828 } else {
6829 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6830 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006831 break;
6832 }
6833 default:
6834 LOG(FATAL) << "Unexpected rem type " << type;
6835 }
6836}
6837
Aart Bik1f8d51b2018-02-15 10:42:37 -08006838static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
6839 LocationSummary* locations = new (allocator) LocationSummary(minmax);
6840 switch (minmax->GetResultType()) {
6841 case DataType::Type::kInt32:
6842 case DataType::Type::kInt64:
6843 locations->SetInAt(0, Location::RequiresRegister());
6844 locations->SetInAt(1, Location::RequiresRegister());
6845 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6846 break;
6847 case DataType::Type::kFloat32:
6848 case DataType::Type::kFloat64:
6849 locations->SetInAt(0, Location::RequiresFpuRegister());
6850 locations->SetInAt(1, Location::RequiresFpuRegister());
6851 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6852 break;
6853 default:
6854 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
6855 }
6856}
6857
Aart Bik351df3e2018-03-07 11:54:57 -08006858void InstructionCodeGeneratorMIPS64::GenerateMinMaxInt(LocationSummary* locations, bool is_min) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08006859 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6860 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
6861 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
6862
6863 if (lhs == rhs) {
6864 if (out != lhs) {
6865 __ Move(out, lhs);
6866 }
6867 } else {
6868 // Some architectures, such as ARM and MIPS (prior to r6), have a
6869 // conditional move instruction which only changes the target
6870 // (output) register if the condition is true (MIPS prior to r6 had
6871 // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
6872 // change the target (output) register. If the condition is true the
6873 // output register gets the contents of the "rs" register; otherwise,
6874 // the output register is set to zero. One consequence of this is
6875 // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
6876 // needs to use a pair of SELEQZ/SELNEZ instructions. After
6877 // executing this pair of instructions one of the output registers
6878 // from the pair will necessarily contain zero. Then the code ORs the
6879 // output registers from the SELEQZ/SELNEZ instructions to get the
6880 // final result.
6881 //
6882 // The initial test to see if the output register is same as the
6883 // first input register is needed to make sure that value in the
6884 // first input register isn't clobbered before we've finished
6885 // computing the output value. The logic in the corresponding else
6886 // clause performs the same task but makes sure the second input
6887 // register isn't clobbered in the event that it's the same register
6888 // as the output register; the else clause also handles the case
6889 // where the output register is distinct from both the first, and the
6890 // second input registers.
6891 if (out == lhs) {
6892 __ Slt(AT, rhs, lhs);
6893 if (is_min) {
6894 __ Seleqz(out, lhs, AT);
6895 __ Selnez(AT, rhs, AT);
6896 } else {
6897 __ Selnez(out, lhs, AT);
6898 __ Seleqz(AT, rhs, AT);
6899 }
6900 } else {
6901 __ Slt(AT, lhs, rhs);
6902 if (is_min) {
6903 __ Seleqz(out, rhs, AT);
6904 __ Selnez(AT, lhs, AT);
6905 } else {
6906 __ Selnez(out, rhs, AT);
6907 __ Seleqz(AT, lhs, AT);
6908 }
6909 }
6910 __ Or(out, out, AT);
6911 }
6912}
6913
void InstructionCodeGeneratorMIPS64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  // Emits code for Math.min/Math.max on float or double operands with Java
  // NaN semantics: if either input is a NaN, a NaN is returned.
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch register for the NaN-propagation path: use `out` when it does not
  // alias either input (so the inputs survive the select), else fall back to FTMP.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    // CMP.UN: condition holds iff a and b are unordered (at least one NaN).
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    // Same sequence as above, using the single-precision instruction forms.
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
6981
Aart Bik351df3e2018-03-07 11:54:57 -08006982void InstructionCodeGeneratorMIPS64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
6983 DataType::Type type = minmax->GetResultType();
6984 switch (type) {
6985 case DataType::Type::kInt32:
6986 case DataType::Type::kInt64:
6987 GenerateMinMaxInt(minmax->GetLocations(), is_min);
6988 break;
6989 case DataType::Type::kFloat32:
6990 case DataType::Type::kFloat64:
6991 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
6992 break;
6993 default:
6994 LOG(FATAL) << "Unexpected type for HMinMax " << type;
6995 }
6996}
6997
// HMin and HMax share register constraints (CreateMinMaxLocations) and
// code generation (GenerateMinMax); only the is_min flag differs.
void LocationsBuilderMIPS64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}

void InstructionCodeGeneratorMIPS64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}

void LocationsBuilderMIPS64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}

void InstructionCodeGeneratorMIPS64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
7013
Aart Bik3dad3412018-02-28 12:01:46 -08007014void LocationsBuilderMIPS64::VisitAbs(HAbs* abs) {
7015 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
7016 switch (abs->GetResultType()) {
7017 case DataType::Type::kInt32:
7018 case DataType::Type::kInt64:
7019 locations->SetInAt(0, Location::RequiresRegister());
7020 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7021 break;
7022 case DataType::Type::kFloat32:
7023 case DataType::Type::kFloat64:
7024 locations->SetInAt(0, Location::RequiresFpuRegister());
7025 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7026 break;
7027 default:
7028 LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
7029 }
7030}
7031
void InstructionCodeGeneratorMIPS64::VisitAbs(HAbs* abs) {
  // Emits code for Math.abs. Integer abs uses the branch-free sign-mask
  // idiom; floating-point abs is a single instruction.
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // AT = in >> 31 (arithmetic): 0 for non-negative, all ones for negative.
      __ Sra(AT, in, 31);
      // out = (in ^ mask) - mask, i.e. abs(in).
      __ Xor(out, in, AT);
      __ Subu(out, out, AT);
      break;
    }
    case DataType::Type::kInt64: {
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // Same sign-mask idiom; Dsra32 by 31 yields an arithmetic shift of 63,
      // producing the 64-bit sign mask.
      __ Dsra32(AT, in, 31);
      __ Xor(out, in, AT);
      __ Dsubu(out, out, AT);
      break;
    }
    case DataType::Type::kFloat32: {
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsS(out, in);
      break;
    }
    case DataType::Type::kFloat64: {
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsD(out, in);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
  }
}
7067
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // A constructor fence needs no registers.
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A StoreStore barrier is emitted for the constructor fence.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A memory barrier needs no registers.
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind requested by the HIR instruction.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
7084
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  // The returned value must be placed in the type-appropriate return register.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
  DataType::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}

void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  // No value to place anywhere.
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
7102
// Rotation and shift instructions all delegate to the common HandleShift
// helper for both location building and code generation.
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
7126
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

// Static field accesses share the instance-field helpers; the field info
// carries the offset and volatility.
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Pass whether the stored value may be null; HandleFieldSet uses this hint.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
7150
// Unresolved field accesses (field could not be resolved at compile time)
// are routed through the runtime using the field-access calling convention;
// the shared CodeGenerator helpers build the locations and emit the call.
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
7218
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // A suspend check only calls the runtime on the slow path.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
7228
7229void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
7230 HBasicBlock* block = instruction->GetBlock();
7231 if (block->GetLoopInformation() != nullptr) {
7232 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
7233 // The back edge will generate the suspend check.
7234 return;
7235 }
7236 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
7237 // The goto will generate the suspend check.
7238 return;
7239 }
7240 GenerateSuspendCheck(instruction, nullptr);
7241}
7242
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  // Throw calls into the runtime; the exception object goes in the first
  // runtime-call argument register.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  // Deliver the exception through the quick runtime entrypoint.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
7254
7255void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007256 DataType::Type input_type = conversion->GetInputType();
7257 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01007258 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
7259 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07007260
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007261 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
7262 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07007263 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
7264 }
7265
Vladimir Markoca6fff82017-10-03 14:49:14 +01007266 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007267
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007268 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007269 locations->SetInAt(0, Location::RequiresFpuRegister());
7270 } else {
7271 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07007272 }
7273
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007274 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007275 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007276 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007277 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007278 }
7279}
7280
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  // Emits code for all explicit primitive conversions:
  // integral<->integral, integral->FP, FP->integral, and FP<->FP.
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend the low 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend the low 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    // int/long -> float/double: move to the FPU scratch register, then convert.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    // float/double -> int/long: truncate in the FPU scratch register, then
    // move the result to the core destination register.
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    // float <-> double.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
7388
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7414
// All comparison conditions (signed, and unsigned Below/Above variants)
// share the HandleCondition helper for both locations and code generation.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7494
// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  // The switch value is the only input; it must live in a core register.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
7501
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Lowers a packed switch as a chain of compare-and-branch instructions.
  // The value is first biased by -lower_bound into TMP, and then the biased
  // value is repeatedly decremented so each case tests against zero.
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle two case values per iteration: decrement by 2, then a "< next"
  // and an "== next" test.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7537
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Lowers a packed switch via an in-code jump table of 32-bit offsets,
  // one entry per case, indexed by (value - lower_bound).
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // TMP = AT + (TMP << 2): each table entry is 4 bytes (loaded with Lw below).
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
7568
7569void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7570 int32_t lower_bound = switch_instr->GetStartValue();
7571 uint32_t num_entries = switch_instr->GetNumEntries();
7572 LocationSummary* locations = switch_instr->GetLocations();
7573 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7574 HBasicBlock* switch_block = switch_instr->GetBlock();
7575 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7576
7577 if (num_entries > kPackedSwitchJumpTableThreshold) {
7578 GenTableBasedPackedSwitch(value_reg,
7579 lower_bound,
7580 num_entries,
7581 switch_block,
7582 default_block);
7583 } else {
7584 GenPackedSwitchWithCompares(value_reg,
7585 lower_bound,
7586 num_entries,
7587 switch_block,
7588 default_block);
7589 }
7590}
7591
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  // Input: the class pointer; output: the method pointer read from its tables.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
7598
Chris Larsenc9905a62017-03-13 17:06:18 -07007599void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7600 LocationSummary* locations = instruction->GetLocations();
7601 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
7602 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7603 instruction->GetIndex(), kMips64PointerSize).SizeValue();
7604 __ LoadFromOffset(kLoadDoubleword,
7605 locations->Out().AsRegister<GpuRegister>(),
7606 locations->InAt(0).AsRegister<GpuRegister>(),
7607 method_offset);
7608 } else {
7609 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
7610 instruction->GetIndex(), kMips64PointerSize));
7611 __ LoadFromOffset(kLoadDoubleword,
7612 locations->Out().AsRegister<GpuRegister>(),
7613 locations->InAt(0).AsRegister<GpuRegister>(),
7614 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
7615 __ LoadFromOffset(kLoadDoubleword,
7616 locations->Out().AsRegister<GpuRegister>(),
7617 locations->Out().AsRegister<GpuRegister>(),
7618 method_offset);
7619 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007620}
7621
// HIntermediateAddress is not generated for MIPS64; reaching these visitors
// indicates a graph-construction error.
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7631
Alexey Frunze4dda3372015-06-01 18:31:49 -07007632} // namespace mips64
7633} // namespace art