/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_mips64.h"

#include "base/bit_utils.h"
#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "memory_region.h"
#include "thread.h"

namespace art {
namespace mips64 {

void Mips64Assembler::FinalizeCode() {
  for (auto& exception_block : exception_blocks_) {
    EmitExceptionPoll(&exception_block);
  }
  PromoteBranches();
}

void Mips64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  EmitBranches();
  Assembler::FinalizeInstructions(region);
  PatchCFI();
}

void Mips64Assembler::PatchCFI() {
  if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
    return;
  }

  typedef DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC DelayedAdvancePC;
  const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
  const std::vector<uint8_t>& old_stream = data.first;
  const std::vector<DelayedAdvancePC>& advances = data.second;

  // Refill our data buffer with patched opcodes.
  cfi().ReserveCFIStream(old_stream.size() + advances.size() + 16);
  size_t stream_pos = 0;
  for (const DelayedAdvancePC& advance : advances) {
    DCHECK_GE(advance.stream_pos, stream_pos);
    // Copy old data up to the point where advance was issued.
    cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
    stream_pos = advance.stream_pos;
    // Insert the advance command with its final offset.
    size_t final_pc = GetAdjustedPosition(advance.pc);
    cfi().AdvancePC(final_pc);
  }
  // Copy the final segment if any.
  cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
}

void Mips64Assembler::EmitBranches() {
  CHECK(!overwriting_);
  // Switch from appending instructions at the end of the buffer to overwriting
  // existing instructions (branch placeholders) in the buffer.
  overwriting_ = true;
  for (auto& branch : branches_) {
    EmitBranch(&branch);
  }
  overwriting_ = false;
}

void Mips64Assembler::Emit(uint32_t value) {
  if (overwriting_) {
    // Branches to labels are emitted into their placeholders here.
    buffer_.Store<uint32_t>(overwrite_location_, value);
    overwrite_location_ += sizeof(uint32_t);
  } else {
    // Other instructions are simply appended at the end here.
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    buffer_.Emit<uint32_t>(value);
  }
}

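// The Emit* helpers below pack fields into a 32-bit instruction word from the
// most significant bit down, following the standard MIPS layouts:
//   R-type: opcode(6) | rs(5) | rt(5) | rd(5) | shamt(5) | funct(6)
//   I-type: opcode(6) | rs(5) | rt(5) | imm(16)
// The compact R6 forms widen the immediate instead (21 or 26 bits, see EmitI21
// and EmitI26). The k*Shift constants name the bit positions of these fields.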
void Mips64Assembler::EmitR(int opcode, GpuRegister rs, GpuRegister rt, GpuRegister rd,
                            int shamt, int funct) {
  CHECK_NE(rs, kNoGpuRegister);
  CHECK_NE(rt, kNoGpuRegister);
  CHECK_NE(rd, kNoGpuRegister);
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      static_cast<uint32_t>(rs) << kRsShift |
                      static_cast<uint32_t>(rt) << kRtShift |
                      static_cast<uint32_t>(rd) << kRdShift |
                      shamt << kShamtShift |
                      funct;
  Emit(encoding);
}

void Mips64Assembler::EmitRsd(int opcode, GpuRegister rs, GpuRegister rd,
                              int shamt, int funct) {
  CHECK_NE(rs, kNoGpuRegister);
  CHECK_NE(rd, kNoGpuRegister);
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      static_cast<uint32_t>(rs) << kRsShift |
                      static_cast<uint32_t>(ZERO) << kRtShift |
                      static_cast<uint32_t>(rd) << kRdShift |
                      shamt << kShamtShift |
                      funct;
  Emit(encoding);
}

void Mips64Assembler::EmitRtd(int opcode, GpuRegister rt, GpuRegister rd,
                              int shamt, int funct) {
  CHECK_NE(rt, kNoGpuRegister);
  CHECK_NE(rd, kNoGpuRegister);
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      static_cast<uint32_t>(ZERO) << kRsShift |
                      static_cast<uint32_t>(rt) << kRtShift |
                      static_cast<uint32_t>(rd) << kRdShift |
                      shamt << kShamtShift |
                      funct;
  Emit(encoding);
}

void Mips64Assembler::EmitI(int opcode, GpuRegister rs, GpuRegister rt, uint16_t imm) {
  CHECK_NE(rs, kNoGpuRegister);
  CHECK_NE(rt, kNoGpuRegister);
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      static_cast<uint32_t>(rs) << kRsShift |
                      static_cast<uint32_t>(rt) << kRtShift |
                      imm;
  Emit(encoding);
}

void Mips64Assembler::EmitI21(int opcode, GpuRegister rs, uint32_t imm21) {
  CHECK_NE(rs, kNoGpuRegister);
  CHECK(IsUint<21>(imm21)) << imm21;
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      static_cast<uint32_t>(rs) << kRsShift |
                      imm21;
  Emit(encoding);
}

void Mips64Assembler::EmitI26(int opcode, uint32_t imm26) {
  CHECK(IsUint<26>(imm26)) << imm26;
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift | imm26;
  Emit(encoding);
}

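// COP1 (floating-point) encodings: the 5-bit fmt field selects the data type.
// The call sites below pass the standard values directly: 0x10 = S (single),
// 0x11 = D (double), 0x14 = W (32-bit int), 0x15 = L (64-bit int).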
void Mips64Assembler::EmitFR(int opcode, int fmt, FpuRegister ft, FpuRegister fs, FpuRegister fd,
                             int funct) {
  CHECK_NE(ft, kNoFpuRegister);
  CHECK_NE(fs, kNoFpuRegister);
  CHECK_NE(fd, kNoFpuRegister);
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      fmt << kFmtShift |
                      static_cast<uint32_t>(ft) << kFtShift |
                      static_cast<uint32_t>(fs) << kFsShift |
                      static_cast<uint32_t>(fd) << kFdShift |
                      funct;
  Emit(encoding);
}

void Mips64Assembler::EmitFI(int opcode, int fmt, FpuRegister ft, uint16_t imm) {
  CHECK_NE(ft, kNoFpuRegister);
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      fmt << kFmtShift |
                      static_cast<uint32_t>(ft) << kFtShift |
                      imm;
  Emit(encoding);
}

void Mips64Assembler::Addu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x21);
}

void Mips64Assembler::Addiu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x9, rs, rt, imm16);
}

void Mips64Assembler::Daddu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2d);
}

void Mips64Assembler::Daddiu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x19, rs, rt, imm16);
}

void Mips64Assembler::Subu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x23);
}

void Mips64Assembler::Dsubu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2f);
}

void Mips64Assembler::MulR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x18);
}

void Mips64Assembler::MuhR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x18);
}

void Mips64Assembler::DivR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1a);
}

void Mips64Assembler::ModR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1a);
}

void Mips64Assembler::DivuR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1b);
}

void Mips64Assembler::ModuR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1b);
}

void Mips64Assembler::Dmul(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1c);
}

void Mips64Assembler::Dmuh(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1c);
}

void Mips64Assembler::Ddiv(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1e);
}

void Mips64Assembler::Dmod(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1e);
}

void Mips64Assembler::Ddivu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1f);
}

void Mips64Assembler::Dmodu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1f);
}

void Mips64Assembler::And(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x24);
}

void Mips64Assembler::Andi(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xc, rs, rt, imm16);
}

void Mips64Assembler::Or(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x25);
}

void Mips64Assembler::Ori(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xd, rs, rt, imm16);
}

void Mips64Assembler::Xor(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x26);
}

void Mips64Assembler::Xori(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xe, rs, rt, imm16);
}

void Mips64Assembler::Nor(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x27);
}

void Mips64Assembler::Bitswap(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x0, 0x20);
}

void Mips64Assembler::Dbitswap(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x0, 0x24);
}

void Mips64Assembler::Seb(GpuRegister rd, GpuRegister rt) {
  EmitR(0x1f, static_cast<GpuRegister>(0), rt, rd, 0x10, 0x20);
}

void Mips64Assembler::Seh(GpuRegister rd, GpuRegister rt) {
  EmitR(0x1f, static_cast<GpuRegister>(0), rt, rd, 0x18, 0x20);
}

void Mips64Assembler::Dsbh(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x2, 0x24);
}

void Mips64Assembler::Dshd(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x5, 0x24);
}

void Mips64Assembler::Dext(GpuRegister rt, GpuRegister rs, int pos, int size_less_one) {
  DCHECK(0 <= pos && pos < 32) << pos;
  DCHECK(0 <= size_less_one && size_less_one < 32) << size_less_one;
  EmitR(0x1f, rs, rt, static_cast<GpuRegister>(size_less_one), pos, 3);
}

void Mips64Assembler::Wsbh(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 2, 0x20);
}

void Mips64Assembler::Sc(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x26);
}

void Mips64Assembler::Scd(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x27);
}

void Mips64Assembler::Ll(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x36);
}

void Mips64Assembler::Lld(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x37);
}

void Mips64Assembler::Sll(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x00);
}

void Mips64Assembler::Srl(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x02);
}

void Mips64Assembler::Rotr(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(1), rt, rd, shamt, 0x02);
}

void Mips64Assembler::Sra(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x03);
}

void Mips64Assembler::Sllv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x04);
}

void Mips64Assembler::Rotrv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 1, 0x06);
}

void Mips64Assembler::Srlv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x06);
}

void Mips64Assembler::Srav(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x07);
}

void Mips64Assembler::Dsll(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x38);
}

void Mips64Assembler::Dsrl(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3a);
}

void Mips64Assembler::Drotr(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(1), rt, rd, shamt, 0x3a);
}

void Mips64Assembler::Dsra(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3b);
}

void Mips64Assembler::Dsll32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3c);
}

void Mips64Assembler::Dsrl32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3e);
}

void Mips64Assembler::Drotr32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(1), rt, rd, shamt, 0x3e);
}

void Mips64Assembler::Dsra32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3f);
}

void Mips64Assembler::Dsllv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x14);
}

void Mips64Assembler::Dsrlv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x16);
}

void Mips64Assembler::Drotrv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 1, 0x16);
}

void Mips64Assembler::Dsrav(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x17);
}

void Mips64Assembler::Lb(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x20, rs, rt, imm16);
}

void Mips64Assembler::Lh(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x21, rs, rt, imm16);
}

void Mips64Assembler::Lw(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x23, rs, rt, imm16);
}

void Mips64Assembler::Ld(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x37, rs, rt, imm16);
}

void Mips64Assembler::Lbu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x24, rs, rt, imm16);
}

void Mips64Assembler::Lhu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x25, rs, rt, imm16);
}

void Mips64Assembler::Lwu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x27, rs, rt, imm16);
}

void Mips64Assembler::Lui(GpuRegister rt, uint16_t imm16) {
  EmitI(0xf, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Dahi(GpuRegister rs, uint16_t imm16) {
  EmitI(1, rs, static_cast<GpuRegister>(6), imm16);
}

void Mips64Assembler::Dati(GpuRegister rs, uint16_t imm16) {
  EmitI(1, rs, static_cast<GpuRegister>(0x1e), imm16);
}

void Mips64Assembler::Sync(uint32_t stype) {
  EmitR(0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0),
        static_cast<GpuRegister>(0), stype & 0x1f, 0xf);
}

void Mips64Assembler::Sb(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x28, rs, rt, imm16);
}

void Mips64Assembler::Sh(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x29, rs, rt, imm16);
}

void Mips64Assembler::Sw(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x2b, rs, rt, imm16);
}

void Mips64Assembler::Sd(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x3f, rs, rt, imm16);
}

void Mips64Assembler::Slt(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2a);
}

void Mips64Assembler::Sltu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2b);
}

void Mips64Assembler::Slti(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xa, rs, rt, imm16);
}

void Mips64Assembler::Sltiu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xb, rs, rt, imm16);
}

void Mips64Assembler::Seleqz(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x35);
}

void Mips64Assembler::Selnez(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x37);
}

void Mips64Assembler::Clz(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x10);
}

void Mips64Assembler::Clo(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x11);
}

void Mips64Assembler::Dclz(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x12);
}

void Mips64Assembler::Dclo(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x13);
}

void Mips64Assembler::Jalr(GpuRegister rd, GpuRegister rs) {
  EmitR(0, rs, static_cast<GpuRegister>(0), rd, 0, 0x09);
}

void Mips64Assembler::Jalr(GpuRegister rs) {
  Jalr(RA, rs);
}

void Mips64Assembler::Jr(GpuRegister rs) {
  Jalr(ZERO, rs);
}

void Mips64Assembler::Auipc(GpuRegister rs, uint16_t imm16) {
  EmitI(0x3B, rs, static_cast<GpuRegister>(0x1E), imm16);
}

void Mips64Assembler::Addiupc(GpuRegister rs, uint32_t imm19) {
  CHECK(IsUint<19>(imm19)) << imm19;
  EmitI21(0x3B, rs, imm19);
}

void Mips64Assembler::Bc(uint32_t imm26) {
  EmitI26(0x32, imm26);
}

void Mips64Assembler::Jic(GpuRegister rt, uint16_t imm16) {
  EmitI(0x36, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Jialc(GpuRegister rt, uint16_t imm16) {
  EmitI(0x3E, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Bltc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x17, rs, rt, imm16);
}

void Mips64Assembler::Bltzc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x17, rt, rt, imm16);
}

void Mips64Assembler::Bgtzc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x17, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Bgec(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x16, rs, rt, imm16);
}

void Mips64Assembler::Bgezc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x16, rt, rt, imm16);
}

void Mips64Assembler::Blezc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x16, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Bltuc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x7, rs, rt, imm16);
}

void Mips64Assembler::Bgeuc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x6, rs, rt, imm16);
}

void Mips64Assembler::Beqc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x8, std::min(rs, rt), std::max(rs, rt), imm16);
}

void Mips64Assembler::Bnec(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x18, std::min(rs, rt), std::max(rs, rt), imm16);
}

void Mips64Assembler::Beqzc(GpuRegister rs, uint32_t imm21) {
  CHECK_NE(rs, ZERO);
  EmitI21(0x36, rs, imm21);
}

void Mips64Assembler::Bnezc(GpuRegister rs, uint32_t imm21) {
  CHECK_NE(rs, ZERO);
  EmitI21(0x3E, rs, imm21);
}

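// Emits the R6 compact branch matching 'cond'. Conditions without a dedicated
// two-register instruction (<= and >) are synthesized by swapping the operands
// of bgec and bltc.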
void Mips64Assembler::EmitBcondc(BranchCondition cond,
                                 GpuRegister rs,
                                 GpuRegister rt,
                                 uint32_t imm16_21) {
  switch (cond) {
    case kCondLT:
      Bltc(rs, rt, imm16_21);
      break;
    case kCondGE:
      Bgec(rs, rt, imm16_21);
      break;
    case kCondLE:
      Bgec(rt, rs, imm16_21);
      break;
    case kCondGT:
      Bltc(rt, rs, imm16_21);
      break;
    case kCondLTZ:
      CHECK_EQ(rt, ZERO);
      Bltzc(rs, imm16_21);
      break;
    case kCondGEZ:
      CHECK_EQ(rt, ZERO);
      Bgezc(rs, imm16_21);
      break;
    case kCondLEZ:
      CHECK_EQ(rt, ZERO);
      Blezc(rs, imm16_21);
      break;
    case kCondGTZ:
      CHECK_EQ(rt, ZERO);
      Bgtzc(rs, imm16_21);
      break;
    case kCondEQ:
      Beqc(rs, rt, imm16_21);
      break;
    case kCondNE:
      Bnec(rs, rt, imm16_21);
      break;
    case kCondEQZ:
      CHECK_EQ(rt, ZERO);
      Beqzc(rs, imm16_21);
      break;
    case kCondNEZ:
      CHECK_EQ(rt, ZERO);
      Bnezc(rs, imm16_21);
      break;
    case kCondLTU:
      Bltuc(rs, rt, imm16_21);
      break;
    case kCondGEU:
      Bgeuc(rs, rt, imm16_21);
      break;
    case kUncond:
      LOG(FATAL) << "Unexpected branch condition " << cond;
      UNREACHABLE();
  }
}

void Mips64Assembler::AddS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x0);
}

void Mips64Assembler::SubS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x1);
}

void Mips64Assembler::MulS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x2);
}

void Mips64Assembler::DivS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x3);
}

void Mips64Assembler::AddD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x0);
}

void Mips64Assembler::SubD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x1);
}

void Mips64Assembler::MulD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x2);
}

void Mips64Assembler::DivD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x3);
}

void Mips64Assembler::SqrtS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x4);
}

void Mips64Assembler::SqrtD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x4);
}

void Mips64Assembler::AbsS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x5);
}

void Mips64Assembler::AbsD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x5);
}

void Mips64Assembler::MovS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x6);
}

void Mips64Assembler::MovD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x6);
}

void Mips64Assembler::NegS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x7);
}

void Mips64Assembler::NegD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x7);
}

void Mips64Assembler::RoundLS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x8);
}

void Mips64Assembler::RoundLD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x8);
}

void Mips64Assembler::RoundWS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xc);
}

void Mips64Assembler::RoundWD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xc);
}

void Mips64Assembler::CeilLS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xa);
}

void Mips64Assembler::CeilLD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xa);
}

void Mips64Assembler::CeilWS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xe);
}

void Mips64Assembler::CeilWD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xe);
}

void Mips64Assembler::FloorLS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xb);
}

void Mips64Assembler::FloorLD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xb);
}

void Mips64Assembler::FloorWS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xf);
}

void Mips64Assembler::FloorWD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xf);
}

void Mips64Assembler::SelS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x10);
}

void Mips64Assembler::SelD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x10);
}

void Mips64Assembler::RintS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x1a);
}

void Mips64Assembler::RintD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x1a);
}

void Mips64Assembler::ClassS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x1b);
}

void Mips64Assembler::ClassD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x1b);
}

void Mips64Assembler::MinS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x1c);
}

void Mips64Assembler::MinD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x1c);
}

void Mips64Assembler::MaxS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x1e);
}

void Mips64Assembler::MaxD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x1e);
}

void Mips64Assembler::Cvtsw(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x14, static_cast<FpuRegister>(0), fs, fd, 0x20);
}

void Mips64Assembler::Cvtdw(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x14, static_cast<FpuRegister>(0), fs, fd, 0x21);
}

void Mips64Assembler::Cvtsd(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x20);
}

void Mips64Assembler::Cvtds(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x21);
}

void Mips64Assembler::Cvtsl(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x15, static_cast<FpuRegister>(0), fs, fd, 0x20);
}

void Mips64Assembler::Cvtdl(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x15, static_cast<FpuRegister>(0), fs, fd, 0x21);
}

void Mips64Assembler::Mfc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x00, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Mtc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x04, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Dmfc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x01, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Dmtc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x05, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Lwc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x31, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Ldc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x35, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Swc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x39, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Sdc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x3d, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Break() {
  EmitR(0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0),
        static_cast<GpuRegister>(0), 0, 0xD);
}

void Mips64Assembler::Nop() {
  EmitR(0x0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0),
        static_cast<GpuRegister>(0), 0, 0x0);
}

void Mips64Assembler::Move(GpuRegister rd, GpuRegister rs) {
  Or(rd, rs, ZERO);
}

void Mips64Assembler::Clear(GpuRegister rd) {
  Move(rd, ZERO);
}

void Mips64Assembler::Not(GpuRegister rd, GpuRegister rs) {
  Nor(rd, rs, ZERO);
}

void Mips64Assembler::LoadConst32(GpuRegister rd, int32_t value) {
  if (IsUint<16>(value)) {
    // Use OR with (unsigned) immediate to encode 16b unsigned int.
    Ori(rd, ZERO, value);
  } else if (IsInt<16>(value)) {
    // Use ADD with (signed) immediate to encode 16b signed int.
    Addiu(rd, ZERO, value);
  } else {
    Lui(rd, value >> 16);
    if (value & 0xFFFF)
      Ori(rd, rd, value);
  }
}

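// Materializes a 64-bit constant, trying 1-2 instruction sequences
// (ori/daddiu/lui/dahi/dati) before falling back to shifted and 3-4 instruction
// forms. The bit31 term compensates for sign extension: lui and other 32-bit
// results are sign-extended to 64 bits, so when bit 31 of the value is set, the
// upper halves added by dahi/dati are pre-incremented to cancel it out. For
// example, loading 0x00000000ABCD0000 should emit "lui rd, 0xabcd" (giving the
// sign-extended 0xFFFFFFFFABCD0000) followed by "dahi rd, 1".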
void Mips64Assembler::LoadConst64(GpuRegister rd, int64_t value) {
  int bit31 = (value & UINT64_C(0x80000000)) != 0;

  // Loads with 1 instruction.
  if (IsUint<16>(value)) {
    Ori(rd, ZERO, value);
  } else if (IsInt<16>(value)) {
    Daddiu(rd, ZERO, value);
  } else if ((value & 0xFFFF) == 0 && IsInt<16>(value >> 16)) {
    Lui(rd, value >> 16);
  } else if (IsInt<32>(value)) {
    // Loads with 2 instructions.
    Lui(rd, value >> 16);
    Ori(rd, rd, value);
  } else if ((value & 0xFFFF0000) == 0 && IsInt<16>(value >> 32)) {
    Ori(rd, ZERO, value);
    Dahi(rd, value >> 32);
  } else if ((value & UINT64_C(0xFFFFFFFF0000)) == 0) {
    Ori(rd, ZERO, value);
    Dati(rd, value >> 48);
  } else if ((value & 0xFFFF) == 0 &&
             (-32768 - bit31) <= (value >> 32) && (value >> 32) <= (32767 - bit31)) {
    Lui(rd, value >> 16);
    Dahi(rd, (value >> 32) + bit31);
  } else if ((value & 0xFFFF) == 0 && ((value >> 31) & 0x1FFFF) == ((0x20000 - bit31) & 0x1FFFF)) {
    Lui(rd, value >> 16);
    Dati(rd, (value >> 48) + bit31);
  } else if (IsPowerOfTwo(value + UINT64_C(1))) {
    int shift_cnt = 64 - CTZ(value + UINT64_C(1));
    Daddiu(rd, ZERO, -1);
    if (shift_cnt < 32) {
      Dsrl(rd, rd, shift_cnt);
    } else {
      Dsrl32(rd, rd, shift_cnt & 31);
    }
  } else {
    int shift_cnt = CTZ(value);
    int64_t tmp = value >> shift_cnt;
    if (IsUint<16>(tmp)) {
      Ori(rd, ZERO, tmp);
      if (shift_cnt < 32) {
        Dsll(rd, rd, shift_cnt);
      } else {
        Dsll32(rd, rd, shift_cnt & 31);
      }
    } else if (IsInt<16>(tmp)) {
      Daddiu(rd, ZERO, tmp);
      if (shift_cnt < 32) {
        Dsll(rd, rd, shift_cnt);
      } else {
        Dsll32(rd, rd, shift_cnt & 31);
      }
    } else if (IsInt<32>(tmp)) {
      // Loads with 3 instructions.
      Lui(rd, tmp >> 16);
      Ori(rd, rd, tmp);
      if (shift_cnt < 32) {
        Dsll(rd, rd, shift_cnt);
      } else {
        Dsll32(rd, rd, shift_cnt & 31);
      }
    } else {
      shift_cnt = 16 + CTZ(value >> 16);
      tmp = value >> shift_cnt;
      if (IsUint<16>(tmp)) {
        Ori(rd, ZERO, tmp);
        if (shift_cnt < 32) {
          Dsll(rd, rd, shift_cnt);
        } else {
          Dsll32(rd, rd, shift_cnt & 31);
        }
        Ori(rd, rd, value);
      } else if (IsInt<16>(tmp)) {
        Daddiu(rd, ZERO, tmp);
        if (shift_cnt < 32) {
          Dsll(rd, rd, shift_cnt);
        } else {
          Dsll32(rd, rd, shift_cnt & 31);
        }
        Ori(rd, rd, value);
      } else {
        // Loads with 3-4 instructions.
        uint64_t tmp2 = value;
        bool used_lui = false;
        if (((tmp2 >> 16) & 0xFFFF) != 0 || (tmp2 & 0xFFFFFFFF) == 0) {
          Lui(rd, tmp2 >> 16);
          used_lui = true;
        }
        if ((tmp2 & 0xFFFF) != 0) {
          if (used_lui) {
            Ori(rd, rd, tmp2);
          } else {
            Ori(rd, ZERO, tmp2);
          }
        }
        if (bit31) {
          tmp2 += UINT64_C(0x100000000);
        }
        if (((tmp2 >> 32) & 0xFFFF) != 0) {
          Dahi(rd, tmp2 >> 32);
        }
        if (tmp2 & UINT64_C(0x800000000000)) {
          tmp2 += UINT64_C(0x1000000000000);
        }
        if ((tmp2 >> 48) != 0) {
          Dati(rd, tmp2 >> 48);
        }
      }
    }
  }
}

void Mips64Assembler::Daddiu64(GpuRegister rt, GpuRegister rs, int64_t value, GpuRegister rtmp) {
  if (IsInt<16>(value)) {
    Daddiu(rt, rs, value);
  } else {
    LoadConst64(rtmp, value);
    Daddu(rt, rs, rtmp);
  }
}

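// Branch bookkeeping. Each Branch records its (old and new) location, target,
// registers and condition, starts out in the shortest form whose offset field
// can reach the target, and may later be promoted to a long multi-instruction
// form by PromoteBranches() if the final distance does not fit.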
void Mips64Assembler::Branch::InitShortOrLong(Mips64Assembler::Branch::OffsetBits offset_size,
                                              Mips64Assembler::Branch::Type short_type,
                                              Mips64Assembler::Branch::Type long_type) {
  type_ = (offset_size <= branch_info_[short_type].offset_size) ? short_type : long_type;
}

void Mips64Assembler::Branch::InitializeType(bool is_call) {
  OffsetBits offset_size = GetOffsetSizeNeeded(location_, target_);
  if (is_call) {
    InitShortOrLong(offset_size, kCall, kLongCall);
  } else if (condition_ == kUncond) {
    InitShortOrLong(offset_size, kUncondBranch, kLongUncondBranch);
  } else {
    if (condition_ == kCondEQZ || condition_ == kCondNEZ) {
      // Special case for beqzc/bnezc with longer offset than in other b<cond>c instructions.
      type_ = (offset_size <= kOffset23) ? kCondBranch : kLongCondBranch;
    } else {
      InitShortOrLong(offset_size, kCondBranch, kLongCondBranch);
    }
  }
  old_type_ = type_;
}

bool Mips64Assembler::Branch::IsNop(BranchCondition condition, GpuRegister lhs, GpuRegister rhs) {
  switch (condition) {
    case kCondLT:
    case kCondGT:
    case kCondNE:
    case kCondLTU:
      return lhs == rhs;
    default:
      return false;
  }
}

bool Mips64Assembler::Branch::IsUncond(BranchCondition condition,
                                       GpuRegister lhs,
                                       GpuRegister rhs) {
  switch (condition) {
    case kUncond:
      return true;
    case kCondGE:
    case kCondLE:
    case kCondEQ:
    case kCondGEU:
      return lhs == rhs;
    default:
      return false;
  }
}

Mips64Assembler::Branch::Branch(uint32_t location, uint32_t target)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(ZERO),
      rhs_reg_(ZERO),
      condition_(kUncond) {
  InitializeType(false);
}

Mips64Assembler::Branch::Branch(uint32_t location,
                                uint32_t target,
                                Mips64Assembler::BranchCondition condition,
                                GpuRegister lhs_reg,
                                GpuRegister rhs_reg)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(lhs_reg),
      rhs_reg_(rhs_reg),
      condition_(condition) {
  CHECK_NE(condition, kUncond);
  switch (condition) {
    case kCondEQ:
    case kCondNE:
    case kCondLT:
    case kCondGE:
    case kCondLE:
    case kCondGT:
    case kCondLTU:
    case kCondGEU:
      CHECK_NE(lhs_reg, ZERO);
      CHECK_NE(rhs_reg, ZERO);
      break;
    case kCondLTZ:
    case kCondGEZ:
    case kCondLEZ:
    case kCondGTZ:
    case kCondEQZ:
    case kCondNEZ:
      CHECK_NE(lhs_reg, ZERO);
      CHECK_EQ(rhs_reg, ZERO);
      break;
    case kUncond:
      UNREACHABLE();
  }
  CHECK(!IsNop(condition, lhs_reg, rhs_reg));
  if (IsUncond(condition, lhs_reg, rhs_reg)) {
    // Branch condition is always true, make the branch unconditional.
    condition_ = kUncond;
  }
  InitializeType(false);
}

Mips64Assembler::Branch::Branch(uint32_t location, uint32_t target, GpuRegister indirect_reg)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(indirect_reg),
      rhs_reg_(ZERO),
      condition_(kUncond) {
  CHECK_NE(indirect_reg, ZERO);
  CHECK_NE(indirect_reg, AT);
  InitializeType(true);
}

Mips64Assembler::BranchCondition Mips64Assembler::Branch::OppositeCondition(
    Mips64Assembler::BranchCondition cond) {
  switch (cond) {
    case kCondLT:
      return kCondGE;
    case kCondGE:
      return kCondLT;
    case kCondLE:
      return kCondGT;
    case kCondGT:
      return kCondLE;
    case kCondLTZ:
      return kCondGEZ;
    case kCondGEZ:
      return kCondLTZ;
    case kCondLEZ:
      return kCondGTZ;
    case kCondGTZ:
      return kCondLEZ;
    case kCondEQ:
      return kCondNE;
    case kCondNE:
      return kCondEQ;
    case kCondEQZ:
      return kCondNEZ;
    case kCondNEZ:
      return kCondEQZ;
    case kCondLTU:
      return kCondGEU;
    case kCondGEU:
      return kCondLTU;
    case kUncond:
      LOG(FATAL) << "Unexpected branch condition " << cond;
  }
  UNREACHABLE();
}

Mips64Assembler::Branch::Type Mips64Assembler::Branch::GetType() const {
  return type_;
}

Mips64Assembler::BranchCondition Mips64Assembler::Branch::GetCondition() const {
  return condition_;
}

GpuRegister Mips64Assembler::Branch::GetLeftRegister() const {
  return lhs_reg_;
}

GpuRegister Mips64Assembler::Branch::GetRightRegister() const {
  return rhs_reg_;
}

uint32_t Mips64Assembler::Branch::GetTarget() const {
  return target_;
}

uint32_t Mips64Assembler::Branch::GetLocation() const {
  return location_;
}

uint32_t Mips64Assembler::Branch::GetOldLocation() const {
  return old_location_;
}

uint32_t Mips64Assembler::Branch::GetLength() const {
  return branch_info_[type_].length;
}

uint32_t Mips64Assembler::Branch::GetOldLength() const {
  return branch_info_[old_type_].length;
}

uint32_t Mips64Assembler::Branch::GetSize() const {
  return GetLength() * sizeof(uint32_t);
}

uint32_t Mips64Assembler::Branch::GetOldSize() const {
  return GetOldLength() * sizeof(uint32_t);
}

uint32_t Mips64Assembler::Branch::GetEndLocation() const {
  return GetLocation() + GetSize();
}

uint32_t Mips64Assembler::Branch::GetOldEndLocation() const {
  return GetOldLocation() + GetOldSize();
}

bool Mips64Assembler::Branch::IsLong() const {
  switch (type_) {
    // Short branches.
    case kUncondBranch:
    case kCondBranch:
    case kCall:
      return false;
    // Long branches.
    case kLongUncondBranch:
    case kLongCondBranch:
    case kLongCall:
      return true;
  }
  UNREACHABLE();
}

bool Mips64Assembler::Branch::IsResolved() const {
  return target_ != kUnresolved;
}

Mips64Assembler::Branch::OffsetBits Mips64Assembler::Branch::GetOffsetSize() const {
  OffsetBits offset_size =
      (type_ == kCondBranch && (condition_ == kCondEQZ || condition_ == kCondNEZ))
          ? kOffset23
          : branch_info_[type_].offset_size;
  return offset_size;
}

Mips64Assembler::Branch::OffsetBits Mips64Assembler::Branch::GetOffsetSizeNeeded(uint32_t location,
                                                                                 uint32_t target) {
  // For unresolved targets assume the shortest encoding
  // (later it will be made longer if needed).
  if (target == kUnresolved)
    return kOffset16;
  int64_t distance = static_cast<int64_t>(target) - location;
  // To simplify calculations in composite branches consisting of multiple instructions
  // bump up the distance by a value larger than the max byte size of a composite branch.
  distance += (distance >= 0) ? kMaxBranchSize : -kMaxBranchSize;
  if (IsInt<kOffset16>(distance))
    return kOffset16;
  else if (IsInt<kOffset18>(distance))
    return kOffset18;
  else if (IsInt<kOffset21>(distance))
    return kOffset21;
  else if (IsInt<kOffset23>(distance))
    return kOffset23;
  else if (IsInt<kOffset28>(distance))
    return kOffset28;
  return kOffset32;
}

void Mips64Assembler::Branch::Resolve(uint32_t target) {
  target_ = target;
}

void Mips64Assembler::Branch::Relocate(uint32_t expand_location, uint32_t delta) {
  if (location_ > expand_location) {
    location_ += delta;
  }
  if (!IsResolved()) {
    return;  // Don't know the target yet.
  }
  if (target_ > expand_location) {
    target_ += delta;
  }
}

void Mips64Assembler::Branch::PromoteToLong() {
  switch (type_) {
    // Short branches.
    case kUncondBranch:
      type_ = kLongUncondBranch;
      break;
    case kCondBranch:
      type_ = kLongCondBranch;
      break;
    case kCall:
      type_ = kLongCall;
      break;
    default:
      // Note: 'type_' is already long.
      break;
  }
  CHECK(IsLong());
}

uint32_t Mips64Assembler::Branch::PromoteIfNeeded(uint32_t max_short_distance) {
  // If the branch is still unresolved or already long, nothing to do.
  if (IsLong() || !IsResolved()) {
    return 0;
  }
  // Promote the short branch to long if the offset size is too small
  // to hold the distance between location_ and target_.
  if (GetOffsetSizeNeeded(location_, target_) > GetOffsetSize()) {
    PromoteToLong();
    uint32_t old_size = GetOldSize();
    uint32_t new_size = GetSize();
    CHECK_GT(new_size, old_size);
    return new_size - old_size;
  }
  // The following logic is for debugging/testing purposes.
  // Promote some short branches to long when it's not really required.
  if (UNLIKELY(max_short_distance != std::numeric_limits<uint32_t>::max())) {
    int64_t distance = static_cast<int64_t>(target_) - location_;
    distance = (distance >= 0) ? distance : -distance;
    if (distance >= max_short_distance) {
      PromoteToLong();
      uint32_t old_size = GetOldSize();
      uint32_t new_size = GetSize();
      CHECK_GT(new_size, old_size);
      return new_size - old_size;
    }
  }
  return 0;
}

uint32_t Mips64Assembler::Branch::GetOffsetLocation() const {
  return location_ + branch_info_[type_].instr_offset * sizeof(uint32_t);
}

uint32_t Mips64Assembler::Branch::GetOffset() const {
  CHECK(IsResolved());
  uint32_t ofs_mask = 0xFFFFFFFF >> (32 - GetOffsetSize());
  // Calculate the byte distance between instructions and also account for
  // different PC-relative origins.
  uint32_t offset = target_ - GetOffsetLocation() - branch_info_[type_].pc_org * sizeof(uint32_t);
  // Prepare the offset for encoding into the instruction(s).
  offset = (offset & ofs_mask) >> branch_info_[type_].offset_shift;
  return offset;
}

Mips64Assembler::Branch* Mips64Assembler::GetBranch(uint32_t branch_id) {
  CHECK_LT(branch_id, branches_.size());
  return &branches_[branch_id];
}

const Mips64Assembler::Branch* Mips64Assembler::GetBranch(uint32_t branch_id) const {
  CHECK_LT(branch_id, branches_.size());
  return &branches_[branch_id];
}

void Mips64Assembler::Bind(Mips64Label* label) {
  CHECK(!label->IsBound());
  uint32_t bound_pc = buffer_.Size();

  // Walk the list of branches referring to and preceding this label.
  // Store the previously unknown target addresses in them.
  while (label->IsLinked()) {
    uint32_t branch_id = label->Position();
    Branch* branch = GetBranch(branch_id);
    branch->Resolve(bound_pc);

    uint32_t branch_location = branch->GetLocation();
    // Extract the location of the previous branch in the list (walking the list backwards;
    // the previous branch ID was stored in the space reserved for this branch).
    uint32_t prev = buffer_.Load<uint32_t>(branch_location);

    // On to the previous branch in the list...
    label->position_ = prev;
  }

  // Now make the label object contain its own location (relative to the end of the preceding
  // branch, if any; it will be used by the branches referring to and following this label).
  label->prev_branch_id_plus_one_ = branches_.size();
  if (label->prev_branch_id_plus_one_) {
    uint32_t branch_id = label->prev_branch_id_plus_one_ - 1;
    const Branch* branch = GetBranch(branch_id);
    bound_pc -= branch->GetEndLocation();
  }
  label->BindTo(bound_pc);
}

uint32_t Mips64Assembler::GetLabelLocation(Mips64Label* label) const {
  CHECK(label->IsBound());
  uint32_t target = label->Position();
  if (label->prev_branch_id_plus_one_) {
    // Get label location based on the branch preceding it.
    uint32_t branch_id = label->prev_branch_id_plus_one_ - 1;
    const Branch* branch = GetBranch(branch_id);
    target += branch->GetEndLocation();
  }
  return target;
}

uint32_t Mips64Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the branches from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of branches.
  if (old_position < last_old_position_) {
    last_position_adjustment_ = 0;
    last_old_position_ = 0;
    last_branch_id_ = 0;
  }
  while (last_branch_id_ != branches_.size()) {
    const Branch* branch = GetBranch(last_branch_id_);
    if (branch->GetLocation() >= old_position + last_position_adjustment_) {
      break;
    }
    last_position_adjustment_ += branch->GetSize() - branch->GetOldSize();
    ++last_branch_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}

void Mips64Assembler::FinalizeLabeledBranch(Mips64Label* label) {
  uint32_t length = branches_.back().GetLength();
  if (!label->IsBound()) {
    // Branch forward (to a following label), distance is unknown.
    // The first branch forward will contain 0, serving as the terminator of
    // the list of forward-reaching branches.
    Emit(label->position_);
    length--;
    // Now make the label object point to this branch
    // (this forms a linked list of branches preceding this label).
    uint32_t branch_id = branches_.size() - 1;
    label->LinkTo(branch_id);
  }
  // Reserve space for the branch.
  while (length--) {
    Nop();
  }
}

void Mips64Assembler::Buncond(Mips64Label* label) {
  uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
  branches_.emplace_back(buffer_.Size(), target);
  FinalizeLabeledBranch(label);
}

void Mips64Assembler::Bcond(Mips64Label* label,
                            BranchCondition condition,
                            GpuRegister lhs,
                            GpuRegister rhs) {
  // If lhs = rhs, this can be a NOP.
  if (Branch::IsNop(condition, lhs, rhs)) {
    return;
  }
  uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
  branches_.emplace_back(buffer_.Size(), target, condition, lhs, rhs);
  FinalizeLabeledBranch(label);
}

void Mips64Assembler::Call(Mips64Label* label, GpuRegister indirect_reg) {
  uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
  branches_.emplace_back(buffer_.Size(), target, indirect_reg);
  FinalizeLabeledBranch(label);
}

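// Promotion runs to a fixed point: expanding one branch shifts everything after
// it, which can push another, previously in-range, short branch out of range.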
1500void Mips64Assembler::PromoteBranches() {
1501 // Promote short branches to long as necessary.
1502 bool changed;
1503 do {
1504 changed = false;
1505 for (auto& branch : branches_) {
1506 CHECK(branch.IsResolved());
1507 uint32_t delta = branch.PromoteIfNeeded();
1508 // If this branch has been promoted and needs to expand in size,
1509 // relocate all branches by the expansion size.
1510 if (delta) {
1511 changed = true;
1512 uint32_t expand_location = branch.GetLocation();
1513 for (auto& branch2 : branches_) {
1514 branch2.Relocate(expand_location, delta);
1515 }
1516 }
1517 }
1518 } while (changed);
1519
1520 // Account for branch expansion by resizing the code buffer
1521 // and moving the code in it to its final location.
1522 size_t branch_count = branches_.size();
1523 if (branch_count > 0) {
1524 // Resize.
1525 Branch& last_branch = branches_[branch_count - 1];
1526 uint32_t size_delta = last_branch.GetEndLocation() - last_branch.GetOldEndLocation();
1527 uint32_t old_size = buffer_.Size();
1528 buffer_.Resize(old_size + size_delta);
1529 // Move the code residing between branch placeholders.
1530 uint32_t end = old_size;
1531 for (size_t i = branch_count; i > 0; ) {
1532 Branch& branch = branches_[--i];
1533 uint32_t size = end - branch.GetOldEndLocation();
1534 buffer_.Move(branch.GetEndLocation(), branch.GetOldEndLocation(), size);
1535 end = branch.GetOldLocation();
1536 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001537 }
1538}
1539
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001540// Note: make sure branch_info_[] and EmitBranch() are kept synchronized.
1541const Mips64Assembler::Branch::BranchInfo Mips64Assembler::Branch::branch_info_[] = {
1542 // Short branches.
1543 { 1, 0, 1, Mips64Assembler::Branch::kOffset28, 2 }, // kUncondBranch
1544 { 2, 0, 1, Mips64Assembler::Branch::kOffset18, 2 }, // kCondBranch
1545 // Exception: kOffset23 for beqzc/bnezc
1546 { 2, 0, 0, Mips64Assembler::Branch::kOffset21, 2 }, // kCall
1547 // Long branches.
1548 { 2, 0, 0, Mips64Assembler::Branch::kOffset32, 0 }, // kLongUncondBranch
1549 { 3, 1, 0, Mips64Assembler::Branch::kOffset32, 0 }, // kLongCondBranch
1550 { 3, 0, 0, Mips64Assembler::Branch::kOffset32, 0 }, // kLongCall
1551};
1552
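// Reading the table above (an interpretation inferred from EmitBranch() below;
// the authoritative field names are in the Branch::BranchInfo declaration in
// the header): each row appears to hold the branch length in 4-byte
// instructions, the index of the instruction carrying the PC-relative offset,
// the PC-origin adjustment in instructions, the maximum offset width, and the
// shift applied to the offset. E.g. { 1, 0, 1, kOffset28, 2 } for kUncondBranch
// matches a single bc whose 26-bit immediate is shifted left by 2 and is
// relative to the following instruction.
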
1553// Note: make sure branch_info_[] and EmitBranch() are kept synchronized.
1554void Mips64Assembler::EmitBranch(Mips64Assembler::Branch* branch) {
1555 CHECK(overwriting_);
1556 overwrite_location_ = branch->GetLocation();
1557 uint32_t offset = branch->GetOffset();
1558 BranchCondition condition = branch->GetCondition();
1559 GpuRegister lhs = branch->GetLeftRegister();
1560 GpuRegister rhs = branch->GetRightRegister();
1561 switch (branch->GetType()) {
1562 // Short branches.
1563 case Branch::kUncondBranch:
1564 CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
1565 Bc(offset);
1566 break;
1567 case Branch::kCondBranch:
1568 CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
1569 EmitBcondc(condition, lhs, rhs, offset);
1570 Nop(); // TODO: improve by filling the forbidden slot.
1571 break;
1572 case Branch::kCall:
1573 CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
1574 Addiupc(lhs, offset);
1575 Jialc(lhs, 0);
1576 break;
1577
1578 // Long branches.
1579 case Branch::kLongUncondBranch:
1580 offset += (offset & 0x8000) << 1; // Account for sign extension in jic.
1581 CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
1582 Auipc(AT, High16Bits(offset));
1583 Jic(AT, Low16Bits(offset));
1584 break;
1585 case Branch::kLongCondBranch:
1586 EmitBcondc(Branch::OppositeCondition(condition), lhs, rhs, 2);
1587 offset += (offset & 0x8000) << 1; // Account for sign extension in jic.
1588 CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
1589 Auipc(AT, High16Bits(offset));
1590 Jic(AT, Low16Bits(offset));
1591 break;
1592 case Branch::kLongCall:
1593 offset += (offset & 0x8000) << 1; // Account for sign extension in daddiu.
1594 CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
1595 Auipc(lhs, High16Bits(offset));
1596 Daddiu(lhs, lhs, Low16Bits(offset));
1597 Jialc(lhs, 0);
1598 break;
1599 }
1600 CHECK_EQ(overwrite_location_, branch->GetEndLocation());
1601 CHECK_LT(branch->GetSize(), static_cast<uint32_t>(Branch::kMaxBranchSize));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001602}
1603
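// Worked example (added for clarity) of the `offset += (offset & 0x8000) << 1`
// adjustment used by the long forms above. Jic and daddiu sign-extend their
// 16-bit immediate, so when bit 15 of the offset is set, the high half must be
// pre-incremented to compensate. For offset = 0x00018000:
//   Low16Bits(adjusted) = 0x8000, sign-extended by jic to 0xFFFF8000;
//   adjusted offset     = 0x00018000 + 0x00010000 = 0x00028000, so High16Bits = 0x0002;
//   auipc contributes 0x0002 << 16, and 0x00020000 + 0xFFFF8000 = 0x00018000, as intended.
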
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001604void Mips64Assembler::Bc(Mips64Label* label) {
1605 Buncond(label);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001606}
1607
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001608void Mips64Assembler::Jialc(Mips64Label* label, GpuRegister indirect_reg) {
1609 Call(label, indirect_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001610}
1611
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001612void Mips64Assembler::Bltc(GpuRegister rs, GpuRegister rt, Mips64Label* label) {
1613 Bcond(label, kCondLT, rs, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001614}
1615
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001616void Mips64Assembler::Bltzc(GpuRegister rt, Mips64Label* label) {
1617 Bcond(label, kCondLTZ, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001618}
1619
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001620void Mips64Assembler::Bgtzc(GpuRegister rt, Mips64Label* label) {
1621 Bcond(label, kCondGTZ, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001622}
1623
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001624void Mips64Assembler::Bgec(GpuRegister rs, GpuRegister rt, Mips64Label* label) {
1625 Bcond(label, kCondGE, rs, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001626}
1627
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001628void Mips64Assembler::Bgezc(GpuRegister rt, Mips64Label* label) {
1629 Bcond(label, kCondGEZ, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001630}
1631
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001632void Mips64Assembler::Blezc(GpuRegister rt, Mips64Label* label) {
1633 Bcond(label, kCondLEZ, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001634}
1635
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001636void Mips64Assembler::Bltuc(GpuRegister rs, GpuRegister rt, Mips64Label* label) {
1637 Bcond(label, kCondLTU, rs, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001638}
1639
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001640void Mips64Assembler::Bgeuc(GpuRegister rs, GpuRegister rt, Mips64Label* label) {
1641 Bcond(label, kCondGEU, rs, rt);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001642}
1643
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001644void Mips64Assembler::Beqc(GpuRegister rs, GpuRegister rt, Mips64Label* label) {
1645 Bcond(label, kCondEQ, rs, rt);
1646}
1647
1648void Mips64Assembler::Bnec(GpuRegister rs, GpuRegister rt, Mips64Label* label) {
1649 Bcond(label, kCondNE, rs, rt);
1650}
1651
1652void Mips64Assembler::Beqzc(GpuRegister rs, Mips64Label* label) {
1653 Bcond(label, kCondEQZ, rs);
1654}
1655
1656void Mips64Assembler::Bnezc(GpuRegister rs, Mips64Label* label) {
1657 Bcond(label, kCondNEZ, rs);
Andreas Gampe57b34292015-01-14 15:45:59 -08001658}
1659
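// Usage sketch (not part of the original source; assumes an existing
// `Mips64Assembler& assembler` and that A0/A1 hold a counter and a step): the
// aliases above simply record a Branch against a label, which may be bound
// before the branch (backward, target known immediately) or after it (forward,
// resolved at Bind()):
//
//   Mips64Label loop;
//   assembler.Bind(&loop);           // Backward target: the label is now bound.
//   assembler.Dsubu(A0, A0, A1);     // Hypothetical loop body.
//   assembler.Bnezc(A0, &loop);      // Emitted short; promoted later if out of range.
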
1660void Mips64Assembler::LoadFromOffset(LoadOperandType type, GpuRegister reg, GpuRegister base,
1661 int32_t offset) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001662 if (!IsInt<16>(offset)) {
1663 LoadConst32(AT, offset);
1664 Daddu(AT, AT, base);
1665 base = AT;
1666 offset = 0;
1667 }
1668
Andreas Gampe57b34292015-01-14 15:45:59 -08001669 switch (type) {
1670 case kLoadSignedByte:
1671 Lb(reg, base, offset);
1672 break;
1673 case kLoadUnsignedByte:
1674 Lbu(reg, base, offset);
1675 break;
1676 case kLoadSignedHalfword:
1677 Lh(reg, base, offset);
1678 break;
1679 case kLoadUnsignedHalfword:
1680 Lhu(reg, base, offset);
1681 break;
1682 case kLoadWord:
1683 Lw(reg, base, offset);
1684 break;
Douglas Leungd90957f2015-04-30 19:22:49 -07001685 case kLoadUnsignedWord:
1686 Lwu(reg, base, offset);
1687 break;
Andreas Gampe57b34292015-01-14 15:45:59 -08001688 case kLoadDoubleword:
Andreas Gampe57b34292015-01-14 15:45:59 -08001689 Ld(reg, base, offset);
1690 break;
Andreas Gampe57b34292015-01-14 15:45:59 -08001691 }
1692}
1693
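// Example (added for clarity) of the large-offset rewrite above, which the
// FPU-load and store variants below handle the same way: an offset outside the
// signed 16-bit range is first materialized in AT and folded into the base, so
//
//   LoadFromOffset(kLoadDoubleword, V0, S0, 0x12345);
//
// performs LoadConst32(AT, 0x12345), Daddu(AT, AT, S0) and then Ld(V0, AT, 0).
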
1694void Mips64Assembler::LoadFpuFromOffset(LoadOperandType type, FpuRegister reg, GpuRegister base,
1695 int32_t offset) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001696 if (!IsInt<16>(offset)) {
1697 LoadConst32(AT, offset);
1698 Daddu(AT, AT, base);
1699 base = AT;
1700 offset = 0;
1701 }
1702
Andreas Gampe57b34292015-01-14 15:45:59 -08001703 switch (type) {
1704 case kLoadWord:
1705 Lwc1(reg, base, offset);
1706 break;
1707 case kLoadDoubleword:
Andreas Gampe57b34292015-01-14 15:45:59 -08001708 Ldc1(reg, base, offset);
1709 break;
1710 default:
1711 LOG(FATAL) << "UNREACHABLE";
1712 }
1713}
1714
1715void Mips64Assembler::EmitLoad(ManagedRegister m_dst, GpuRegister src_register, int32_t src_offset,
1716 size_t size) {
1717 Mips64ManagedRegister dst = m_dst.AsMips64();
1718 if (dst.IsNoRegister()) {
1719 CHECK_EQ(0u, size) << dst;
1720 } else if (dst.IsGpuRegister()) {
1721 if (size == 4) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001722 LoadFromOffset(kLoadWord, dst.AsGpuRegister(), src_register, src_offset);
1723 } else if (size == 8) {
1724 CHECK_EQ(8u, size) << dst;
1725 LoadFromOffset(kLoadDoubleword, dst.AsGpuRegister(), src_register, src_offset);
1726 } else {
1727 UNIMPLEMENTED(FATAL) << "We only support Load() of size 4 and 8";
1728 }
1729 } else if (dst.IsFpuRegister()) {
1730 if (size == 4) {
1731 CHECK_EQ(4u, size) << dst;
1732 LoadFpuFromOffset(kLoadWord, dst.AsFpuRegister(), src_register, src_offset);
1733 } else if (size == 8) {
1734 CHECK_EQ(8u, size) << dst;
1735 LoadFpuFromOffset(kLoadDoubleword, dst.AsFpuRegister(), src_register, src_offset);
1736 } else {
1737 UNIMPLEMENTED(FATAL) << "We only support Load() of size 4 and 8";
1738 }
1739 }
1740}
1741
1742void Mips64Assembler::StoreToOffset(StoreOperandType type, GpuRegister reg, GpuRegister base,
1743 int32_t offset) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001744 if (!IsInt<16>(offset)) {
1745 LoadConst32(AT, offset);
1746 Daddu(AT, AT, base);
1747 base = AT;
1748 offset = 0;
1749 }
1750
Andreas Gampe57b34292015-01-14 15:45:59 -08001751 switch (type) {
1752 case kStoreByte:
1753 Sb(reg, base, offset);
1754 break;
1755 case kStoreHalfword:
1756 Sh(reg, base, offset);
1757 break;
1758 case kStoreWord:
1759 Sw(reg, base, offset);
1760 break;
1761 case kStoreDoubleword:
Andreas Gampe57b34292015-01-14 15:45:59 -08001762 Sd(reg, base, offset);
1763 break;
1764 default:
1765 LOG(FATAL) << "UNREACHABLE";
1766 }
1767}
1768
1769void Mips64Assembler::StoreFpuToOffset(StoreOperandType type, FpuRegister reg, GpuRegister base,
1770 int32_t offset) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001771 if (!IsInt<16>(offset)) {
1772 LoadConst32(AT, offset);
1773 Daddu(AT, AT, base);
1774 base = AT;
1775 offset = 0;
1776 }
1777
Andreas Gampe57b34292015-01-14 15:45:59 -08001778 switch (type) {
1779 case kStoreWord:
1780 Swc1(reg, base, offset);
1781 break;
1782 case kStoreDoubleword:
1783 Sdc1(reg, base, offset);
1784 break;
1785 default:
1786 LOG(FATAL) << "UNREACHABLE";
1787 }
1788}
1789
David Srbeckydd973932015-04-07 20:29:48 +01001790static dwarf::Reg DWARFReg(GpuRegister reg) {
1791 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1792}
1793
Andreas Gampe57b34292015-01-14 15:45:59 -08001794constexpr size_t kFramePointerSize = 8;
1795
1796void Mips64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
1797 const std::vector<ManagedRegister>& callee_save_regs,
1798 const ManagedRegisterEntrySpills& entry_spills) {
1799 CHECK_ALIGNED(frame_size, kStackAlignment);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001800 DCHECK(!overwriting_);
Andreas Gampe57b34292015-01-14 15:45:59 -08001801
1802 // Increase frame to required size.
1803 IncreaseFrameSize(frame_size);
1804
1805 // Push callee saves and return address
1806 int stack_offset = frame_size - kFramePointerSize;
1807 StoreToOffset(kStoreDoubleword, RA, SP, stack_offset);
David Srbeckydd973932015-04-07 20:29:48 +01001808 cfi_.RelOffset(DWARFReg(RA), stack_offset);
Andreas Gampe57b34292015-01-14 15:45:59 -08001809 for (int i = callee_save_regs.size() - 1; i >= 0; --i) {
1810 stack_offset -= kFramePointerSize;
1811 GpuRegister reg = callee_save_regs.at(i).AsMips64().AsGpuRegister();
1812 StoreToOffset(kStoreDoubleword, reg, SP, stack_offset);
David Srbeckydd973932015-04-07 20:29:48 +01001813 cfi_.RelOffset(DWARFReg(reg), stack_offset);
Andreas Gampe57b34292015-01-14 15:45:59 -08001814 }
1815
1816 // Write out Method*.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001817 StoreToOffset(kStoreDoubleword, method_reg.AsMips64().AsGpuRegister(), SP, 0);
Andreas Gampe57b34292015-01-14 15:45:59 -08001818
1819 // Write out entry spills.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001820 int32_t offset = frame_size + kFramePointerSize;
Andreas Gampe57b34292015-01-14 15:45:59 -08001821 for (size_t i = 0; i < entry_spills.size(); ++i) {
1822 Mips64ManagedRegister reg = entry_spills.at(i).AsMips64();
1823 ManagedRegisterSpill spill = entry_spills.at(i);
1824 int32_t size = spill.getSize();
1825 if (reg.IsNoRegister()) {
1826 // only increment stack offset.
1827 offset += size;
1828 } else if (reg.IsFpuRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001829 StoreFpuToOffset((size == 4) ? kStoreWord : kStoreDoubleword,
1830 reg.AsFpuRegister(), SP, offset);
Andreas Gampe57b34292015-01-14 15:45:59 -08001831 offset += size;
1832 } else if (reg.IsGpuRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001833 StoreToOffset((size == 4) ? kStoreWord : kStoreDoubleword,
1834 reg.AsGpuRegister(), SP, offset);
Andreas Gampe57b34292015-01-14 15:45:59 -08001835 offset += size;
1836 }
1837 }
1838}
1839
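// Resulting frame layout (summarized from the code above; offsets are relative
// to the new SP, higher addresses first):
//
//   SP + frame_size + 8, ...   : entry spills (above the old SP, i.e. in the caller's frame)
//   SP + frame_size - 8        : RA
//   SP + frame_size - 16, ...  : callee-saved registers, stored last-to-first
//   SP + 0                     : Method* from method_reg
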
1840void Mips64Assembler::RemoveFrame(size_t frame_size,
1841 const std::vector<ManagedRegister>& callee_save_regs) {
1842 CHECK_ALIGNED(frame_size, kStackAlignment);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001843 DCHECK(!overwriting_);
David Srbeckydd973932015-04-07 20:29:48 +01001844 cfi_.RememberState();
Andreas Gampe57b34292015-01-14 15:45:59 -08001845
1846 // Pop callee saves and return address
1847 int stack_offset = frame_size - (callee_save_regs.size() * kFramePointerSize) - kFramePointerSize;
1848 for (size_t i = 0; i < callee_save_regs.size(); ++i) {
1849 GpuRegister reg = callee_save_regs.at(i).AsMips64().AsGpuRegister();
1850 LoadFromOffset(kLoadDoubleword, reg, SP, stack_offset);
David Srbeckydd973932015-04-07 20:29:48 +01001851 cfi_.Restore(DWARFReg(reg));
Andreas Gampe57b34292015-01-14 15:45:59 -08001852 stack_offset += kFramePointerSize;
1853 }
1854 LoadFromOffset(kLoadDoubleword, RA, SP, stack_offset);
David Srbeckydd973932015-04-07 20:29:48 +01001855 cfi_.Restore(DWARFReg(RA));
Andreas Gampe57b34292015-01-14 15:45:59 -08001856
1857 // Decrease frame to required size.
1858 DecreaseFrameSize(frame_size);
1859
1860 // Then jump to the return address.
1861 Jr(RA);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001862 Nop();
David Srbeckydd973932015-04-07 20:29:48 +01001863
1864 // The CFI should be restored for any code that follows the exit block.
1865 cfi_.RestoreState();
1866 cfi_.DefCFAOffset(frame_size);
Andreas Gampe57b34292015-01-14 15:45:59 -08001867}
1868
1869void Mips64Assembler::IncreaseFrameSize(size_t adjust) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001870 CHECK_ALIGNED(adjust, kFramePointerSize);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001871 DCHECK(!overwriting_);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001872 Daddiu64(SP, SP, static_cast<int32_t>(-adjust));
David Srbeckydd973932015-04-07 20:29:48 +01001873 cfi_.AdjustCFAOffset(adjust);
Andreas Gampe57b34292015-01-14 15:45:59 -08001874}
1875
1876void Mips64Assembler::DecreaseFrameSize(size_t adjust) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001877 CHECK_ALIGNED(adjust, kFramePointerSize);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001878 DCHECK(!overwriting_);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001879 Daddiu64(SP, SP, static_cast<int32_t>(adjust));
David Srbeckydd973932015-04-07 20:29:48 +01001880 cfi_.AdjustCFAOffset(-adjust);
Andreas Gampe57b34292015-01-14 15:45:59 -08001881}
1882
1883void Mips64Assembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {
1884 Mips64ManagedRegister src = msrc.AsMips64();
1885 if (src.IsNoRegister()) {
1886 CHECK_EQ(0u, size);
1887 } else if (src.IsGpuRegister()) {
1888 CHECK(size == 4 || size == 8) << size;
1889 if (size == 8) {
1890 StoreToOffset(kStoreDoubleword, src.AsGpuRegister(), SP, dest.Int32Value());
1891 } else if (size == 4) {
1892 StoreToOffset(kStoreWord, src.AsGpuRegister(), SP, dest.Int32Value());
1893 } else {
1894 UNIMPLEMENTED(FATAL) << "We only support Store() of size 4 and 8";
1895 }
1896 } else if (src.IsFpuRegister()) {
1897 CHECK(size == 4 || size == 8) << size;
1898 if (size == 8) {
1899 StoreFpuToOffset(kStoreDoubleword, src.AsFpuRegister(), SP, dest.Int32Value());
1900 } else if (size == 4) {
1901 StoreFpuToOffset(kStoreWord, src.AsFpuRegister(), SP, dest.Int32Value());
1902 } else {
1903 UNIMPLEMENTED(FATAL) << "We only support Store() of size 4 and 8";
1904 }
1905 }
1906}
1907
1908void Mips64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
1909 Mips64ManagedRegister src = msrc.AsMips64();
1910 CHECK(src.IsGpuRegister());
1911 StoreToOffset(kStoreWord, src.AsGpuRegister(), SP, dest.Int32Value());
1912}
1913
1914void Mips64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
1915 Mips64ManagedRegister src = msrc.AsMips64();
1916 CHECK(src.IsGpuRegister());
1917 StoreToOffset(kStoreDoubleword, src.AsGpuRegister(), SP, dest.Int32Value());
1918}
1919
1920void Mips64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
1921 ManagedRegister mscratch) {
1922 Mips64ManagedRegister scratch = mscratch.AsMips64();
1923 CHECK(scratch.IsGpuRegister()) << scratch;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001924 LoadConst32(scratch.AsGpuRegister(), imm);
Andreas Gampe57b34292015-01-14 15:45:59 -08001925 StoreToOffset(kStoreWord, scratch.AsGpuRegister(), SP, dest.Int32Value());
1926}
1927
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001928void Mips64Assembler::StoreStackOffsetToThread64(ThreadOffset<kMipsDoublewordSize> thr_offs,
Andreas Gampe57b34292015-01-14 15:45:59 -08001929 FrameOffset fr_offs,
1930 ManagedRegister mscratch) {
1931 Mips64ManagedRegister scratch = mscratch.AsMips64();
1932 CHECK(scratch.IsGpuRegister()) << scratch;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001933 Daddiu64(scratch.AsGpuRegister(), SP, fr_offs.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08001934 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), S1, thr_offs.Int32Value());
1935}
1936
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001937void Mips64Assembler::StoreStackPointerToThread64(ThreadOffset<kMipsDoublewordSize> thr_offs) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001938 StoreToOffset(kStoreDoubleword, SP, S1, thr_offs.Int32Value());
1939}
1940
1941void Mips64Assembler::StoreSpanning(FrameOffset dest, ManagedRegister msrc,
1942 FrameOffset in_off, ManagedRegister mscratch) {
1943 Mips64ManagedRegister src = msrc.AsMips64();
1944 Mips64ManagedRegister scratch = mscratch.AsMips64();
1945 StoreToOffset(kStoreDoubleword, src.AsGpuRegister(), SP, dest.Int32Value());
1946 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(), SP, in_off.Int32Value());
1947 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, dest.Int32Value() + 8);
1948}
1949
1950void Mips64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
1951 return EmitLoad(mdest, SP, src.Int32Value(), size);
1952}
1953
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001954void Mips64Assembler::LoadFromThread64(ManagedRegister mdest,
1955 ThreadOffset<kMipsDoublewordSize> src,
1956 size_t size) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001957 return EmitLoad(mdest, S1, src.Int32Value(), size);
1958}
1959
1960void Mips64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
1961 Mips64ManagedRegister dest = mdest.AsMips64();
1962 CHECK(dest.IsGpuRegister());
Douglas Leungd90957f2015-04-30 19:22:49 -07001963 LoadFromOffset(kLoadUnsignedWord, dest.AsGpuRegister(), SP, src.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08001964}
1965
Mathieu Chartiere401d142015-04-22 13:56:20 -07001966void Mips64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
Roland Levillain4d027112015-07-01 15:41:14 +01001967 bool unpoison_reference) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001968 Mips64ManagedRegister dest = mdest.AsMips64();
Douglas Leungd90957f2015-04-30 19:22:49 -07001969 CHECK(dest.IsGpuRegister() && base.AsMips64().IsGpuRegister());
1970 LoadFromOffset(kLoadUnsignedWord, dest.AsGpuRegister(),
Andreas Gampe57b34292015-01-14 15:45:59 -08001971 base.AsMips64().AsGpuRegister(), offs.Int32Value());
Roland Levillain4d027112015-07-01 15:41:14 +01001972 if (kPoisonHeapReferences && unpoison_reference) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001973 // TODO: review
1974 // Negate the 32-bit reference to unpoison it.
1975 Dsubu(dest.AsGpuRegister(), ZERO, dest.AsGpuRegister());
1976 // And constrain it to 32 bits by zeroing bits 32 through 63, as on ARM64 and x86-64.
1977 Dext(dest.AsGpuRegister(), dest.AsGpuRegister(), 0, 31);
Andreas Gampe57b34292015-01-14 15:45:59 -08001978 }
1979}
1980
1981void Mips64Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001982 Offset offs) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001983 Mips64ManagedRegister dest = mdest.AsMips64();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001984 CHECK(dest.IsGpuRegister() && base.AsMips64().IsGpuRegister());
Andreas Gampe57b34292015-01-14 15:45:59 -08001985 LoadFromOffset(kLoadDoubleword, dest.AsGpuRegister(),
1986 base.AsMips64().AsGpuRegister(), offs.Int32Value());
1987}
1988
1989void Mips64Assembler::LoadRawPtrFromThread64(ManagedRegister mdest,
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001990 ThreadOffset<kMipsDoublewordSize> offs) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001991 Mips64ManagedRegister dest = mdest.AsMips64();
1992 CHECK(dest.IsGpuRegister());
1993 LoadFromOffset(kLoadDoubleword, dest.AsGpuRegister(), S1, offs.Int32Value());
1994}
1995
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001996void Mips64Assembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
1997 size_t size ATTRIBUTE_UNUSED) {
1998 UNIMPLEMENTED(FATAL) << "No sign extension necessary for MIPS64";
Andreas Gampe57b34292015-01-14 15:45:59 -08001999}
2000
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002001void Mips64Assembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
2002 size_t size ATTRIBUTE_UNUSED) {
2003 UNIMPLEMENTED(FATAL) << "No zero extension necessary for MIPS64";
Andreas Gampe57b34292015-01-14 15:45:59 -08002004}
2005
2006void Mips64Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
2007 Mips64ManagedRegister dest = mdest.AsMips64();
2008 Mips64ManagedRegister src = msrc.AsMips64();
2009 if (!dest.Equals(src)) {
2010 if (dest.IsGpuRegister()) {
2011 CHECK(src.IsGpuRegister()) << src;
2012 Move(dest.AsGpuRegister(), src.AsGpuRegister());
2013 } else if (dest.IsFpuRegister()) {
2014 CHECK(src.IsFpuRegister()) << src;
2015 if (size == 4) {
2016 MovS(dest.AsFpuRegister(), src.AsFpuRegister());
2017 } else if (size == 8) {
2018 MovD(dest.AsFpuRegister(), src.AsFpuRegister());
2019 } else {
2020 UNIMPLEMENTED(FATAL) << "We only support Move() of size 4 and 8";
2021 }
2022 }
2023 }
2024}
2025
2026void Mips64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
2027 ManagedRegister mscratch) {
2028 Mips64ManagedRegister scratch = mscratch.AsMips64();
2029 CHECK(scratch.IsGpuRegister()) << scratch;
2030 LoadFromOffset(kLoadWord, scratch.AsGpuRegister(), SP, src.Int32Value());
2031 StoreToOffset(kStoreWord, scratch.AsGpuRegister(), SP, dest.Int32Value());
2032}
2033
2034void Mips64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002035 ThreadOffset<kMipsDoublewordSize> thr_offs,
Andreas Gampe57b34292015-01-14 15:45:59 -08002036 ManagedRegister mscratch) {
2037 Mips64ManagedRegister scratch = mscratch.AsMips64();
2038 CHECK(scratch.IsGpuRegister()) << scratch;
2039 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(), S1, thr_offs.Int32Value());
2040 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, fr_offs.Int32Value());
2041}
2042
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002043void Mips64Assembler::CopyRawPtrToThread64(ThreadOffset<kMipsDoublewordSize> thr_offs,
Andreas Gampe57b34292015-01-14 15:45:59 -08002044 FrameOffset fr_offs,
2045 ManagedRegister mscratch) {
2046 Mips64ManagedRegister scratch = mscratch.AsMips64();
2047 CHECK(scratch.IsGpuRegister()) << scratch;
2048 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
2049 SP, fr_offs.Int32Value());
2050 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(),
2051 S1, thr_offs.Int32Value());
2052}
2053
2054void Mips64Assembler::Copy(FrameOffset dest, FrameOffset src,
2055 ManagedRegister mscratch, size_t size) {
2056 Mips64ManagedRegister scratch = mscratch.AsMips64();
2057 CHECK(scratch.IsGpuRegister()) << scratch;
2058 CHECK(size == 4 || size == 8) << size;
2059 if (size == 4) {
2060 LoadFromOffset(kLoadWord, scratch.AsGpuRegister(), SP, src.Int32Value());
Lazar Trsicf652d602015-06-24 16:30:21 +02002061 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, dest.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08002062 } else if (size == 8) {
2063 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(), SP, src.Int32Value());
2064 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, dest.Int32Value());
2065 } else {
2066 UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
2067 }
2068}
2069
2070void Mips64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
Alexey Frunze4dda3372015-06-01 18:31:49 -07002071 ManagedRegister mscratch, size_t size) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002072 GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();
2073 CHECK(size == 4 || size == 8) << size;
2074 if (size == 4) {
2075 LoadFromOffset(kLoadWord, scratch, src_base.AsMips64().AsGpuRegister(),
2076 src_offset.Int32Value());
Lazar Trsicf652d602015-06-24 16:30:21 +02002077 StoreToOffset(kStoreDoubleword, scratch, SP, dest.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08002078 } else if (size == 8) {
2079 LoadFromOffset(kLoadDoubleword, scratch, src_base.AsMips64().AsGpuRegister(),
2080 src_offset.Int32Value());
2081 StoreToOffset(kStoreDoubleword, scratch, SP, dest.Int32Value());
2082 } else {
2083 UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
2084 }
2085}
2086
2087void Mips64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
Alexey Frunze4dda3372015-06-01 18:31:49 -07002088 ManagedRegister mscratch, size_t size) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002089 GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();
2090 CHECK(size == 4 || size == 8) << size;
2091 if (size == 4) {
2092 LoadFromOffset(kLoadWord, scratch, SP, src.Int32Value());
Lazar Trsicf652d602015-06-24 16:30:21 +02002093 StoreToOffset(kStoreDoubleword, scratch, dest_base.AsMips64().AsGpuRegister(),
Andreas Gampe57b34292015-01-14 15:45:59 -08002094 dest_offset.Int32Value());
2095 } else if (size == 8) {
2096 LoadFromOffset(kLoadDoubleword, scratch, SP, src.Int32Value());
2097 StoreToOffset(kStoreDoubleword, scratch, dest_base.AsMips64().AsGpuRegister(),
2098 dest_offset.Int32Value());
2099 } else {
2100 UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
2101 }
2102}
2103
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002104void Mips64Assembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
2105 FrameOffset src_base ATTRIBUTE_UNUSED,
2106 Offset src_offset ATTRIBUTE_UNUSED,
2107 ManagedRegister mscratch ATTRIBUTE_UNUSED,
2108 size_t size ATTRIBUTE_UNUSED) {
2109 UNIMPLEMENTED(FATAL) << "No MIPS64 implementation";
Andreas Gampe57b34292015-01-14 15:45:59 -08002110}
2111
2112void Mips64Assembler::Copy(ManagedRegister dest, Offset dest_offset,
Alexey Frunze4dda3372015-06-01 18:31:49 -07002113 ManagedRegister src, Offset src_offset,
2114 ManagedRegister mscratch, size_t size) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002115 GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();
2116 CHECK(size == 4 || size == 8) << size;
2117 if (size == 4) {
2118 LoadFromOffset(kLoadWord, scratch, src.AsMips64().AsGpuRegister(), src_offset.Int32Value());
Lazar Trsicf652d602015-06-24 16:30:21 +02002119 StoreToOffset(kStoreDoubleword, scratch, dest.AsMips64().AsGpuRegister(), dest_offset.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08002120 } else if (size == 8) {
2121 LoadFromOffset(kLoadDoubleword, scratch, src.AsMips64().AsGpuRegister(),
2122 src_offset.Int32Value());
2123 StoreToOffset(kStoreDoubleword, scratch, dest.AsMips64().AsGpuRegister(),
2124 dest_offset.Int32Value());
2125 } else {
2126 UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
2127 }
2128}
2129
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002130void Mips64Assembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
2131 Offset dest_offset ATTRIBUTE_UNUSED,
2132 FrameOffset src ATTRIBUTE_UNUSED,
2133 Offset src_offset ATTRIBUTE_UNUSED,
2134 ManagedRegister mscratch ATTRIBUTE_UNUSED,
2135 size_t size ATTRIBUTE_UNUSED) {
2136 UNIMPLEMENTED(FATAL) << "No MIPS64 implementation";
Andreas Gampe57b34292015-01-14 15:45:59 -08002137}
2138
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002139void Mips64Assembler::MemoryBarrier(ManagedRegister mreg ATTRIBUTE_UNUSED) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002140 // TODO: sync?
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002141 UNIMPLEMENTED(FATAL) << "No MIPS64 implementation";
Andreas Gampe57b34292015-01-14 15:45:59 -08002142}
2143
2144void Mips64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
Alexey Frunze4dda3372015-06-01 18:31:49 -07002145 FrameOffset handle_scope_offset,
2146 ManagedRegister min_reg,
2147 bool null_allowed) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002148 Mips64ManagedRegister out_reg = mout_reg.AsMips64();
2149 Mips64ManagedRegister in_reg = min_reg.AsMips64();
2150 CHECK(in_reg.IsNoRegister() || in_reg.IsGpuRegister()) << in_reg;
2151 CHECK(out_reg.IsGpuRegister()) << out_reg;
2152 if (null_allowed) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002153 Mips64Label null_arg;
Andreas Gampe57b34292015-01-14 15:45:59 -08002154 // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
2155 // the address in the handle scope holding the reference.
2156 // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
2157 if (in_reg.IsNoRegister()) {
Douglas Leungd90957f2015-04-30 19:22:49 -07002158 LoadFromOffset(kLoadUnsignedWord, out_reg.AsGpuRegister(),
Andreas Gampe57b34292015-01-14 15:45:59 -08002159 SP, handle_scope_offset.Int32Value());
2160 in_reg = out_reg;
2161 }
2162 if (!out_reg.Equals(in_reg)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002163 LoadConst32(out_reg.AsGpuRegister(), 0);
Andreas Gampe57b34292015-01-14 15:45:59 -08002164 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002165 Beqzc(in_reg.AsGpuRegister(), &null_arg);
2166 Daddiu64(out_reg.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
2167 Bind(&null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08002168 } else {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002169 Daddiu64(out_reg.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08002170 }
2171}
2172
2173void Mips64Assembler::CreateHandleScopeEntry(FrameOffset out_off,
Alexey Frunze4dda3372015-06-01 18:31:49 -07002174 FrameOffset handle_scope_offset,
2175 ManagedRegister mscratch,
2176 bool null_allowed) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002177 Mips64ManagedRegister scratch = mscratch.AsMips64();
2178 CHECK(scratch.IsGpuRegister()) << scratch;
2179 if (null_allowed) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002180 Mips64Label null_arg;
Douglas Leungd90957f2015-04-30 19:22:49 -07002181 LoadFromOffset(kLoadUnsignedWord, scratch.AsGpuRegister(), SP,
Andreas Gampe57b34292015-01-14 15:45:59 -08002182 handle_scope_offset.Int32Value());
2183 // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
2184 // the address in the handle scope holding the reference.
2185 // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
Alexey Frunze4dda3372015-06-01 18:31:49 -07002186 Beqzc(scratch.AsGpuRegister(), &null_arg);
2187 Daddiu64(scratch.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
2188 Bind(&null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08002189 } else {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002190 Daddiu64(scratch.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08002191 }
2192 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, out_off.Int32Value());
2193}
2194
2195// Given a handle scope entry, load the associated reference.
2196void Mips64Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
Alexey Frunze4dda3372015-06-01 18:31:49 -07002197 ManagedRegister min_reg) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002198 Mips64ManagedRegister out_reg = mout_reg.AsMips64();
2199 Mips64ManagedRegister in_reg = min_reg.AsMips64();
2200 CHECK(out_reg.IsGpuRegister()) << out_reg;
2201 CHECK(in_reg.IsGpuRegister()) << in_reg;
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002202 Mips64Label null_arg;
Andreas Gampe57b34292015-01-14 15:45:59 -08002203 if (!out_reg.Equals(in_reg)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002204 LoadConst32(out_reg.AsGpuRegister(), 0);
Andreas Gampe57b34292015-01-14 15:45:59 -08002205 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002206 Beqzc(in_reg.AsGpuRegister(), &null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08002207 LoadFromOffset(kLoadDoubleword, out_reg.AsGpuRegister(),
2208 in_reg.AsGpuRegister(), 0);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002209 Bind(&null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08002210}
2211
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002212void Mips64Assembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
2213 bool could_be_null ATTRIBUTE_UNUSED) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002214 // TODO: not validating references
2215}
2216
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002217void Mips64Assembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
2218 bool could_be_null ATTRIBUTE_UNUSED) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002219 // TODO: not validating references
2220}
2221
2222void Mips64Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister mscratch) {
2223 Mips64ManagedRegister base = mbase.AsMips64();
2224 Mips64ManagedRegister scratch = mscratch.AsMips64();
2225 CHECK(base.IsGpuRegister()) << base;
2226 CHECK(scratch.IsGpuRegister()) << scratch;
2227 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
2228 base.AsGpuRegister(), offset.Int32Value());
2229 Jalr(scratch.AsGpuRegister());
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002230 Nop();
Andreas Gampe57b34292015-01-14 15:45:59 -08002231 // TODO: place reference map on call
2232}
2233
2234void Mips64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
2235 Mips64ManagedRegister scratch = mscratch.AsMips64();
2236 CHECK(scratch.IsGpuRegister()) << scratch;
2237 // Call *(*(SP + base) + offset)
Mathieu Chartiere401d142015-04-22 13:56:20 -07002238 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
Andreas Gampe57b34292015-01-14 15:45:59 -08002239 SP, base.Int32Value());
2240 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
2241 scratch.AsGpuRegister(), offset.Int32Value());
2242 Jalr(scratch.AsGpuRegister());
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002243 Nop();
Andreas Gampe57b34292015-01-14 15:45:59 -08002244 // TODO: place reference map on call
2245}
2246
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002247void Mips64Assembler::CallFromThread64(ThreadOffset<kMipsDoublewordSize> offset ATTRIBUTE_UNUSED,
2248 ManagedRegister mscratch ATTRIBUTE_UNUSED) {
2249 UNIMPLEMENTED(FATAL) << "No MIPS64 implementation";
Andreas Gampe57b34292015-01-14 15:45:59 -08002250}
2251
2252void Mips64Assembler::GetCurrentThread(ManagedRegister tr) {
2253 Move(tr.AsMips64().AsGpuRegister(), S1);
2254}
2255
2256void Mips64Assembler::GetCurrentThread(FrameOffset offset,
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002257 ManagedRegister mscratch ATTRIBUTE_UNUSED) {
Andreas Gampe57b34292015-01-14 15:45:59 -08002258 StoreToOffset(kStoreDoubleword, S1, SP, offset.Int32Value());
2259}
2260
2261void Mips64Assembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
2262 Mips64ManagedRegister scratch = mscratch.AsMips64();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002263 exception_blocks_.emplace_back(scratch, stack_adjust);
2264 LoadFromOffset(kLoadDoubleword,
2265 scratch.AsGpuRegister(),
2266 S1,
2267 Thread::ExceptionOffset<kMipsDoublewordSize>().Int32Value());
2268 Bnezc(scratch.AsGpuRegister(), exception_blocks_.back().Entry());
Andreas Gampe57b34292015-01-14 15:45:59 -08002269}
2270
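// Note (added for clarity): ExceptionPoll() above emits only the inline check;
// it records a Mips64ExceptionSlowPath in exception_blocks_ and branches to its
// entry label when the thread's exception field is non-null. The out-of-line
// delivery code for each recorded slow path is produced by EmitExceptionPoll()
// below.
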
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002271void Mips64Assembler::EmitExceptionPoll(Mips64ExceptionSlowPath* exception) {
2272 Bind(exception->Entry());
2273 if (exception->stack_adjust_ != 0) { // Fix up the frame.
2274 DecreaseFrameSize(exception->stack_adjust_);
Andreas Gampe57b34292015-01-14 15:45:59 -08002275 }
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002276 // Pass exception object as argument.
2277 // Don't care about preserving A0 as this call won't return.
2278 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
2279 Move(A0, exception->scratch_.AsGpuRegister());
Andreas Gampe57b34292015-01-14 15:45:59 -08002280 // Set up call to Thread::Current()->pDeliverException
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002281 LoadFromOffset(kLoadDoubleword,
2282 T9,
2283 S1,
2284 QUICK_ENTRYPOINT_OFFSET(kMipsDoublewordSize, pDeliverException).Int32Value());
2285 Jr(T9);
2286 Nop();
2287
Andreas Gampe57b34292015-01-14 15:45:59 -08002288 // Call never returns
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002289 Break();
Andreas Gampe57b34292015-01-14 15:45:59 -08002290}
2291
2292} // namespace mips64
2293} // namespace art