blob: 00e8995bff2377e0beb1441f077d2129156e36a7 [file] [log] [blame]
Andreas Gampe57b34292015-01-14 15:45:59 -08001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_mips64.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Andreas Gampe57b34292015-01-14 15:45:59 -080020#include "base/casts.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "memory_region.h"
23#include "thread.h"
24
25namespace art {
26namespace mips64 {
27
Alexey Frunze4dda3372015-06-01 18:31:49 -070028void Mips64Assembler::Emit(uint32_t value) {
Andreas Gampe57b34292015-01-14 15:45:59 -080029 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
Alexey Frunze4dda3372015-06-01 18:31:49 -070030 buffer_.Emit<uint32_t>(value);
Andreas Gampe57b34292015-01-14 15:45:59 -080031}
32
33void Mips64Assembler::EmitR(int opcode, GpuRegister rs, GpuRegister rt, GpuRegister rd,
34 int shamt, int funct) {
35 CHECK_NE(rs, kNoGpuRegister);
36 CHECK_NE(rt, kNoGpuRegister);
37 CHECK_NE(rd, kNoGpuRegister);
Alexey Frunze4dda3372015-06-01 18:31:49 -070038 uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
39 static_cast<uint32_t>(rs) << kRsShift |
40 static_cast<uint32_t>(rt) << kRtShift |
41 static_cast<uint32_t>(rd) << kRdShift |
42 shamt << kShamtShift |
43 funct;
Andreas Gampe57b34292015-01-14 15:45:59 -080044 Emit(encoding);
45}
46
Chris Larsen2fadd7b2015-08-14 14:56:10 -070047void Mips64Assembler::EmitRsd(int opcode, GpuRegister rs, GpuRegister rd,
48 int shamt, int funct) {
49 CHECK_NE(rs, kNoGpuRegister);
50 CHECK_NE(rd, kNoGpuRegister);
51 uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
52 static_cast<uint32_t>(rs) << kRsShift |
53 static_cast<uint32_t>(ZERO) << kRtShift |
54 static_cast<uint32_t>(rd) << kRdShift |
55 shamt << kShamtShift |
56 funct;
57 Emit(encoding);
58}
59
60void Mips64Assembler::EmitRtd(int opcode, GpuRegister rt, GpuRegister rd,
61 int shamt, int funct) {
62 CHECK_NE(rt, kNoGpuRegister);
63 CHECK_NE(rd, kNoGpuRegister);
64 uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
65 static_cast<uint32_t>(ZERO) << kRsShift |
66 static_cast<uint32_t>(rt) << kRtShift |
67 static_cast<uint32_t>(rd) << kRdShift |
68 shamt << kShamtShift |
69 funct;
70 Emit(encoding);
71}
72
Andreas Gampe57b34292015-01-14 15:45:59 -080073void Mips64Assembler::EmitI(int opcode, GpuRegister rs, GpuRegister rt, uint16_t imm) {
74 CHECK_NE(rs, kNoGpuRegister);
75 CHECK_NE(rt, kNoGpuRegister);
Alexey Frunze4dda3372015-06-01 18:31:49 -070076 uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
77 static_cast<uint32_t>(rs) << kRsShift |
78 static_cast<uint32_t>(rt) << kRtShift |
79 imm;
Andreas Gampe57b34292015-01-14 15:45:59 -080080 Emit(encoding);
81}
82
// Emit an I21-type instruction (R6 compact branches BEQZC/BNEZC):
// opcode | rs | 21-bit immediate.
void Mips64Assembler::EmitI21(int opcode, GpuRegister rs, uint32_t imm21) {
  CHECK_NE(rs, kNoGpuRegister);
  // NOTE(review): bits above the low 21 are silently dropped by the mask
  // below -- consider asserting the value fits (or is a sign-extended
  // 21-bit offset). TODO: confirm against callers before tightening.
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      static_cast<uint32_t>(rs) << kRsShift |
                      (imm21 & 0x1FFFFF);
  Emit(encoding);
}
90
// Emit a J-type instruction: opcode | 26-bit jump target.
void Mips64Assembler::EmitJ(int opcode, uint32_t addr26) {
  // NOTE(review): bits above the low 26 are silently dropped by the mask
  // below -- consider asserting the value fits. TODO: confirm with callers.
  uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
                      (addr26 & 0x3FFFFFF);
  Emit(encoding);
}
96
97void Mips64Assembler::EmitFR(int opcode, int fmt, FpuRegister ft, FpuRegister fs, FpuRegister fd,
Alexey Frunze4dda3372015-06-01 18:31:49 -070098 int funct) {
Andreas Gampe57b34292015-01-14 15:45:59 -080099 CHECK_NE(ft, kNoFpuRegister);
100 CHECK_NE(fs, kNoFpuRegister);
101 CHECK_NE(fd, kNoFpuRegister);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700102 uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
103 fmt << kFmtShift |
104 static_cast<uint32_t>(ft) << kFtShift |
105 static_cast<uint32_t>(fs) << kFsShift |
106 static_cast<uint32_t>(fd) << kFdShift |
107 funct;
Andreas Gampe57b34292015-01-14 15:45:59 -0800108 Emit(encoding);
109}
110
Alexey Frunze4dda3372015-06-01 18:31:49 -0700111void Mips64Assembler::EmitFI(int opcode, int fmt, FpuRegister ft, uint16_t imm) {
112 CHECK_NE(ft, kNoFpuRegister);
113 uint32_t encoding = static_cast<uint32_t>(opcode) << kOpcodeShift |
114 fmt << kFmtShift |
115 static_cast<uint32_t>(ft) << kFtShift |
116 imm;
Andreas Gampe57b34292015-01-14 15:45:59 -0800117 Emit(encoding);
118}
119
// ===== Integer arithmetic =====
// Each wrapper emits a single machine instruction; the trailing hex
// constants are the SPECIAL/SPECIAL2 function codes for that opcode.

void Mips64Assembler::Add(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x20);
}

void Mips64Assembler::Addi(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x8, rs, rt, imm16);
}

void Mips64Assembler::Addu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x21);
}

void Mips64Assembler::Addiu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x9, rs, rt, imm16);
}

// 64-bit ("doubleword") add.
void Mips64Assembler::Daddu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2d);
}

void Mips64Assembler::Daddiu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x19, rs, rt, imm16);
}

void Mips64Assembler::Sub(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x22);
}

void Mips64Assembler::Subu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x23);
}

void Mips64Assembler::Dsubu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2f);
}

// Legacy (pre-R6, "R2") multiply/divide: results are deposited in the
// HI/LO registers; see Mfhi/Mflo below.
void Mips64Assembler::MultR2(GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, static_cast<GpuRegister>(0), 0, 0x18);
}

void Mips64Assembler::MultuR2(GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, static_cast<GpuRegister>(0), 0, 0x19);
}

void Mips64Assembler::DivR2(GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, static_cast<GpuRegister>(0), 0, 0x1a);
}

void Mips64Assembler::DivuR2(GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, static_cast<GpuRegister>(0), 0, 0x1b);
}

// SPECIAL2 MUL: three-operand multiply writing rd directly.
void Mips64Assembler::MulR2(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0x1c, rs, rt, rd, 0, 2);
}

// Pre-R6 divide into rd: divide, then fetch the quotient from LO.
void Mips64Assembler::DivR2(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  DivR2(rs, rt);
  Mflo(rd);
}

// Pre-R6 modulo into rd: divide, then fetch the remainder from HI.
void Mips64Assembler::ModR2(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  DivR2(rs, rt);
  Mfhi(rd);
}

void Mips64Assembler::DivuR2(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  DivuR2(rs, rt);
  Mflo(rd);
}

void Mips64Assembler::ModuR2(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  DivuR2(rs, rt);
  Mfhi(rd);
}

// R6 multiply/divide write rd directly; the shamt-field value selects the
// variant (2 = product/quotient, 3 = remainder).
void Mips64Assembler::MulR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x18);
}

void Mips64Assembler::DivR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1a);
}

void Mips64Assembler::ModR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1a);
}

void Mips64Assembler::DivuR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1b);
}

void Mips64Assembler::ModuR6(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1b);
}

// 64-bit R6 multiply/divide/modulo, same shamt-field convention as above.
void Mips64Assembler::Dmul(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1c);
}

void Mips64Assembler::Ddiv(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1e);
}

void Mips64Assembler::Dmod(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1e);
}

void Mips64Assembler::Ddivu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 2, 0x1f);
}

void Mips64Assembler::Dmodu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 3, 0x1f);
}
235
// ===== Bitwise logical operations =====

void Mips64Assembler::And(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x24);
}

void Mips64Assembler::Andi(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xc, rs, rt, imm16);
}

void Mips64Assembler::Or(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x25);
}

void Mips64Assembler::Ori(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xd, rs, rt, imm16);
}

void Mips64Assembler::Xor(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x26);
}

void Mips64Assembler::Xori(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xe, rs, rt, imm16);
}

void Mips64Assembler::Nor(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x27);
}
263
// ===== Bit/byte manipulation (SPECIAL3, opcode 0x1f) =====

void Mips64Assembler::Bitswap(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x0, 0x20);
}

void Mips64Assembler::Dbitswap(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x0, 0x24);
}

// Sign-extend byte (shamt field 0x10 selects SEB).
void Mips64Assembler::Seb(GpuRegister rd, GpuRegister rt) {
  EmitR(0x1f, static_cast<GpuRegister>(0), rt, rd, 0x10, 0x20);
}

// Sign-extend halfword (shamt field 0x18 selects SEH).
void Mips64Assembler::Seh(GpuRegister rd, GpuRegister rt) {
  EmitR(0x1f, static_cast<GpuRegister>(0), rt, rd, 0x18, 0x20);
}

void Mips64Assembler::Dsbh(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x2, 0x24);
}

void Mips64Assembler::Dshd(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 0x5, 0x24);
}

// Bit-field extract. The field size is passed as size-1, reusing the rd
// field of the R-type encoding; position goes into the shamt field.
void Mips64Assembler::Dext(GpuRegister rt, GpuRegister rs, int pos, int size_less_one) {
  DCHECK(0 <= pos && pos < 32) << pos;
  DCHECK(0 <= size_less_one && size_less_one < 32) << size_less_one;
  EmitR(0x1f, rs, rt, static_cast<GpuRegister>(size_less_one), pos, 3);
}

void Mips64Assembler::Wsbh(GpuRegister rd, GpuRegister rt) {
  EmitRtd(0x1f, rt, rd, 2, 0x20);
}
297
// ===== Load-linked / store-conditional (R6 encodings) =====
// These take a signed 9-bit offset packed into bits 15..7 of the
// immediate field, with the sub-function code in the low bits.

void Mips64Assembler::Sc(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x26);
}

void Mips64Assembler::Scd(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x27);
}

void Mips64Assembler::Ll(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x36);
}

void Mips64Assembler::Lld(GpuRegister rt, GpuRegister base, int16_t imm9) {
  DCHECK((-256 <= imm9) && (imm9 < 256));
  EmitI(0x1f, base, rt, ((imm9 & 0x1FF) << 7) | 0x37);
}
317
// ===== Shifts and rotates =====
// Immediate forms put the count in the shamt field; variable forms take it
// from rs. Rotates reuse the shift function codes with a 1 in the rs field
// (immediate forms) or the shamt field (variable forms). The *32 variants
// shift a doubleword by shamt+32.

void Mips64Assembler::Sll(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x00);
}

void Mips64Assembler::Srl(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x02);
}

void Mips64Assembler::Rotr(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(1), rt, rd, shamt, 0x02);
}

void Mips64Assembler::Sra(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x03);
}

void Mips64Assembler::Sllv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x04);
}

void Mips64Assembler::Rotrv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 1, 0x06);
}

void Mips64Assembler::Srlv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x06);
}

void Mips64Assembler::Srav(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x07);
}

void Mips64Assembler::Dsll(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x38);
}

void Mips64Assembler::Dsrl(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3a);
}

void Mips64Assembler::Drotr(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(1), rt, rd, shamt, 0x3a);
}

void Mips64Assembler::Dsra(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3b);
}

void Mips64Assembler::Dsll32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3c);
}

void Mips64Assembler::Dsrl32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3e);
}

void Mips64Assembler::Drotr32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(1), rt, rd, shamt, 0x3e);
}

void Mips64Assembler::Dsra32(GpuRegister rd, GpuRegister rt, int shamt) {
  EmitR(0, static_cast<GpuRegister>(0), rt, rd, shamt, 0x3f);
}

void Mips64Assembler::Dsllv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x14);
}

void Mips64Assembler::Dsrlv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x16);
}

void Mips64Assembler::Drotrv(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 1, 0x16);
}

void Mips64Assembler::Dsrav(GpuRegister rd, GpuRegister rt, GpuRegister rs) {
  EmitR(0, rs, rt, rd, 0, 0x17);
}
397
// ===== Memory loads and immediate-building helpers =====
// Loads address memory at rs + sign-extended imm16.

void Mips64Assembler::Lb(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x20, rs, rt, imm16);
}

void Mips64Assembler::Lh(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x21, rs, rt, imm16);
}

void Mips64Assembler::Lw(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x23, rs, rt, imm16);
}

void Mips64Assembler::Ld(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x37, rs, rt, imm16);
}

void Mips64Assembler::Lbu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x24, rs, rt, imm16);
}

void Mips64Assembler::Lhu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x25, rs, rt, imm16);
}

void Mips64Assembler::Lwu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x27, rs, rt, imm16);
}

// Load imm16 into the upper halfword of rt.
void Mips64Assembler::Lui(GpuRegister rt, uint16_t imm16) {
  EmitI(0xf, static_cast<GpuRegister>(0), rt, imm16);
}

// DAHI/DATI (R6): add imm16 to bits 47..32 / 63..48 of rs. The rt-field
// constants (6 and 0x1e) select the sub-operation under opcode 1.
void Mips64Assembler::Dahi(GpuRegister rs, uint16_t imm16) {
  EmitI(1, rs, static_cast<GpuRegister>(6), imm16);
}

void Mips64Assembler::Dati(GpuRegister rs, uint16_t imm16) {
  EmitI(1, rs, static_cast<GpuRegister>(0x1e), imm16);
}
437
// Memory barrier; the 5-bit stype selects the barrier variant (0 = full).
void Mips64Assembler::Sync(uint32_t stype) {
  EmitR(0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0),
        static_cast<GpuRegister>(0), stype & 0x1f, 0xf);
}

// Copy the HI special register (remainder/high product) into rd.
void Mips64Assembler::Mfhi(GpuRegister rd) {
  EmitR(0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0), rd, 0, 0x10);
}

// Copy the LO special register (quotient/low product) into rd.
void Mips64Assembler::Mflo(GpuRegister rd) {
  EmitR(0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0), rd, 0, 0x12);
}
450
// ===== Memory stores and set-on-less-than =====

void Mips64Assembler::Sb(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x28, rs, rt, imm16);
}

void Mips64Assembler::Sh(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x29, rs, rt, imm16);
}

void Mips64Assembler::Sw(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x2b, rs, rt, imm16);
}

void Mips64Assembler::Sd(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0x3f, rs, rt, imm16);
}

// rd = (rs < rt) ? 1 : 0, signed/unsigned and register/immediate forms.
void Mips64Assembler::Slt(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2a);
}

void Mips64Assembler::Sltu(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x2b);
}

void Mips64Assembler::Slti(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xa, rs, rt, imm16);
}

void Mips64Assembler::Sltiu(GpuRegister rt, GpuRegister rs, uint16_t imm16) {
  EmitI(0xb, rs, rt, imm16);
}
482
// ===== Delay-slot branches and jumps =====
// Each emits a trailing NOP to fill the architectural branch delay slot.

void Mips64Assembler::Beq(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  EmitI(0x4, rs, rt, imm16);
  Nop();  // Delay slot.
}

void Mips64Assembler::Bne(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  EmitI(0x5, rs, rt, imm16);
  Nop();  // Delay slot.
}

void Mips64Assembler::J(uint32_t addr26) {
  EmitJ(0x2, addr26);
  Nop();  // Delay slot.
}

void Mips64Assembler::Jal(uint32_t addr26) {
  EmitJ(0x3, addr26);
  Nop();  // Delay slot.
}
502
// ===== R6 conditional selects and count-leading-zeros/ones =====

// rd = (rt == 0) ? rs : 0.
void Mips64Assembler::Seleqz(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x35);
}

// rd = (rt != 0) ? rs : 0.
void Mips64Assembler::Selnez(GpuRegister rd, GpuRegister rs, GpuRegister rt) {
  EmitR(0, rs, rt, rd, 0, 0x37);
}

void Mips64Assembler::Clz(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x10);
}

void Mips64Assembler::Clo(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x11);
}

void Mips64Assembler::Dclz(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x12);
}

void Mips64Assembler::Dclo(GpuRegister rd, GpuRegister rs) {
  EmitRsd(0, rs, rd, 0x01, 0x13);
}
526
// Jump to rs, storing the return address in rd; NOP fills the delay slot.
void Mips64Assembler::Jalr(GpuRegister rd, GpuRegister rs) {
  EmitR(0, rs, static_cast<GpuRegister>(0), rd, 0, 0x09);
  Nop();  // Delay slot.
}

// Conventional call: link register is RA.
void Mips64Assembler::Jalr(GpuRegister rs) {
  Jalr(RA, rs);
}

// Plain indirect jump: linking into ZERO discards the return address.
void Mips64Assembler::Jr(GpuRegister rs) {
  Jalr(ZERO, rs);
}

// AUIPC (R6, PCREL opcode 0x3B): rs += imm16 << 16 relative to the PC.
// The rt-field constant 0x1E selects the AUIPC sub-operation.
void Mips64Assembler::Auipc(GpuRegister rs, uint16_t imm16) {
  EmitI(0x3B, rs, static_cast<GpuRegister>(0x1E), imm16);
}

// Compact (no delay slot) indirect jump / jump-and-link (R6).
void Mips64Assembler::Jic(GpuRegister rt, uint16_t imm16) {
  EmitI(0x36, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Jialc(GpuRegister rt, uint16_t imm16) {
  EmitI(0x3E, static_cast<GpuRegister>(0), rt, imm16);
}
551
// ===== R6 compact branches (no delay slot) =====
// The same opcode encodes several conditions, distinguished by which
// register fields are ZERO or equal; hence the CHECKs below.

void Mips64Assembler::Bltc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x17, rs, rt, imm16);
}

// rs == rt selects the BLTZC variant of opcode 0x17.
void Mips64Assembler::Bltzc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x17, rt, rt, imm16);
}

// rs == ZERO selects the BGTZC variant of opcode 0x17.
void Mips64Assembler::Bgtzc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x17, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Bgec(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x16, rs, rt, imm16);
}

void Mips64Assembler::Bgezc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x16, rt, rt, imm16);
}

void Mips64Assembler::Blezc(GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rt, ZERO);
  EmitI(0x16, static_cast<GpuRegister>(0), rt, imm16);
}

void Mips64Assembler::Bltuc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x7, rs, rt, imm16);
}

void Mips64Assembler::Bgeuc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x6, rs, rt, imm16);
}

// BEQC/BNEC encode the register pair in canonical order (smaller-numbered
// register in the rs field) -- the comparison is symmetric, so swapping is
// harmless and keeps the encoding valid.
void Mips64Assembler::Beqc(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x8, (rs < rt) ? rs : rt, (rs < rt) ? rt : rs, imm16);
}

void Mips64Assembler::Bnec(GpuRegister rs, GpuRegister rt, uint16_t imm16) {
  CHECK_NE(rs, ZERO);
  CHECK_NE(rt, ZERO);
  CHECK_NE(rs, rt);
  EmitI(0x18, (rs < rt) ? rs : rt, (rs < rt) ? rt : rs, imm16);
}

// Compare-to-zero branches with a 21-bit offset.
void Mips64Assembler::Beqzc(GpuRegister rs, uint32_t imm21) {
  CHECK_NE(rs, ZERO);
  EmitI21(0x36, rs, imm21);
}

void Mips64Assembler::Bnezc(GpuRegister rs, uint32_t imm21) {
  CHECK_NE(rs, ZERO);
  EmitI21(0x3E, rs, imm21);
}
623
// ===== FPU arithmetic (COP1, opcode 0x11) =====
// The fmt field selects the operand format: 0x10 = single (S),
// 0x11 = double (D); the final constant is the COP1 function code.
// Single-operand forms leave the unused ft field as register 0.

void Mips64Assembler::AddS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x0);
}

void Mips64Assembler::SubS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x1);
}

void Mips64Assembler::MulS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x2);
}

void Mips64Assembler::DivS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x3);
}

void Mips64Assembler::AddD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x0);
}

void Mips64Assembler::SubD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x1);
}

void Mips64Assembler::MulD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x2);
}

void Mips64Assembler::DivD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x3);
}

void Mips64Assembler::SqrtS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x4);
}

void Mips64Assembler::SqrtD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x4);
}

void Mips64Assembler::AbsS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x5);
}

void Mips64Assembler::AbsD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x5);
}

void Mips64Assembler::MovS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x6);
}

void Mips64Assembler::MovD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x6);
}

void Mips64Assembler::NegS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x7);
}

void Mips64Assembler::NegD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x7);
}

// Round/Ceil/Floor to L (64-bit int) or W (32-bit int) destination format.
void Mips64Assembler::RoundLS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x8);
}

void Mips64Assembler::RoundLD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x8);
}

void Mips64Assembler::RoundWS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xc);
}

void Mips64Assembler::RoundWD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xc);
}

void Mips64Assembler::CeilLS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xa);
}

void Mips64Assembler::CeilLD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xa);
}

void Mips64Assembler::CeilWS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xe);
}

void Mips64Assembler::CeilWD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xe);
}

void Mips64Assembler::FloorLS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xb);
}

void Mips64Assembler::FloorLD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xb);
}

void Mips64Assembler::FloorWS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0xf);
}

void Mips64Assembler::FloorWD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0xf);
}

// R6 FP select: fd = fd.bit0 ? ft : fs.
void Mips64Assembler::SelS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x10);
}

void Mips64Assembler::SelD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x10);
}

void Mips64Assembler::RintS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x1a);
}

void Mips64Assembler::RintD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x1a);
}

void Mips64Assembler::ClassS(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x1b);
}

void Mips64Assembler::ClassD(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x1b);
}

void Mips64Assembler::MinS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x1c);
}

void Mips64Assembler::MinD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x1c);
}

void Mips64Assembler::MaxS(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x10, ft, fs, fd, 0x1e);
}

void Mips64Assembler::MaxD(FpuRegister fd, FpuRegister fs, FpuRegister ft) {
  EmitFR(0x11, 0x11, ft, fs, fd, 0x1e);
}
775
// ===== FP format conversions (CVT.dst.src) =====
// Source format is in the fmt field (0x10 = S, 0x11 = D, 0x14 = W,
// 0x15 = L); funct 0x20 converts to single, 0x21 to double.

void Mips64Assembler::Cvtsw(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x14, static_cast<FpuRegister>(0), fs, fd, 0x20);
}

void Mips64Assembler::Cvtdw(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x14, static_cast<FpuRegister>(0), fs, fd, 0x21);
}

void Mips64Assembler::Cvtsd(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x11, static_cast<FpuRegister>(0), fs, fd, 0x20);
}

void Mips64Assembler::Cvtds(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x10, static_cast<FpuRegister>(0), fs, fd, 0x21);
}

void Mips64Assembler::Cvtsl(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x15, static_cast<FpuRegister>(0), fs, fd, 0x20);
}

void Mips64Assembler::Cvtdl(FpuRegister fd, FpuRegister fs) {
  EmitFR(0x11, 0x15, static_cast<FpuRegister>(0), fs, fd, 0x21);
}
799
// ===== GPR<->FPR moves and FPU memory access =====
// The move forms reuse the COP1 encoding with the GPR packed into the
// ft field; the fmt-field constant selects MF/MT (0x00/0x04) and their
// doubleword variants DMF/DMT (0x01/0x05).

void Mips64Assembler::Mfc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x00, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Mtc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x04, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Dmfc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x01, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

void Mips64Assembler::Dmtc1(GpuRegister rt, FpuRegister fs) {
  EmitFR(0x11, 0x05, static_cast<FpuRegister>(rt), fs, static_cast<FpuRegister>(0), 0x0);
}

// FPU loads/stores: word/doubleword at rs + sign-extended imm16.
void Mips64Assembler::Lwc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x31, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Ldc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x35, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Swc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x39, rs, static_cast<GpuRegister>(ft), imm16);
}

void Mips64Assembler::Sdc1(FpuRegister ft, GpuRegister rs, uint16_t imm16) {
  EmitI(0x3d, rs, static_cast<GpuRegister>(ft), imm16);
}
831
// Software breakpoint trap.
void Mips64Assembler::Break() {
  EmitR(0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0),
        static_cast<GpuRegister>(0), 0, 0xD);
}

// No-operation: the all-zero encoding (SLL zero, zero, 0).
void Mips64Assembler::Nop() {
  EmitR(0x0, static_cast<GpuRegister>(0), static_cast<GpuRegister>(0),
        static_cast<GpuRegister>(0), 0, 0x0);
}
841
// ===== Pseudo-instructions built from logical ops =====

// rd = rs (encoded as OR with the zero register).
void Mips64Assembler::Move(GpuRegister rd, GpuRegister rs) {
  Or(rd, rs, ZERO);
}

// rd = 0.
void Mips64Assembler::Clear(GpuRegister rd) {
  Move(rd, ZERO);
}

// rd = ~rs (encoded as NOR with the zero register).
void Mips64Assembler::Not(GpuRegister rd, GpuRegister rs) {
  Nor(rd, rs, ZERO);
}
853
Alexey Frunze4dda3372015-06-01 18:31:49 -0700854void Mips64Assembler::LoadConst32(GpuRegister rd, int32_t value) {
855 if (IsUint<16>(value)) {
856 // Use OR with (unsigned) immediate to encode 16b unsigned int.
857 Ori(rd, ZERO, value);
858 } else if (IsInt<16>(value)) {
859 // Use ADD with (signed) immediate to encode 16b signed int.
860 Addiu(rd, ZERO, value);
861 } else {
862 Lui(rd, value >> 16);
863 if (value & 0xFFFF)
864 Ori(rd, rd, value);
865 }
Andreas Gampe57b34292015-01-14 15:45:59 -0800866}
867
// Materializes an arbitrary 64-bit constant into rd, picking the shortest
// instruction sequence (1 to 4 instructions) from a cascade of patterns.
// The cases are ordered from cheapest to most general; the order of the
// emitted instructions is significant and must not change.
void Mips64Assembler::LoadConst64(GpuRegister rd, int64_t value) {
  // Bit 31 matters because LUI/ORI results are sign-extended to 64 bits;
  // DAHI/DATI immediates must compensate for that sign extension.
  int bit31 = (value & UINT64_C(0x80000000)) != 0;

  // Loads with 1 instruction.
  if (IsUint<16>(value)) {
    Ori(rd, ZERO, value);
  } else if (IsInt<16>(value)) {
    Daddiu(rd, ZERO, value);
  } else if ((value & 0xFFFF) == 0 && IsInt<16>(value >> 16)) {
    Lui(rd, value >> 16);
  } else if (IsInt<32>(value)) {
    // Loads with 2 instructions.
    Lui(rd, value >> 16);
    Ori(rd, rd, value);
  } else if ((value & 0xFFFF0000) == 0 && IsInt<16>(value >> 32)) {
    // Low 16 bits + bits [47:32]: ORI then DAHI.
    Ori(rd, ZERO, value);
    Dahi(rd, value >> 32);
  } else if ((value & UINT64_C(0xFFFFFFFF0000)) == 0) {
    // Low 16 bits + top 16 bits: ORI then DATI.
    Ori(rd, ZERO, value);
    Dati(rd, value >> 48);
  } else if ((value & 0xFFFF) == 0 &&
             (-32768 - bit31) <= (value >> 32) && (value >> 32) <= (32767 - bit31)) {
    // Bits [31:16] via LUI, bits [47:32] via DAHI (adjusted for sign extension).
    Lui(rd, value >> 16);
    Dahi(rd, (value >> 32) + bit31);
  } else if ((value & 0xFFFF) == 0 && ((value >> 31) & 0x1FFFF) == ((0x20000 - bit31) & 0x1FFFF)) {
    // Bits [31:16] via LUI, top 16 bits via DATI (adjusted for sign extension).
    Lui(rd, value >> 16);
    Dati(rd, (value >> 48) + bit31);
  } else {
    // Try building a small constant and shifting it into place.
    int shift_cnt = CTZ(value);
    int64_t tmp = value >> shift_cnt;
    if (IsUint<16>(tmp)) {
      Ori(rd, ZERO, tmp);
      if (shift_cnt < 32)
        Dsll(rd, rd, shift_cnt);
      else
        Dsll32(rd, rd, shift_cnt & 31);  // DSLL32 shifts by 32 + (shamt & 31).
    } else if (IsInt<16>(tmp)) {
      Daddiu(rd, ZERO, tmp);
      if (shift_cnt < 32)
        Dsll(rd, rd, shift_cnt);
      else
        Dsll32(rd, rd, shift_cnt & 31);
    } else if (IsInt<32>(tmp)) {
      // Loads with 3 instructions.
      Lui(rd, tmp >> 16);
      Ori(rd, rd, tmp);
      if (shift_cnt < 32)
        Dsll(rd, rd, shift_cnt);
      else
        Dsll32(rd, rd, shift_cnt & 31);
    } else {
      // Shift only above the low 16 bits, then OR the low 16 bits back in.
      shift_cnt = 16 + CTZ(value >> 16);
      tmp = value >> shift_cnt;
      if (IsUint<16>(tmp)) {
        Ori(rd, ZERO, tmp);
        if (shift_cnt < 32)
          Dsll(rd, rd, shift_cnt);
        else
          Dsll32(rd, rd, shift_cnt & 31);
        Ori(rd, rd, value);
      } else if (IsInt<16>(tmp)) {
        Daddiu(rd, ZERO, tmp);
        if (shift_cnt < 32)
          Dsll(rd, rd, shift_cnt);
        else
          Dsll32(rd, rd, shift_cnt & 31);
        Ori(rd, rd, value);
      } else {
        // General case: loads with 3-4 instructions (LUI/ORI/DAHI/DATI),
        // carrying the sign-extension adjustment upward through tmp2.
        uint64_t tmp2 = value;
        bool used_lui = false;
        if (((tmp2 >> 16) & 0xFFFF) != 0 || (tmp2 & 0xFFFFFFFF) == 0) {
          Lui(rd, tmp2 >> 16);
          used_lui = true;
        }
        if ((tmp2 & 0xFFFF) != 0) {
          if (used_lui)
            Ori(rd, rd, tmp2);
          else
            Ori(rd, ZERO, tmp2);
        }
        if (bit31) {
          // Compensate for sign extension of the low 32 bits before DAHI.
          tmp2 += UINT64_C(0x100000000);
        }
        if (((tmp2 >> 32) & 0xFFFF) != 0) {
          Dahi(rd, tmp2 >> 32);
        }
        if (tmp2 & UINT64_C(0x800000000000)) {
          // Compensate for sign extension of the DAHI immediate before DATI.
          tmp2 += UINT64_C(0x1000000000000);
        }
        if ((tmp2 >> 48) != 0) {
          Dati(rd, tmp2 >> 48);
        }
      }
    }
  }
}
965
// rt = rs + value (32-bit add). When `value` does not fit in the signed
// 16-bit ADDIU immediate, it is first materialized into `rtmp`, which is
// clobbered. NOTE(review): in that case `rtmp` must not alias `rs` — confirm
// at call sites.
void Mips64Assembler::Addiu32(GpuRegister rt, GpuRegister rs, int32_t value, GpuRegister rtmp) {
  if (IsInt<16>(value)) {
    Addiu(rt, rs, value);
  } else {
    LoadConst32(rtmp, value);
    Addu(rt, rs, rtmp);
  }
}
974
// rt = rs + value (64-bit add). When `value` does not fit in the signed
// 16-bit DADDIU immediate, it is first materialized into `rtmp`, which is
// clobbered. NOTE(review): in that case `rtmp` must not alias `rs` — confirm
// at call sites.
void Mips64Assembler::Daddiu64(GpuRegister rt, GpuRegister rs, int64_t value, GpuRegister rtmp) {
  if (IsInt<16>(value)) {
    Daddiu(rt, rs, value);
  } else {
    LoadConst64(rtmp, value);
    Daddu(rt, rs, rtmp);
  }
}
983
Alexey Frunze4dda3372015-06-01 18:31:49 -0700984//
985// MIPS64R6 branches
986//
987//
988// Unconditional (pc + 32-bit signed offset):
989//
990// auipc at, ofs_high
991// jic at, ofs_low
992// // no delay/forbidden slot
993//
994//
995// Conditional (pc + 32-bit signed offset):
996//
997// b<cond>c reg, +2 // skip next 2 instructions
998// auipc at, ofs_high
999// jic at, ofs_low
1000// // no delay/forbidden slot
1001//
1002//
1003// Unconditional (pc + 32-bit signed offset) and link:
1004//
1005// auipc reg, ofs_high
1006// daddiu reg, ofs_low
1007// jialc reg, 0
1008// // no delay/forbidden slot
1009//
1010//
1011// TODO: use shorter instruction sequences whenever possible.
1012//
1013
// Binds `label` to the current buffer position and patches every forward
// branch that was linked to it. Unresolved branches form a singly-linked
// list threaded through the immediate fields of their auipc + jic/daddiu
// pairs; Bind() walks that list backwards, replacing each link with the now
// known pc-relative offset.
void Mips64Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());
  int32_t bound_pc = buffer_.Size();

  // Walk the list of the branches (auipc + jic pairs) referring to and preceding this label.
  // Embed the previously unknown pc-relative addresses in them.
  while (label->IsLinked()) {
    int32_t position = label->Position();
    // Extract the branch (instruction pair)
    uint32_t auipc = buffer_.Load<uint32_t>(position);
    uint32_t jic = buffer_.Load<uint32_t>(position + 4);  // actually, jic or daddiu

    // Extract the location of the previous pair in the list (walking the list backwards;
    // the previous pair location was stored in the immediate operands of the instructions)
    int32_t prev = (auipc << 16) | (jic & 0xFFFF);

    // Get the pc-relative address
    uint32_t offset = bound_pc - position;
    offset += (offset & 0x8000) << 1;  // account for sign extension in jic/daddiu

    // Embed it in the two instructions
    auipc = (auipc & 0xFFFF0000) | (offset >> 16);
    jic = (jic & 0xFFFF0000) | (offset & 0xFFFF);

    // Save the adjusted instructions
    buffer_.Store<uint32_t>(position, auipc);
    buffer_.Store<uint32_t>(position + 4, jic);

    // On to the previous branch in the list...
    label->position_ = prev;
  }

  // Now make the label object contain its own location
  // (it will be used by the branches referring to and following this label)
  label->BindTo(bound_pc);
}
1050
// Unconditional long-range branch to `label`, emitted as an auipc + jic
// pair (2 instructions, 32-bit signed pc-relative reach). Clobbers AT.
// Backward branches are resolved immediately; forward branches are linked
// into the label's fixup list and patched later by Bind().
void Mips64Assembler::B(Label* label) {
  if (label->IsBound()) {
    // Branch backwards (to a preceding label), distance is known
    uint32_t offset = label->Position() - buffer_.Size();
    CHECK_LE(static_cast<int32_t>(offset), 0);
    offset += (offset & 0x8000) << 1;  // account for sign extension in jic
    Auipc(AT, offset >> 16);
    Jic(AT, offset);
  } else {
    // Branch forward (to a following label), distance is unknown
    int32_t position = buffer_.Size();
    // The first branch forward will have 0 in its pc-relative address (copied from label's
    // position). It will be the terminator of the list of forward-reaching branches.
    uint32_t prev = label->position_;
    Auipc(AT, prev >> 16);
    Jic(AT, prev);
    // Now make the link object point to the location of this branch
    // (this forms a linked list of branches preceding this label)
    label->LinkTo(position);
  }
}
1072
// Long-range branch-and-link to `label` via `indirect_reg`, emitted as
// auipc + daddiu + jialc (3 instructions). `indirect_reg` is clobbered with
// the target address. Forward branches are linked into the label's fixup
// list (see Bind()); the daddiu's immediate carries the link, like jic's.
void Mips64Assembler::Jalr(Label* label, GpuRegister indirect_reg) {
  if (label->IsBound()) {
    // Branch backwards (to a preceding label), distance is known
    uint32_t offset = label->Position() - buffer_.Size();
    CHECK_LE(static_cast<int32_t>(offset), 0);
    offset += (offset & 0x8000) << 1;  // account for sign extension in daddiu
    Auipc(indirect_reg, offset >> 16);
    Daddiu(indirect_reg, indirect_reg, offset);
    Jialc(indirect_reg, 0);
  } else {
    // Branch forward (to a following label), distance is unknown
    int32_t position = buffer_.Size();
    // The first branch forward will have 0 in its pc-relative address (copied from label's
    // position). It will be the terminator of the list of forward-reaching branches.
    uint32_t prev = label->position_;
    Auipc(indirect_reg, prev >> 16);
    Daddiu(indirect_reg, indirect_reg, prev);
    Jialc(indirect_reg, 0);
    // Now make the link object point to the location of this branch
    // (this forms a linked list of branches preceding this label)
    label->LinkTo(position);
  }
}
1096
// Branch to label if rs < rt (signed). Compact branches have limited reach,
// so emit the inverse condition to skip the 2-instruction long branch.
void Mips64Assembler::Bltc(GpuRegister rs, GpuRegister rt, Label* label) {
  Bgec(rs, rt, 2);  // If rs >= rt, skip the auipc+jic pair emitted by B().
  B(label);
}
1101
// Branch to label if rt < 0 (signed), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Bltzc(GpuRegister rt, Label* label) {
  Bgezc(rt, 2);  // If rt >= 0, skip the auipc+jic pair emitted by B().
  B(label);
}
1106
// Branch to label if rt > 0 (signed), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Bgtzc(GpuRegister rt, Label* label) {
  Blezc(rt, 2);  // If rt <= 0, skip the auipc+jic pair emitted by B().
  B(label);
}
1111
// Branch to label if rs >= rt (signed), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Bgec(GpuRegister rs, GpuRegister rt, Label* label) {
  Bltc(rs, rt, 2);  // If rs < rt, skip the auipc+jic pair emitted by B().
  B(label);
}
1116
// Branch to label if rt >= 0 (signed), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Bgezc(GpuRegister rt, Label* label) {
  Bltzc(rt, 2);  // If rt < 0, skip the auipc+jic pair emitted by B().
  B(label);
}
1121
// Branch to label if rt <= 0 (signed), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Blezc(GpuRegister rt, Label* label) {
  Bgtzc(rt, 2);  // If rt > 0, skip the auipc+jic pair emitted by B().
  B(label);
}
1126
// Branch to label if rs < rt (unsigned), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Bltuc(GpuRegister rs, GpuRegister rt, Label* label) {
  Bgeuc(rs, rt, 2);  // If rs >= rt (unsigned), skip the pair emitted by B().
  B(label);
}
1131
// Branch to label if rs >= rt (unsigned), via the inverse compact branch
// skipping the 2-instruction long branch.
void Mips64Assembler::Bgeuc(GpuRegister rs, GpuRegister rt, Label* label) {
  Bltuc(rs, rt, 2);  // If rs < rt (unsigned), skip the pair emitted by B().
  B(label);
}
1136
// Branch to label if rs == rt, via the inverse compact branch skipping the
// 2-instruction long branch.
void Mips64Assembler::Beqc(GpuRegister rs, GpuRegister rt, Label* label) {
  Bnec(rs, rt, 2);  // If rs != rt, skip the auipc+jic pair emitted by B().
  B(label);
}
1141
// Branch to label if rs != rt, via the inverse compact branch skipping the
// 2-instruction long branch.
void Mips64Assembler::Bnec(GpuRegister rs, GpuRegister rt, Label* label) {
  Beqc(rs, rt, 2);  // If rs == rt, skip the auipc+jic pair emitted by B().
  B(label);
}
1146
// Branch to label if rs == 0, via the inverse compact branch skipping the
// 2-instruction long branch.
void Mips64Assembler::Beqzc(GpuRegister rs, Label* label) {
  Bnezc(rs, 2);  // If rs != 0, skip the auipc+jic pair emitted by B().
  B(label);
}
1151
// Branch to label if rs != 0, via the inverse compact branch skipping the
// 2-instruction long branch.
void Mips64Assembler::Bnezc(GpuRegister rs, Label* label) {
  Beqzc(rs, 2);  // If rs == 0, skip the auipc+jic pair emitted by B().
  B(label);
}
1156
// Loads a value of the given width/signedness from base + offset into `reg`.
// Offsets outside the signed 16-bit range of the load instructions are first
// materialized into AT and added to the base; AT is clobbered in that case.
// NOTE(review): assumes base != AT when the offset needs materializing —
// confirm at call sites.
void Mips64Assembler::LoadFromOffset(LoadOperandType type, GpuRegister reg, GpuRegister base,
                                     int32_t offset) {
  if (!IsInt<16>(offset)) {
    LoadConst32(AT, offset);
    Daddu(AT, AT, base);
    base = AT;
    offset = 0;
  }

  switch (type) {
    case kLoadSignedByte:
      Lb(reg, base, offset);
      break;
    case kLoadUnsignedByte:
      Lbu(reg, base, offset);
      break;
    case kLoadSignedHalfword:
      Lh(reg, base, offset);
      break;
    case kLoadUnsignedHalfword:
      Lhu(reg, base, offset);
      break;
    case kLoadWord:
      Lw(reg, base, offset);
      break;
    case kLoadUnsignedWord:
      Lwu(reg, base, offset);
      break;
    case kLoadDoubleword:
      Ld(reg, base, offset);
      break;
  }
}
1190
// Loads a 4- or 8-byte floating-point value from base + offset into `reg`.
// Offsets outside the signed 16-bit range are first materialized into AT
// (clobbered) and added to the base. NOTE(review): assumes base != AT when
// the offset needs materializing — confirm at call sites.
void Mips64Assembler::LoadFpuFromOffset(LoadOperandType type, FpuRegister reg, GpuRegister base,
                                        int32_t offset) {
  if (!IsInt<16>(offset)) {
    LoadConst32(AT, offset);
    Daddu(AT, AT, base);
    base = AT;
    offset = 0;
  }

  switch (type) {
    case kLoadWord:
      Lwc1(reg, base, offset);
      break;
    case kLoadDoubleword:
      Ldc1(reg, base, offset);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}
1211
1212void Mips64Assembler::EmitLoad(ManagedRegister m_dst, GpuRegister src_register, int32_t src_offset,
1213 size_t size) {
1214 Mips64ManagedRegister dst = m_dst.AsMips64();
1215 if (dst.IsNoRegister()) {
1216 CHECK_EQ(0u, size) << dst;
1217 } else if (dst.IsGpuRegister()) {
1218 if (size == 4) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001219 LoadFromOffset(kLoadWord, dst.AsGpuRegister(), src_register, src_offset);
1220 } else if (size == 8) {
1221 CHECK_EQ(8u, size) << dst;
1222 LoadFromOffset(kLoadDoubleword, dst.AsGpuRegister(), src_register, src_offset);
1223 } else {
1224 UNIMPLEMENTED(FATAL) << "We only support Load() of size 4 and 8";
1225 }
1226 } else if (dst.IsFpuRegister()) {
1227 if (size == 4) {
1228 CHECK_EQ(4u, size) << dst;
1229 LoadFpuFromOffset(kLoadWord, dst.AsFpuRegister(), src_register, src_offset);
1230 } else if (size == 8) {
1231 CHECK_EQ(8u, size) << dst;
1232 LoadFpuFromOffset(kLoadDoubleword, dst.AsFpuRegister(), src_register, src_offset);
1233 } else {
1234 UNIMPLEMENTED(FATAL) << "We only support Load() of size 4 and 8";
1235 }
1236 }
1237}
1238
// Stores `reg` (byte/halfword/word/doubleword) to base + offset. Offsets
// outside the signed 16-bit range are first materialized into AT (clobbered)
// and added to the base. NOTE(review): assumes base != AT and reg != AT when
// the offset needs materializing — confirm at call sites.
void Mips64Assembler::StoreToOffset(StoreOperandType type, GpuRegister reg, GpuRegister base,
                                    int32_t offset) {
  if (!IsInt<16>(offset)) {
    LoadConst32(AT, offset);
    Daddu(AT, AT, base);
    base = AT;
    offset = 0;
  }

  switch (type) {
    case kStoreByte:
      Sb(reg, base, offset);
      break;
    case kStoreHalfword:
      Sh(reg, base, offset);
      break;
    case kStoreWord:
      Sw(reg, base, offset);
      break;
    case kStoreDoubleword:
      Sd(reg, base, offset);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}
1265
// Stores a 4- or 8-byte floating-point register to base + offset. Offsets
// outside the signed 16-bit range are first materialized into AT (clobbered)
// and added to the base. NOTE(review): assumes base != AT when the offset
// needs materializing — confirm at call sites.
void Mips64Assembler::StoreFpuToOffset(StoreOperandType type, FpuRegister reg, GpuRegister base,
                                       int32_t offset) {
  if (!IsInt<16>(offset)) {
    LoadConst32(AT, offset);
    Daddu(AT, AT, base);
    base = AT;
    offset = 0;
  }

  switch (type) {
    case kStoreWord:
      Swc1(reg, base, offset);
      break;
    case kStoreDoubleword:
      Sdc1(reg, base, offset);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}
1286
// Maps a MIPS64 core register to its DWARF register number for CFI records.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
1290
// Size of one stack slot / saved register on MIPS64 (8 bytes).
constexpr size_t kFramePointerSize = 8;
1292
// Emits the managed-code method prologue: allocates the frame, saves RA and
// the callee-saved registers (recording CFI for each), stores the Method*
// at SP + 0, and writes the entry spills above the new frame. The entry
// spill offsets start at frame_size + kFramePointerSize, i.e. in the
// caller's area — presumably the incoming argument slots; TODO confirm
// against the calling convention.
void Mips64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                 const std::vector<ManagedRegister>& callee_save_regs,
                                 const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);

  // Increase frame to required size.
  IncreaseFrameSize(frame_size);

  // Push callee saves and return address
  int stack_offset = frame_size - kFramePointerSize;
  StoreToOffset(kStoreDoubleword, RA, SP, stack_offset);
  cfi_.RelOffset(DWARFReg(RA), stack_offset);
  for (int i = callee_save_regs.size() - 1; i >= 0; --i) {
    stack_offset -= kFramePointerSize;
    GpuRegister reg = callee_save_regs.at(i).AsMips64().AsGpuRegister();
    StoreToOffset(kStoreDoubleword, reg, SP, stack_offset);
    cfi_.RelOffset(DWARFReg(reg), stack_offset);
  }

  // Write out Method*.
  StoreToOffset(kStoreDoubleword, method_reg.AsMips64().AsGpuRegister(), SP, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    Mips64ManagedRegister reg = entry_spills.at(i).AsMips64();
    ManagedRegisterSpill spill = entry_spills.at(i);
    int32_t size = spill.getSize();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      offset += size;
    } else if (reg.IsFpuRegister()) {
      StoreFpuToOffset((size == 4) ? kStoreWord : kStoreDoubleword,
          reg.AsFpuRegister(), SP, offset);
      offset += size;
    } else if (reg.IsGpuRegister()) {
      StoreToOffset((size == 4) ? kStoreWord : kStoreDoubleword,
          reg.AsGpuRegister(), SP, offset);
      offset += size;
    }
  }
}
1335
// Emits the managed-code method epilogue: restores callee-saved registers
// and RA (mirroring BuildFrame's layout), releases the frame, and jumps to
// RA. CFI state is saved/restored around the exit so unwind info stays
// valid for any code emitted after this return sequence.
void Mips64Assembler::RemoveFrame(size_t frame_size,
                                  const std::vector<ManagedRegister>& callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();

  // Pop callee saves and return address
  int stack_offset = frame_size - (callee_save_regs.size() * kFramePointerSize) - kFramePointerSize;
  for (size_t i = 0; i < callee_save_regs.size(); ++i) {
    GpuRegister reg = callee_save_regs.at(i).AsMips64().AsGpuRegister();
    LoadFromOffset(kLoadDoubleword, reg, SP, stack_offset);
    cfi_.Restore(DWARFReg(reg));
    stack_offset += kFramePointerSize;
  }
  LoadFromOffset(kLoadDoubleword, RA, SP, stack_offset);
  cfi_.Restore(DWARFReg(RA));

  // Decrease frame to required size.
  DecreaseFrameSize(frame_size);

  // Then jump to the return address.
  Jr(RA);

  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}
1362
// Grows the stack frame by `adjust` bytes (SP -= adjust) and records the
// CFA offset change for unwind info. `adjust` must be slot-aligned.
void Mips64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kFramePointerSize);
  Daddiu64(SP, SP, static_cast<int32_t>(-adjust));
  cfi_.AdjustCFAOffset(adjust);
}
1368
// Shrinks the stack frame by `adjust` bytes (SP += adjust) and records the
// CFA offset change for unwind info. `adjust` must be slot-aligned.
void Mips64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kFramePointerSize);
  Daddiu64(SP, SP, static_cast<int32_t>(adjust));
  cfi_.AdjustCFAOffset(-adjust);
}
1374
// Stores the 4- or 8-byte value in msrc (GPU or FPU) to the stack slot at
// `dest`. A no-register source is legal only with size 0. The inner else
// branches are unreachable given the preceding CHECK, kept for symmetry.
void Mips64Assembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {
  Mips64ManagedRegister src = msrc.AsMips64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsGpuRegister()) {
    CHECK(size == 4 || size == 8) << size;
    if (size == 8) {
      StoreToOffset(kStoreDoubleword, src.AsGpuRegister(), SP, dest.Int32Value());
    } else if (size == 4) {
      StoreToOffset(kStoreWord, src.AsGpuRegister(), SP, dest.Int32Value());
    } else {
      UNIMPLEMENTED(FATAL) << "We only support Store() of size 4 and 8";
    }
  } else if (src.IsFpuRegister()) {
    CHECK(size == 4 || size == 8) << size;
    if (size == 8) {
      StoreFpuToOffset(kStoreDoubleword, src.AsFpuRegister(), SP, dest.Int32Value());
    } else if (size == 4) {
      StoreFpuToOffset(kStoreWord, src.AsFpuRegister(), SP, dest.Int32Value());
    } else {
      UNIMPLEMENTED(FATAL) << "We only support Store() of size 4 and 8";
    }
  }
}
1399
// Stores a (32-bit) object reference from a GPU register to the stack slot
// at `dest`.
void Mips64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  Mips64ManagedRegister src = msrc.AsMips64();
  CHECK(src.IsGpuRegister());
  StoreToOffset(kStoreWord, src.AsGpuRegister(), SP, dest.Int32Value());
}
1405
// Stores a raw (64-bit) pointer from a GPU register to the stack slot at
// `dest`.
void Mips64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  Mips64ManagedRegister src = msrc.AsMips64();
  CHECK(src.IsGpuRegister());
  StoreToOffset(kStoreDoubleword, src.AsGpuRegister(), SP, dest.Int32Value());
}
1411
// Materializes the 32-bit immediate `imm` into `mscratch` (clobbered) and
// stores it as a word to the stack slot at `dest`.
void Mips64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                            ManagedRegister mscratch) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  LoadConst32(scratch.AsGpuRegister(), imm);
  StoreToOffset(kStoreWord, scratch.AsGpuRegister(), SP, dest.Int32Value());
}
1419
// Materializes the 32-bit immediate `imm` into `mscratch` (clobbered) and
// stores a full doubleword to the Thread (S1) field at `dest`.
void Mips64Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
                                               ManagedRegister mscratch) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  // TODO: it's unclear whether 32 or 64 bits need to be stored (Arm64 and x86/x64 disagree?).
  // Is this function even referenced anywhere else in the code?
  LoadConst32(scratch.AsGpuRegister(), imm);
  StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), S1, dest.Int32Value());
}
1429
// Computes SP + fr_offs into `mscratch` (clobbered) and stores the resulting
// address as a doubleword to the Thread (S1) field at `thr_offs`.
void Mips64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
                                                 FrameOffset fr_offs,
                                                 ManagedRegister mscratch) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  Daddiu64(scratch.AsGpuRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), S1, thr_offs.Int32Value());
}
1438
// Stores the current SP as a doubleword to the Thread (S1) field at
// `thr_offs`.
void Mips64Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
  StoreToOffset(kStoreDoubleword, SP, S1, thr_offs.Int32Value());
}
1442
// Stores `msrc` (doubleword) at `dest`, then copies the doubleword at
// `in_off` to dest + 8 via `mscratch` (clobbered) — the stored value spans
// two adjacent stack slots.
void Mips64Assembler::StoreSpanning(FrameOffset dest, ManagedRegister msrc,
                                    FrameOffset in_off, ManagedRegister mscratch) {
  Mips64ManagedRegister src = msrc.AsMips64();
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  StoreToOffset(kStoreDoubleword, src.AsGpuRegister(), SP, dest.Int32Value());
  LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(), SP, in_off.Int32Value());
  StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, dest.Int32Value() + 8);
}
1451
// Loads `size` bytes from the stack slot at `src` into `mdest` (see EmitLoad).
void Mips64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  return EmitLoad(mdest, SP, src.Int32Value(), size);
}
1455
// Loads `size` bytes from the Thread (S1) field at `src` into `mdest`.
void Mips64Assembler::LoadFromThread64(ManagedRegister mdest, ThreadOffset<8> src, size_t size) {
  return EmitLoad(mdest, S1, src.Int32Value(), size);
}
1459
// Loads a 32-bit object reference from the stack slot at `src`, zero-
// extending it into the 64-bit destination register (LWU).
void Mips64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  Mips64ManagedRegister dest = mdest.AsMips64();
  CHECK(dest.IsGpuRegister());
  LoadFromOffset(kLoadUnsignedWord, dest.AsGpuRegister(), SP, src.Int32Value());
}
1465
// Loads a 32-bit object reference from base + offs (zero-extended via LWU).
// When heap-reference poisoning is enabled and requested, the poisoned
// (negated) reference is unpoisoned: negate and re-zero-extend to 32 bits.
void Mips64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                              bool unpoison_reference) {
  Mips64ManagedRegister dest = mdest.AsMips64();
  CHECK(dest.IsGpuRegister() && base.AsMips64().IsGpuRegister());
  LoadFromOffset(kLoadUnsignedWord, dest.AsGpuRegister(),
                 base.AsMips64().AsGpuRegister(), offs.Int32Value());
  if (kPoisonHeapReferences && unpoison_reference) {
    // TODO: review
    // Negate the 32-bit ref
    Dsubu(dest.AsGpuRegister(), ZERO, dest.AsGpuRegister());
    // And constrain it to 32 bits (zero-extend into bits 32 through 63) as on Arm64 and x86/64
    Dext(dest.AsGpuRegister(), dest.AsGpuRegister(), 0, 31);
  }
}
1480
// Loads a raw 64-bit pointer from base + offs into `mdest`.
void Mips64Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
                                 Offset offs) {
  Mips64ManagedRegister dest = mdest.AsMips64();
  CHECK(dest.IsGpuRegister() && base.AsMips64().IsGpuRegister());
  LoadFromOffset(kLoadDoubleword, dest.AsGpuRegister(),
                 base.AsMips64().AsGpuRegister(), offs.Int32Value());
}
1488
// Loads a raw 64-bit pointer from the Thread (S1) field at `offs`.
void Mips64Assembler::LoadRawPtrFromThread64(ManagedRegister mdest,
                                             ThreadOffset<8> offs) {
  Mips64ManagedRegister dest = mdest.AsMips64();
  CHECK(dest.IsGpuRegister());
  LoadFromOffset(kLoadDoubleword, dest.AsGpuRegister(), S1, offs.Int32Value());
}
1495
// Intentionally unimplemented: loads already sign-extend on MIPS.
void Mips64Assembler::SignExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for mips";
}
1499
// Intentionally unimplemented: unsigned loads already zero-extend on MIPS.
void Mips64Assembler::ZeroExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for mips";
}
1503
// Register-to-register move between like-kind managed registers. GPU moves
// ignore `size` (full 64-bit OR move); FPU moves pick MOV.S or MOV.D by
// size. A self-move emits nothing.
void Mips64Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  Mips64ManagedRegister dest = mdest.AsMips64();
  Mips64ManagedRegister src = msrc.AsMips64();
  if (!dest.Equals(src)) {
    if (dest.IsGpuRegister()) {
      CHECK(src.IsGpuRegister()) << src;
      Move(dest.AsGpuRegister(), src.AsGpuRegister());
    } else if (dest.IsFpuRegister()) {
      CHECK(src.IsFpuRegister()) << src;
      if (size == 4) {
        MovS(dest.AsFpuRegister(), src.AsFpuRegister());
      } else if (size == 8) {
        MovD(dest.AsFpuRegister(), src.AsFpuRegister());
      } else {
        UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
      }
    }
  }
}
1523
// Copies a 32-bit object reference between two stack slots via `mscratch`
// (clobbered).
void Mips64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                              ManagedRegister mscratch) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsGpuRegister(), SP, src.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsGpuRegister(), SP, dest.Int32Value());
}
1531
// Copies a raw 64-bit pointer from the Thread (S1) field at `thr_offs` to
// the stack slot at `fr_offs` via `mscratch` (clobbered).
void Mips64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                             ThreadOffset<8> thr_offs,
                                             ManagedRegister mscratch) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(), S1, thr_offs.Int32Value());
  StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, fr_offs.Int32Value());
}
1540
// Copies a raw 64-bit pointer from the stack slot at `fr_offs` to the
// Thread (S1) field at `thr_offs` via `mscratch` (clobbered).
void Mips64Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister mscratch) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
                 SP, fr_offs.Int32Value());
  StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(),
                S1, thr_offs.Int32Value());
}
1551
// Copies `size` (4 or 8) bytes between two stack slots via `mscratch`
// (clobbered). NOTE(review): the size-4 path loads a word (sign-extended)
// but stores a doubleword, writing the full 64-bit stack slot — this was a
// deliberate later change; confirm destination slots are 8 bytes wide.
void Mips64Assembler::Copy(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch, size_t size) {
  Mips64ManagedRegister scratch = mscratch.AsMips64();
  CHECK(scratch.IsGpuRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch.AsGpuRegister(), SP, src.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(), SP, src.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}
1567
// Copies `size` (4 or 8) bytes from src_base + src_offset to the stack slot
// at `dest` via `mscratch` (clobbered). NOTE(review): the size-4 path loads
// a word but stores a doubleword, filling the whole 64-bit stack slot —
// confirm this widening is intended (matches the frame-to-frame Copy above).
void Mips64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch, src_base.AsMips64().AsGpuRegister(),
                   src_offset.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch, SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(kLoadDoubleword, scratch, src_base.AsMips64().AsGpuRegister(),
                   src_offset.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch, SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}
1584
// Copies `size` (4 or 8) bytes from the stack slot at `src` to
// dest_base + dest_offset via `mscratch` (clobbered). NOTE(review): the
// size-4 path stores a doubleword after a word load — writes 8 bytes at the
// destination; confirm the destination field is 8 bytes wide.
void Mips64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                           ManagedRegister mscratch, size_t size) {
  GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch, SP, src.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch, dest_base.AsMips64().AsGpuRegister(),
                  dest_offset.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(kLoadDoubleword, scratch, SP, src.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch, dest_base.AsMips64().AsGpuRegister(),
                  dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}
1601
// Frame-to-frame copy through a base+offset source: not needed on MIPS64.
void Mips64Assembler::Copy(FrameOffset /*dest*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                           ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no mips64 implementation";
}
1606
// Copies `size` (4 or 8) bytes from src + src_offset to dest + dest_offset
// (both register bases) via `mscratch` (clobbered). NOTE(review): the
// size-4 path stores a doubleword after a word load — writes 8 bytes at the
// destination; confirm the destination field is 8 bytes wide.
void Mips64Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                           ManagedRegister src, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch, src.AsMips64().AsGpuRegister(), src_offset.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch, dest.AsMips64().AsGpuRegister(), dest_offset.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(kLoadDoubleword, scratch, src.AsMips64().AsGpuRegister(),
                   src_offset.Int32Value());
    StoreToOffset(kStoreDoubleword, scratch, dest.AsMips64().AsGpuRegister(),
                  dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}
1624
// Offset-to-offset copy between two stack slots: not needed on MIPS64.
void Mips64Assembler::Copy(FrameOffset /*dest*/, Offset /*dest_offset*/, FrameOffset /*src*/, Offset
/*src_offset*/,
                           ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no mips64 implementation";
}
1630
// Memory barrier: not yet implemented for MIPS64 (would likely be SYNC —
// see the TODO).
void Mips64Assembler::MemoryBarrier(ManagedRegister) {
  // TODO: sync?
  UNIMPLEMENTED(FATAL) << "no mips64 implementation";
}
1635
// Computes a handle scope entry address into `out_reg`: the address of the
// handle scope slot at SP + handle_scope_offset, or 0 when `null_allowed`
// and the slot holds a null reference. `in_reg` (or the slot itself when
// in_reg is no-register) supplies the reference to null-check.
void Mips64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister min_reg,
                                             bool null_allowed) {
  Mips64ManagedRegister out_reg = mout_reg.AsMips64();
  Mips64ManagedRegister in_reg = min_reg.AsMips64();
  CHECK(in_reg.IsNoRegister() || in_reg.IsGpuRegister()) << in_reg;
  CHECK(out_reg.IsGpuRegister()) << out_reg;
  if (null_allowed) {
    Label null_arg;
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      // Reload the reference from the slot (zero-extended 32-bit load).
      LoadFromOffset(kLoadUnsignedWord, out_reg.AsGpuRegister(),
                     SP, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }
    if (!out_reg.Equals(in_reg)) {
      // Pre-set the null result; skipped over when the reference is non-null.
      LoadConst32(out_reg.AsGpuRegister(), 0);
    }
    Beqzc(in_reg.AsGpuRegister(), &null_arg);
    Daddiu64(out_reg.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
    Bind(&null_arg);
  } else {
    Daddiu64(out_reg.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
  }
}
1664
1665void Mips64Assembler::CreateHandleScopeEntry(FrameOffset out_off,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001666 FrameOffset handle_scope_offset,
1667 ManagedRegister mscratch,
1668 bool null_allowed) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001669 Mips64ManagedRegister scratch = mscratch.AsMips64();
1670 CHECK(scratch.IsGpuRegister()) << scratch;
1671 if (null_allowed) {
1672 Label null_arg;
Douglas Leungd90957f2015-04-30 19:22:49 -07001673 LoadFromOffset(kLoadUnsignedWord, scratch.AsGpuRegister(), SP,
Andreas Gampe57b34292015-01-14 15:45:59 -08001674 handle_scope_offset.Int32Value());
1675 // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
1676 // the address in the handle scope holding the reference.
1677 // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
Alexey Frunze4dda3372015-06-01 18:31:49 -07001678 Beqzc(scratch.AsGpuRegister(), &null_arg);
1679 Daddiu64(scratch.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
1680 Bind(&null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08001681 } else {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001682 Daddiu64(scratch.AsGpuRegister(), SP, handle_scope_offset.Int32Value());
Andreas Gampe57b34292015-01-14 15:45:59 -08001683 }
1684 StoreToOffset(kStoreDoubleword, scratch.AsGpuRegister(), SP, out_off.Int32Value());
1685}
1686
1687// Given a handle scope entry, load the associated reference.
1688void Mips64Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001689 ManagedRegister min_reg) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001690 Mips64ManagedRegister out_reg = mout_reg.AsMips64();
1691 Mips64ManagedRegister in_reg = min_reg.AsMips64();
1692 CHECK(out_reg.IsGpuRegister()) << out_reg;
1693 CHECK(in_reg.IsGpuRegister()) << in_reg;
1694 Label null_arg;
1695 if (!out_reg.Equals(in_reg)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001696 LoadConst32(out_reg.AsGpuRegister(), 0);
Andreas Gampe57b34292015-01-14 15:45:59 -08001697 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001698 Beqzc(in_reg.AsGpuRegister(), &null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08001699 LoadFromOffset(kLoadDoubleword, out_reg.AsGpuRegister(),
1700 in_reg.AsGpuRegister(), 0);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001701 Bind(&null_arg);
Andreas Gampe57b34292015-01-14 15:45:59 -08001702}
1703
// Verify an object reference held in a register. Reference validation is not
// implemented on mips64, so this is intentionally a no-op.
void Mips64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
1707
// Verify an object reference held in a frame slot. Reference validation is
// not implemented on mips64, so this is intentionally a no-op.
void Mips64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
1711
1712void Mips64Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister mscratch) {
1713 Mips64ManagedRegister base = mbase.AsMips64();
1714 Mips64ManagedRegister scratch = mscratch.AsMips64();
1715 CHECK(base.IsGpuRegister()) << base;
1716 CHECK(scratch.IsGpuRegister()) << scratch;
1717 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
1718 base.AsGpuRegister(), offset.Int32Value());
1719 Jalr(scratch.AsGpuRegister());
1720 // TODO: place reference map on call
1721}
1722
1723void Mips64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
1724 Mips64ManagedRegister scratch = mscratch.AsMips64();
1725 CHECK(scratch.IsGpuRegister()) << scratch;
1726 // Call *(*(SP + base) + offset)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001727 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
Andreas Gampe57b34292015-01-14 15:45:59 -08001728 SP, base.Int32Value());
1729 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
1730 scratch.AsGpuRegister(), offset.Int32Value());
1731 Jalr(scratch.AsGpuRegister());
1732 // TODO: place reference map on call
1733}
1734
// Call a function at an offset from the current thread pointer. Not
// implemented for mips64; aborts if reached.
void Mips64Assembler::CallFromThread64(ThreadOffset<8> /*offset*/, ManagedRegister /*mscratch*/) {
  UNIMPLEMENTED(FATAL) << "no mips64 implementation";
}
1738
1739void Mips64Assembler::GetCurrentThread(ManagedRegister tr) {
1740 Move(tr.AsMips64().AsGpuRegister(), S1);
1741}
1742
1743void Mips64Assembler::GetCurrentThread(FrameOffset offset,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001744 ManagedRegister /*mscratch*/) {
Andreas Gampe57b34292015-01-14 15:45:59 -08001745 StoreToOffset(kStoreDoubleword, S1, SP, offset.Int32Value());
1746}
1747
1748void Mips64Assembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
1749 Mips64ManagedRegister scratch = mscratch.AsMips64();
1750 Mips64ExceptionSlowPath* slow = new Mips64ExceptionSlowPath(scratch, stack_adjust);
1751 buffer_.EnqueueSlowPath(slow);
1752 LoadFromOffset(kLoadDoubleword, scratch.AsGpuRegister(),
1753 S1, Thread::ExceptionOffset<8>().Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001754 Bnezc(scratch.AsGpuRegister(), slow->Entry());
Andreas Gampe57b34292015-01-14 15:45:59 -08001755}
1756
1757void Mips64ExceptionSlowPath::Emit(Assembler* sasm) {
1758 Mips64Assembler* sp_asm = down_cast<Mips64Assembler*>(sasm);
1759#define __ sp_asm->
Alexey Frunze4dda3372015-06-01 18:31:49 -07001760 __ Bind(&entry_);
Andreas Gampe57b34292015-01-14 15:45:59 -08001761 if (stack_adjust_ != 0) { // Fix up the frame.
1762 __ DecreaseFrameSize(stack_adjust_);
1763 }
1764 // Pass exception object as argument
1765 // Don't care about preserving A0 as this call won't return
1766 __ Move(A0, scratch_.AsGpuRegister());
1767 // Set up call to Thread::Current()->pDeliverException
1768 __ LoadFromOffset(kLoadDoubleword, T9, S1,
Goran Jakovljevic75c40d42015-04-03 15:45:21 +02001769 QUICK_ENTRYPOINT_OFFSET(8, pDeliverException).Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001770 // TODO: check T9 usage
Andreas Gampe57b34292015-01-14 15:45:59 -08001771 __ Jr(T9);
1772 // Call never returns
1773 __ Break();
1774#undef __
1775}
1776
1777} // namespace mips64
1778} // namespace art