blob: a7dbacd3a9d2ec6b3e94274367e8dac12f003ca1 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_arm32.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +000028bool Arm32Assembler::ShifterOperandCanHoldArm32(uint32_t immediate, ShifterOperand* shifter_op) {
29 // Avoid the more expensive test for frequent small immediate values.
30 if (immediate < (1 << kImmed8Bits)) {
31 shifter_op->type_ = ShifterOperand::kImmediate;
32 shifter_op->is_rotate_ = true;
33 shifter_op->rotate_ = 0;
34 shifter_op->immed_ = immediate;
35 return true;
36 }
37 // Note that immediate must be unsigned for the test to work correctly.
38 for (int rot = 0; rot < 16; rot++) {
39 uint32_t imm8 = (immediate << 2*rot) | (immediate >> (32 - 2*rot));
40 if (imm8 < (1 << kImmed8Bits)) {
41 shifter_op->type_ = ShifterOperand::kImmediate;
42 shifter_op->is_rotate_ = true;
43 shifter_op->rotate_ = rot;
44 shifter_op->immed_ = imm8;
45 return true;
46 }
47 }
48 return false;
49}
50
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +010051bool Arm32Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
52 ShifterOperand shifter_op;
53 return ShifterOperandCanHoldArm32(immediate, &shifter_op);
54}
55
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +000056bool Arm32Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
57 Register rn ATTRIBUTE_UNUSED,
58 Opcode opcode ATTRIBUTE_UNUSED,
59 uint32_t immediate,
60 ShifterOperand* shifter_op) {
61 return ShifterOperandCanHoldArm32(immediate, shifter_op);
62}
63
Dave Allison65fcc2c2014-04-28 13:45:27 -070064void Arm32Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010065 Condition cond, SetCc set_cc) {
66 EmitType01(cond, so.type(), AND, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070067}
68
69
70void Arm32Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010071 Condition cond, SetCc set_cc) {
72 EmitType01(cond, so.type(), EOR, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070073}
74
75
76void Arm32Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010077 Condition cond, SetCc set_cc) {
78 EmitType01(cond, so.type(), SUB, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070079}
80
81void Arm32Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010082 Condition cond, SetCc set_cc) {
83 EmitType01(cond, so.type(), RSB, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070084}
85
Dave Allison65fcc2c2014-04-28 13:45:27 -070086void Arm32Assembler::add(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010087 Condition cond, SetCc set_cc) {
88 EmitType01(cond, so.type(), ADD, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070089}
90
91
92void Arm32Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010093 Condition cond, SetCc set_cc) {
94 EmitType01(cond, so.type(), ADC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -070095}
96
97
98void Arm32Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +010099 Condition cond, SetCc set_cc) {
100 EmitType01(cond, so.type(), SBC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700101}
102
103
104void Arm32Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100105 Condition cond, SetCc set_cc) {
106 EmitType01(cond, so.type(), RSC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700107}
108
109
110void Arm32Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
111 CHECK_NE(rn, PC); // Reserve tst pc instruction for exception handler marker.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100112 EmitType01(cond, so.type(), TST, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700113}
114
115
116void Arm32Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
117 CHECK_NE(rn, PC); // Reserve teq pc instruction for exception handler marker.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100118 EmitType01(cond, so.type(), TEQ, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700119}
120
121
122void Arm32Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100123 EmitType01(cond, so.type(), CMP, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700124}
125
126
127void Arm32Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100128 EmitType01(cond, so.type(), CMN, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700129}
130
131
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100132void Arm32Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
133 Condition cond, SetCc set_cc) {
134 EmitType01(cond, so.type(), ORR, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700135}
136
137
Vladimir Markod2b4ca22015-09-14 15:13:26 +0100138void Arm32Assembler::orn(Register rd ATTRIBUTE_UNUSED,
139 Register rn ATTRIBUTE_UNUSED,
140 const ShifterOperand& so ATTRIBUTE_UNUSED,
141 Condition cond ATTRIBUTE_UNUSED,
142 SetCc set_cc ATTRIBUTE_UNUSED) {
143 LOG(FATAL) << "orn is not supported on ARM32";
144}
145
146
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100147void Arm32Assembler::mov(Register rd, const ShifterOperand& so,
148 Condition cond, SetCc set_cc) {
149 EmitType01(cond, so.type(), MOV, set_cc, R0, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700150}
151
152
153void Arm32Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100154 Condition cond, SetCc set_cc) {
155 EmitType01(cond, so.type(), BIC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700156}
157
158
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100159void Arm32Assembler::mvn(Register rd, const ShifterOperand& so,
160 Condition cond, SetCc set_cc) {
161 EmitType01(cond, so.type(), MVN, set_cc, R0, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700162}
163
164
165void Arm32Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
166 // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
167 EmitMulOp(cond, 0, R0, rd, rn, rm);
168}
169
170
171void Arm32Assembler::mla(Register rd, Register rn, Register rm, Register ra,
172 Condition cond) {
173 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
174 EmitMulOp(cond, B21, ra, rd, rn, rm);
175}
176
177
178void Arm32Assembler::mls(Register rd, Register rn, Register rm, Register ra,
179 Condition cond) {
180 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
181 EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
182}
183
184
Zheng Xuc6667102015-05-15 16:08:45 +0800185void Arm32Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
186 Register rm, Condition cond) {
187 // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
188 EmitMulOp(cond, B23 | B22, rd_lo, rd_hi, rn, rm);
189}
190
191
Dave Allison65fcc2c2014-04-28 13:45:27 -0700192void Arm32Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
193 Register rm, Condition cond) {
194 // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
195 EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
196}
197
198
199void Arm32Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
200 CHECK_NE(rd, kNoRegister);
201 CHECK_NE(rn, kNoRegister);
202 CHECK_NE(rm, kNoRegister);
203 CHECK_NE(cond, kNoCondition);
204 int32_t encoding = B26 | B25 | B24 | B20 |
205 B15 | B14 | B13 | B12 |
206 (static_cast<int32_t>(cond) << kConditionShift) |
207 (static_cast<int32_t>(rn) << 0) |
208 (static_cast<int32_t>(rd) << 16) |
209 (static_cast<int32_t>(rm) << 8) |
210 B4;
211 Emit(encoding);
212}
213
214
215void Arm32Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
216 CHECK_NE(rd, kNoRegister);
217 CHECK_NE(rn, kNoRegister);
218 CHECK_NE(rm, kNoRegister);
219 CHECK_NE(cond, kNoCondition);
220 int32_t encoding = B26 | B25 | B24 | B21 | B20 |
221 B15 | B14 | B13 | B12 |
222 (static_cast<int32_t>(cond) << kConditionShift) |
223 (static_cast<int32_t>(rn) << 0) |
224 (static_cast<int32_t>(rd) << 16) |
225 (static_cast<int32_t>(rm) << 8) |
226 B4;
227 Emit(encoding);
228}
229
230
Roland Levillain51d3fc42014-11-13 14:11:42 +0000231void Arm32Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
232 CHECK_NE(rd, kNoRegister);
233 CHECK_NE(rn, kNoRegister);
234 CHECK_NE(cond, kNoCondition);
235 CHECK_LE(lsb, 31U);
236 CHECK(1U <= width && width <= 32U) << width;
237 uint32_t widthminus1 = width - 1;
238
239 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
240 B26 | B25 | B24 | B23 | B21 |
241 (widthminus1 << 16) |
242 (static_cast<uint32_t>(rd) << 12) |
243 (lsb << 7) |
244 B6 | B4 |
245 static_cast<uint32_t>(rn);
246 Emit(encoding);
247}
248
249
Roland Levillain981e4542014-11-14 11:47:14 +0000250void Arm32Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
251 CHECK_NE(rd, kNoRegister);
252 CHECK_NE(rn, kNoRegister);
253 CHECK_NE(cond, kNoCondition);
254 CHECK_LE(lsb, 31U);
255 CHECK(1U <= width && width <= 32U) << width;
256 uint32_t widthminus1 = width - 1;
257
258 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
259 B26 | B25 | B24 | B23 | B22 | B21 |
260 (widthminus1 << 16) |
261 (static_cast<uint32_t>(rd) << 12) |
262 (lsb << 7) |
263 B6 | B4 |
264 static_cast<uint32_t>(rn);
265 Emit(encoding);
266}
267
268
Dave Allison65fcc2c2014-04-28 13:45:27 -0700269void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {
270 EmitMemOp(cond, true, false, rd, ad);
271}
272
273
274void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {
275 EmitMemOp(cond, false, false, rd, ad);
276}
277
278
279void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
280 EmitMemOp(cond, true, true, rd, ad);
281}
282
283
284void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {
285 EmitMemOp(cond, false, true, rd, ad);
286}
287
288
289void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
290 EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
291}
292
293
294void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {
295 EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
296}
297
298
299void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
300 EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
301}
302
303
304void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
305 EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
306}
307
308
309void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
310 CHECK_EQ(rd % 2, 0);
311 EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
312}
313
314
315void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {
316 CHECK_EQ(rd % 2, 0);
317 EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
318}
319
320
321void Arm32Assembler::ldm(BlockAddressMode am,
322 Register base,
323 RegList regs,
324 Condition cond) {
325 EmitMultiMemOp(cond, am, true, base, regs);
326}
327
328
329void Arm32Assembler::stm(BlockAddressMode am,
330 Register base,
331 RegList regs,
332 Condition cond) {
333 EmitMultiMemOp(cond, am, false, base, regs);
334}
335
336
337void Arm32Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
338 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
339}
340
341
342void Arm32Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
343 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
344}
345
346
347bool Arm32Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
348 uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
349 if (((imm32 & ((1 << 19) - 1)) == 0) &&
350 ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
351 (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
352 uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
353 ((imm32 >> 19) & ((1 << 6) -1));
354 EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
355 sd, S0, S0);
356 return true;
357 }
358 return false;
359}
360
361
362bool Arm32Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
363 uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
364 if (((imm64 & ((1LL << 48) - 1)) == 0) &&
365 ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
366 (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
367 uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
368 ((imm64 >> 48) & ((1 << 6) -1));
369 EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
370 dd, D0, D0);
371 return true;
372 }
373 return false;
374}
375
376
377void Arm32Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
378 Condition cond) {
379 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
380}
381
382
383void Arm32Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
384 Condition cond) {
385 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
386}
387
388
389void Arm32Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
390 Condition cond) {
391 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
392}
393
394
395void Arm32Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
396 Condition cond) {
397 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
398}
399
400
401void Arm32Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
402 Condition cond) {
403 EmitVFPsss(cond, B21, sd, sn, sm);
404}
405
406
407void Arm32Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
408 Condition cond) {
409 EmitVFPddd(cond, B21, dd, dn, dm);
410}
411
412
413void Arm32Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
414 Condition cond) {
415 EmitVFPsss(cond, 0, sd, sn, sm);
416}
417
418
419void Arm32Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
420 Condition cond) {
421 EmitVFPddd(cond, 0, dd, dn, dm);
422}
423
424
425void Arm32Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
426 Condition cond) {
427 EmitVFPsss(cond, B6, sd, sn, sm);
428}
429
430
431void Arm32Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
432 Condition cond) {
433 EmitVFPddd(cond, B6, dd, dn, dm);
434}
435
436
437void Arm32Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
438 Condition cond) {
439 EmitVFPsss(cond, B23, sd, sn, sm);
440}
441
442
443void Arm32Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
444 Condition cond) {
445 EmitVFPddd(cond, B23, dd, dn, dm);
446}
447
448
449void Arm32Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
450 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
451}
452
453
454void Arm32Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
455 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
456}
457
458
459void Arm32Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
460 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
461}
462
463
464void Arm32Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
465 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
466}
467
468
469void Arm32Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
470 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
471}
472
473void Arm32Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
474 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
475}
476
477
478void Arm32Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
479 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
480}
481
482
483void Arm32Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
484 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
485}
486
487
488void Arm32Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
489 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
490}
491
492
493void Arm32Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
494 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
495}
496
497
498void Arm32Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
499 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
500}
501
502
503void Arm32Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
504 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
505}
506
507
508void Arm32Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
509 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
510}
511
512
513void Arm32Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
514 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
515}
516
517
518void Arm32Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
519 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
520}
521
522
523void Arm32Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
524 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
525}
526
527
528void Arm32Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
529 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
530}
531
532
533void Arm32Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
534 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
535}
536
537
538void Arm32Assembler::vcmpsz(SRegister sd, Condition cond) {
539 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
540}
541
542
543void Arm32Assembler::vcmpdz(DRegister dd, Condition cond) {
544 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
545}
546
547void Arm32Assembler::b(Label* label, Condition cond) {
548 EmitBranch(cond, label, false);
549}
550
551
552void Arm32Assembler::bl(Label* label, Condition cond) {
553 EmitBranch(cond, label, true);
554}
555
556
557void Arm32Assembler::MarkExceptionHandler(Label* label) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100558 EmitType01(AL, 1, TST, kCcSet, PC, R0, ShifterOperand(0));
Dave Allison65fcc2c2014-04-28 13:45:27 -0700559 Label l;
560 b(&l);
561 EmitBranch(AL, label, false);
562 Bind(&l);
563}
564
565
566void Arm32Assembler::Emit(int32_t value) {
567 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
568 buffer_.Emit<int32_t>(value);
569}
570
571
572void Arm32Assembler::EmitType01(Condition cond,
573 int type,
574 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100575 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -0700576 Register rn,
577 Register rd,
578 const ShifterOperand& so) {
579 CHECK_NE(rd, kNoRegister);
580 CHECK_NE(cond, kNoCondition);
581 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
582 type << kTypeShift |
583 static_cast<int32_t>(opcode) << kOpcodeShift |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100584 (set_cc == kCcSet ? 1 : 0) << kSShift |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700585 static_cast<int32_t>(rn) << kRnShift |
586 static_cast<int32_t>(rd) << kRdShift |
587 so.encodingArm();
588 Emit(encoding);
589}
590
591
592void Arm32Assembler::EmitType5(Condition cond, int offset, bool link) {
593 CHECK_NE(cond, kNoCondition);
594 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
595 5 << kTypeShift |
596 (link ? 1 : 0) << kLinkShift;
597 Emit(Arm32Assembler::EncodeBranchOffset(offset, encoding));
598}
599
600
601void Arm32Assembler::EmitMemOp(Condition cond,
Dave Allison45fdb932014-06-25 12:37:10 -0700602 bool load,
603 bool byte,
604 Register rd,
605 const Address& ad) {
Dave Allison65fcc2c2014-04-28 13:45:27 -0700606 CHECK_NE(rd, kNoRegister);
607 CHECK_NE(cond, kNoCondition);
608 const Address& addr = static_cast<const Address&>(ad);
609
Dave Allison45fdb932014-06-25 12:37:10 -0700610 int32_t encoding = 0;
611 if (!ad.IsImmediate() && ad.GetRegisterOffset() == PC) {
612 // PC relative LDR(literal)
613 int32_t offset = ad.GetOffset();
614 int32_t u = B23;
615 if (offset < 0) {
616 offset = -offset;
617 u = 0;
618 }
619 CHECK_LT(offset, (1 << 12));
620 encoding = (static_cast<int32_t>(cond) << kConditionShift) |
621 B26 | B24 | u | B20 |
622 (load ? L : 0) |
623 (byte ? B : 0) |
624 (static_cast<int32_t>(rd) << kRdShift) |
625 0xf << 16 |
626 (offset & 0xfff);
627
628 } else {
629 encoding = (static_cast<int32_t>(cond) << kConditionShift) |
630 B26 |
631 (load ? L : 0) |
632 (byte ? B : 0) |
633 (static_cast<int32_t>(rd) << kRdShift) |
634 addr.encodingArm();
635 }
Dave Allison65fcc2c2014-04-28 13:45:27 -0700636 Emit(encoding);
637}
638
639
640void Arm32Assembler::EmitMemOpAddressMode3(Condition cond,
641 int32_t mode,
642 Register rd,
643 const Address& ad) {
644 CHECK_NE(rd, kNoRegister);
645 CHECK_NE(cond, kNoCondition);
646 const Address& addr = static_cast<const Address&>(ad);
647 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
648 B22 |
649 mode |
650 (static_cast<int32_t>(rd) << kRdShift) |
651 addr.encoding3();
652 Emit(encoding);
653}
654
655
656void Arm32Assembler::EmitMultiMemOp(Condition cond,
657 BlockAddressMode am,
658 bool load,
659 Register base,
660 RegList regs) {
661 CHECK_NE(base, kNoRegister);
662 CHECK_NE(cond, kNoCondition);
663 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
664 B27 |
665 am |
666 (load ? L : 0) |
667 (static_cast<int32_t>(base) << kRnShift) |
668 regs;
669 Emit(encoding);
670}
671
672
673void Arm32Assembler::EmitShiftImmediate(Condition cond,
674 Shift opcode,
675 Register rd,
676 Register rm,
677 const ShifterOperand& so) {
678 CHECK_NE(cond, kNoCondition);
679 CHECK(so.IsImmediate());
680 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
681 static_cast<int32_t>(MOV) << kOpcodeShift |
682 static_cast<int32_t>(rd) << kRdShift |
683 so.encodingArm() << kShiftImmShift |
684 static_cast<int32_t>(opcode) << kShiftShift |
685 static_cast<int32_t>(rm);
686 Emit(encoding);
687}
688
689
690void Arm32Assembler::EmitShiftRegister(Condition cond,
691 Shift opcode,
692 Register rd,
693 Register rm,
694 const ShifterOperand& so) {
695 CHECK_NE(cond, kNoCondition);
696 CHECK(so.IsRegister());
697 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
698 static_cast<int32_t>(MOV) << kOpcodeShift |
699 static_cast<int32_t>(rd) << kRdShift |
700 so.encodingArm() << kShiftRegisterShift |
701 static_cast<int32_t>(opcode) << kShiftShift |
702 B4 |
703 static_cast<int32_t>(rm);
704 Emit(encoding);
705}
706
707
708void Arm32Assembler::EmitBranch(Condition cond, Label* label, bool link) {
709 if (label->IsBound()) {
710 EmitType5(cond, label->Position() - buffer_.Size(), link);
711 } else {
712 int position = buffer_.Size();
713 // Use the offset field of the branch instruction for linking the sites.
714 EmitType5(cond, label->position_, link);
715 label->LinkTo(position);
716 }
717}
718
719
720void Arm32Assembler::clz(Register rd, Register rm, Condition cond) {
721 CHECK_NE(rd, kNoRegister);
722 CHECK_NE(rm, kNoRegister);
723 CHECK_NE(cond, kNoCondition);
724 CHECK_NE(rd, PC);
725 CHECK_NE(rm, PC);
726 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
727 B24 | B22 | B21 | (0xf << 16) |
728 (static_cast<int32_t>(rd) << kRdShift) |
729 (0xf << 8) | B4 | static_cast<int32_t>(rm);
730 Emit(encoding);
731}
732
733
734void Arm32Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
735 CHECK_NE(cond, kNoCondition);
736 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
737 B25 | B24 | ((imm16 >> 12) << 16) |
738 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
739 Emit(encoding);
740}
741
742
743void Arm32Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
744 CHECK_NE(cond, kNoCondition);
745 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
746 B25 | B24 | B22 | ((imm16 >> 12) << 16) |
747 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
748 Emit(encoding);
749}
750
751
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100752void Arm32Assembler::rbit(Register rd, Register rm, Condition cond) {
753 CHECK_NE(rd, kNoRegister);
754 CHECK_NE(rm, kNoRegister);
755 CHECK_NE(cond, kNoCondition);
756 CHECK_NE(rd, PC);
757 CHECK_NE(rm, PC);
758 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
759 B26 | B25 | B23 | B22 | B21 | B20 | (0xf << 16) |
760 (static_cast<int32_t>(rd) << kRdShift) |
761 (0xf << 8) | B5 | B4 | static_cast<int32_t>(rm);
762 Emit(encoding);
763}
764
765
Dave Allison65fcc2c2014-04-28 13:45:27 -0700766void Arm32Assembler::EmitMulOp(Condition cond, int32_t opcode,
767 Register rd, Register rn,
768 Register rm, Register rs) {
769 CHECK_NE(rd, kNoRegister);
770 CHECK_NE(rn, kNoRegister);
771 CHECK_NE(rm, kNoRegister);
772 CHECK_NE(rs, kNoRegister);
773 CHECK_NE(cond, kNoCondition);
774 int32_t encoding = opcode |
775 (static_cast<int32_t>(cond) << kConditionShift) |
776 (static_cast<int32_t>(rn) << kRnShift) |
777 (static_cast<int32_t>(rd) << kRdShift) |
778 (static_cast<int32_t>(rs) << kRsShift) |
779 B7 | B4 |
780 (static_cast<int32_t>(rm) << kRmShift);
781 Emit(encoding);
782}
783
Calin Juravle52c48962014-12-16 17:02:57 +0000784
Dave Allison65fcc2c2014-04-28 13:45:27 -0700785void Arm32Assembler::ldrex(Register rt, Register rn, Condition cond) {
786 CHECK_NE(rn, kNoRegister);
787 CHECK_NE(rt, kNoRegister);
788 CHECK_NE(cond, kNoCondition);
789 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
790 B24 |
791 B23 |
792 L |
793 (static_cast<int32_t>(rn) << kLdExRnShift) |
794 (static_cast<int32_t>(rt) << kLdExRtShift) |
795 B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
796 Emit(encoding);
797}
798
799
Calin Juravle52c48962014-12-16 17:02:57 +0000800void Arm32Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
801 CHECK_NE(rn, kNoRegister);
802 CHECK_NE(rt, kNoRegister);
803 CHECK_NE(rt2, kNoRegister);
804 CHECK_NE(rt, R14);
805 CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
806 CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
807 CHECK_NE(cond, kNoCondition);
808
809 int32_t encoding =
810 (static_cast<uint32_t>(cond) << kConditionShift) |
811 B24 | B23 | B21 | B20 |
812 static_cast<uint32_t>(rn) << 16 |
813 static_cast<uint32_t>(rt) << 12 |
814 B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
815 Emit(encoding);
816}
817
818
Dave Allison65fcc2c2014-04-28 13:45:27 -0700819void Arm32Assembler::strex(Register rd,
820 Register rt,
821 Register rn,
822 Condition cond) {
823 CHECK_NE(rn, kNoRegister);
824 CHECK_NE(rd, kNoRegister);
825 CHECK_NE(rt, kNoRegister);
826 CHECK_NE(cond, kNoCondition);
827 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
828 B24 |
829 B23 |
830 (static_cast<int32_t>(rn) << kStrExRnShift) |
831 (static_cast<int32_t>(rd) << kStrExRdShift) |
832 B11 | B10 | B9 | B8 | B7 | B4 |
833 (static_cast<int32_t>(rt) << kStrExRtShift);
834 Emit(encoding);
835}
836
Calin Juravle52c48962014-12-16 17:02:57 +0000837void Arm32Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
838 CHECK_NE(rd, kNoRegister);
839 CHECK_NE(rn, kNoRegister);
840 CHECK_NE(rt, kNoRegister);
841 CHECK_NE(rt2, kNoRegister);
842 CHECK_NE(rt, R14);
843 CHECK_NE(rd, rt);
844 CHECK_NE(rd, rt2);
845 CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
846 CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
847 CHECK_NE(cond, kNoCondition);
848
849 int32_t encoding =
850 (static_cast<uint32_t>(cond) << kConditionShift) |
851 B24 | B23 | B21 |
852 static_cast<uint32_t>(rn) << 16 |
853 static_cast<uint32_t>(rd) << 12 |
854 B11 | B10 | B9 | B8 | B7 | B4 |
855 static_cast<uint32_t>(rt);
856 Emit(encoding);
857}
858
Dave Allison65fcc2c2014-04-28 13:45:27 -0700859
860void Arm32Assembler::clrex(Condition cond) {
861 CHECK_EQ(cond, AL); // This cannot be conditional on ARM.
862 int32_t encoding = (kSpecialCondition << kConditionShift) |
863 B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
864 Emit(encoding);
865}
866
867
868void Arm32Assembler::nop(Condition cond) {
869 CHECK_NE(cond, kNoCondition);
870 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
871 B25 | B24 | B21 | (0xf << 12);
872 Emit(encoding);
873}
874
875
876void Arm32Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
877 CHECK_NE(sn, kNoSRegister);
878 CHECK_NE(rt, kNoRegister);
879 CHECK_NE(rt, SP);
880 CHECK_NE(rt, PC);
881 CHECK_NE(cond, kNoCondition);
882 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
883 B27 | B26 | B25 |
884 ((static_cast<int32_t>(sn) >> 1)*B16) |
885 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
886 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
887 Emit(encoding);
888}
889
890
891void Arm32Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
892 CHECK_NE(sn, kNoSRegister);
893 CHECK_NE(rt, kNoRegister);
894 CHECK_NE(rt, SP);
895 CHECK_NE(rt, PC);
896 CHECK_NE(cond, kNoCondition);
897 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
898 B27 | B26 | B25 | B20 |
899 ((static_cast<int32_t>(sn) >> 1)*B16) |
900 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
901 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
902 Emit(encoding);
903}
904
905
906void Arm32Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
907 Condition cond) {
908 CHECK_NE(sm, kNoSRegister);
909 CHECK_NE(sm, S31);
910 CHECK_NE(rt, kNoRegister);
911 CHECK_NE(rt, SP);
912 CHECK_NE(rt, PC);
913 CHECK_NE(rt2, kNoRegister);
914 CHECK_NE(rt2, SP);
915 CHECK_NE(rt2, PC);
916 CHECK_NE(cond, kNoCondition);
917 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
918 B27 | B26 | B22 |
919 (static_cast<int32_t>(rt2)*B16) |
920 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
921 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
922 (static_cast<int32_t>(sm) >> 1);
923 Emit(encoding);
924}
925
926
927void Arm32Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
928 Condition cond) {
929 CHECK_NE(sm, kNoSRegister);
930 CHECK_NE(sm, S31);
931 CHECK_NE(rt, kNoRegister);
932 CHECK_NE(rt, SP);
933 CHECK_NE(rt, PC);
934 CHECK_NE(rt2, kNoRegister);
935 CHECK_NE(rt2, SP);
936 CHECK_NE(rt2, PC);
937 CHECK_NE(rt, rt2);
938 CHECK_NE(cond, kNoCondition);
939 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
940 B27 | B26 | B22 | B20 |
941 (static_cast<int32_t>(rt2)*B16) |
942 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
943 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
944 (static_cast<int32_t>(sm) >> 1);
945 Emit(encoding);
946}
947
948
// Emits VMOV dm, rt, rt2: copies two core registers into double-precision
// register `dm` (B8 distinguishes the double form from vmovsrr).
void Arm32Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(cond, kNoCondition);
  // The D register number is split: bit 4 goes in the M bit (bit 5),
  // the low 4 bits go in the Vm field.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
967
968
// Emits VMOV rt, rt2, dm: copies double-precision register `dm` into two
// core registers. Inverse of vmovdrr (B20 set = to-core direction).
void Arm32Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Writing the same core register twice is unpredictable.
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
988
989
990void Arm32Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
991 const Address& addr = static_cast<const Address&>(ad);
992 CHECK_NE(sd, kNoSRegister);
993 CHECK_NE(cond, kNoCondition);
994 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
995 B27 | B26 | B24 | B20 |
996 ((static_cast<int32_t>(sd) & 1)*B22) |
997 ((static_cast<int32_t>(sd) >> 1)*B12) |
998 B11 | B9 | addr.vencoding();
999 Emit(encoding);
1000}
1001
1002
1003void Arm32Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
1004 const Address& addr = static_cast<const Address&>(ad);
1005 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
1006 CHECK_NE(sd, kNoSRegister);
1007 CHECK_NE(cond, kNoCondition);
1008 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1009 B27 | B26 | B24 |
1010 ((static_cast<int32_t>(sd) & 1)*B22) |
1011 ((static_cast<int32_t>(sd) >> 1)*B12) |
1012 B11 | B9 | addr.vencoding();
1013 Emit(encoding);
1014}
1015
1016
1017void Arm32Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
1018 const Address& addr = static_cast<const Address&>(ad);
1019 CHECK_NE(dd, kNoDRegister);
1020 CHECK_NE(cond, kNoCondition);
1021 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1022 B27 | B26 | B24 | B20 |
1023 ((static_cast<int32_t>(dd) >> 4)*B22) |
1024 ((static_cast<int32_t>(dd) & 0xf)*B12) |
1025 B11 | B9 | B8 | addr.vencoding();
1026 Emit(encoding);
1027}
1028
1029
1030void Arm32Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
1031 const Address& addr = static_cast<const Address&>(ad);
1032 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
1033 CHECK_NE(dd, kNoDRegister);
1034 CHECK_NE(cond, kNoCondition);
1035 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1036 B27 | B26 | B24 |
1037 ((static_cast<int32_t>(dd) >> 4)*B22) |
1038 ((static_cast<int32_t>(dd) & 0xf)*B12) |
1039 B11 | B9 | B8 | addr.vencoding();
1040 Emit(encoding);
1041}
1042
1043
1044void Arm32Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
1045 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
1046}
1047
1048
1049void Arm32Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
1050 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
1051}
1052
1053
1054void Arm32Assembler::vpops(SRegister reg, int nregs, Condition cond) {
1055 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
1056}
1057
1058
1059void Arm32Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
1060 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
1061}
1062
1063
// Emits a VPUSH/VPOP of `nregs` consecutive VFP registers starting at `reg`.
// `push` selects push vs. pop; `dbl` selects double- vs. single-precision.
void Arm32Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  CHECK_GT(nregs, 0);
  uint32_t D;   // The split-out high/low bit of the register number.
  uint32_t Vd;  // The 4-bit register field.
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // Push uses pre-decrement addressing (B24); pop uses post-increment with
  // load (B23 | B20). The register count occupies imm8, doubled for
  // double-precision since each D register is two words.
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     static_cast<int32_t>(cond) << kConditionShift |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit(encoding);
}
1088
1089
// Emits a three-operand single-precision VFP data-processing instruction
// (sd := sn OP sm), with the operation selected by `opcode` bits.
void Arm32Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  // Each S register number splits into a 4-bit field plus one extra bit:
  // sd -> Vd/D(B22), sn -> Vn/N(B7), sm -> Vm/M(B5).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
1106
1107
// Emits a three-operand double-precision VFP data-processing instruction
// (dd := dn OP dm); B8 marks the double-precision form.
void Arm32Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  // Each D register number splits into a low 4-bit field plus its high bit:
  // dd -> Vd/D(B22), dn -> Vn/N(B7), dm -> Vm/M(B5).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
1124
1125
// Emits a VFP instruction with a single-precision destination and a
// double-precision source (e.g. conversions), selected by `opcode`.
void Arm32Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                               SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |   // D bit of sd.
                     ((static_cast<int32_t>(sd) >> 1)*B12) |  // Vd field of sd.
                     ((static_cast<int32_t>(dm) >> 4)*B5) |   // M bit of dm.
                     (static_cast<int32_t>(dm) & 0xf);        // Vm field of dm.
  Emit(encoding);
}
1139
1140
// Emits a VFP instruction with a double-precision destination and a
// single-precision source (e.g. conversions), selected by `opcode`.
void Arm32Assembler::EmitVFPds(Condition cond, int32_t opcode,
                               DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |  // D bit of dd.
                     ((static_cast<int32_t>(dd) & 0xf)*B12) | // Vd field of dd.
                     ((static_cast<int32_t>(sm) & 1)*B5) |    // M bit of sm.
                     (static_cast<int32_t>(sm) >> 1);         // Vm field of sm.
  Emit(encoding);
}
1154
1155
1156void Arm32Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001157 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001158 CHECK_LE(shift_imm, 31u);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001159 mov(rd, ShifterOperand(rm, LSL, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001160}
1161
1162
1163void Arm32Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001164 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001165 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001166 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001167 mov(rd, ShifterOperand(rm, LSR, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001168}
1169
1170
1171void Arm32Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001172 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001173 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001174 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001175 mov(rd, ShifterOperand(rm, ASR, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001176}
1177
1178
1179void Arm32Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001180 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00001181 CHECK(1u <= shift_imm && shift_imm <= 31u);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001182 mov(rd, ShifterOperand(rm, ROR, shift_imm), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001183}
1184
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001185void Arm32Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
1186 mov(rd, ShifterOperand(rm, ROR, 0), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001187}
1188
1189
Dave Allison45fdb932014-06-25 12:37:10 -07001190void Arm32Assembler::Lsl(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001191 Condition cond, SetCc set_cc) {
1192 mov(rd, ShifterOperand(rm, LSL, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001193}
1194
1195
1196void Arm32Assembler::Lsr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001197 Condition cond, SetCc set_cc) {
1198 mov(rd, ShifterOperand(rm, LSR, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001199}
1200
1201
1202void Arm32Assembler::Asr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001203 Condition cond, SetCc set_cc) {
1204 mov(rd, ShifterOperand(rm, ASR, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001205}
1206
1207
1208void Arm32Assembler::Ror(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001209 Condition cond, SetCc set_cc) {
1210 mov(rd, ShifterOperand(rm, ROR, rn), cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07001211}
1212
// Emits VMRS APSR_nzcv, FPSCR: copies the VFP condition flags into the
// ARM flags (Rt = PC in the encoding selects APSR_nzcv).
void Arm32Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit(encoding);
}
1221
1222
// Emits SVC #imm24 (supervisor call), always unconditional (AL).
void Arm32Assembler::svc(uint32_t imm24) {
  CHECK(IsUint<24>(imm24)) << imm24;  // The immediate field is 24 bits wide.
  int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
  Emit(encoding);
}
1228
1229
// Emits BKPT #imm16 (breakpoint). The 16-bit immediate is split in the
// encoding: upper 12 bits at bit 8, lower 4 bits at bit 0.
void Arm32Assembler::bkpt(uint16_t imm16) {
  int32_t encoding = (AL << kConditionShift) | B24 | B21 |
                     ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
  Emit(encoding);
}
1235
1236
// Emits BLX rm: branch with link and exchange to the address in `rm`.
void Arm32Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B5 | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
1245
1246
// Emits BX rm: branch and exchange to the address in `rm` (no link;
// differs from blx only in the absence of B5).
void Arm32Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
1255
1256
1257void Arm32Assembler::Push(Register rd, Condition cond) {
1258 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
1259}
1260
1261
1262void Arm32Assembler::Pop(Register rd, Condition cond) {
1263 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
1264}
1265
1266
// Pushes a register list: STM decrement-before with SP write-back.
void Arm32Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
1270
1271
// Pops a register list: LDM increment-after with SP write-back.
void Arm32Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
1275
1276
1277void Arm32Assembler::Mov(Register rd, Register rm, Condition cond) {
1278 if (rd != rm) {
1279 mov(rd, ShifterOperand(rm), cond);
1280 }
1281}
1282
1283
// Binds `label` to the current buffer position and back-patches every branch
// that was linked to it. While unbound, the label heads a chain of branch
// instructions whose offset fields each hold the position of the next link.
void Arm32Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());
  int bound_pc = buffer_.Size();
  while (label->IsLinked()) {
    int32_t position = label->Position();
    // The instruction at `position` currently stores the chain's next link.
    int32_t next = buffer_.Load<int32_t>(position);
    // Patch in the real branch offset to the bind point.
    int32_t encoded = Arm32Assembler::EncodeBranchOffset(bound_pc - position, next);
    buffer_.Store<int32_t>(position, encoded);
    // Advance to the next branch in the chain.
    label->position_ = Arm32Assembler::DecodeBranchOffset(next);
  }
  label->BindTo(bound_pc);
}
1296
1297
// Writes branch offset `offset` (in bytes, relative to the branch) into the
// offset field of instruction word `inst`, returning the patched word.
int32_t Arm32Assembler::EncodeBranchOffset(int offset, int32_t inst) {
  // The offset is off by 8 due to the way the ARM CPUs read PC.
  offset -= 8;
  CHECK_ALIGNED(offset, 4);
  CHECK(IsInt(POPCOUNT(kBranchOffsetMask), offset)) << offset;

  // Properly preserve only the bits supported in the instruction.
  offset >>= 2;  // Word offset: branch offsets are stored scaled by 4.
  offset &= kBranchOffsetMask;
  return (inst & ~kBranchOffsetMask) | offset;
}
1309
1310
// Extracts the byte offset stored in branch instruction `inst`.
// Inverse of EncodeBranchOffset.
int Arm32Assembler::DecodeBranchOffset(int32_t inst) {
  // Sign-extend, left-shift by 2, then add 8.
  // (<< 8 then >> 6 sign-extends the 24-bit field while scaling by 4.)
  return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
}
1315
1316
// Not supported by the ARM32 assembler (no fixup tracking); always aborts.
uint32_t Arm32Assembler::GetAdjustedPosition(uint32_t old_position ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1321
// Literal pools are not supported by the ARM32 assembler; always aborts.
Literal* Arm32Assembler::NewLiteral(size_t size ATTRIBUTE_UNUSED,
                                    const uint8_t* data ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1327
// Literal loads are not supported by the ARM32 assembler; always aborts.
void Arm32Assembler::LoadLiteral(Register rt ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1333
// Literal loads (register pair) are not supported on ARM32; always aborts.
void Arm32Assembler::LoadLiteral(Register rt ATTRIBUTE_UNUSED, Register rt2 ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1339
// Literal loads (single-precision) are not supported on ARM32; always aborts.
void Arm32Assembler::LoadLiteral(SRegister sd ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1345
// Literal loads (double-precision) are not supported on ARM32; always aborts.
void Arm32Assembler::LoadLiteral(DRegister dd ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}
1351
Dave Allison65fcc2c2014-04-28 13:45:27 -07001352
// Emits rd := rn + value, choosing the shortest encoding. Tries, in order:
// direct add, sub of -value, mvn+add, mvn+sub, and finally movw/movt+add.
// IP is used as scratch in the multi-instruction fallbacks.
void Arm32Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                 Condition cond, SetCc set_cc) {
  // An add of 0 is a plain move, unless the caller wants the flags set.
  if (value == 0 && set_cc != kCcSet) {
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond, set_cc);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    add(rd, rn, shifter_op, cond, set_cc);
  } else if (ShifterOperandCanHoldArm32(-value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond, set_cc);
  } else {
    CHECK(rn != IP);  // IP is about to be clobbered as scratch.
    if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
      // Materialize value via MVN, then add; only the final op sets flags.
      mvn(IP, shifter_op, cond, kCcKeep);
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else if (ShifterOperandCanHoldArm32(~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond, kCcKeep);
      sub(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else {
      // General case: build the 32-bit constant in IP with movw/movt.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    }
  }
}
1387
// Compares rn against an arbitrary 32-bit constant. Uses CMP when the value
// encodes directly, CMN when its complement does, else materializes the
// constant in IP with movw/movt and compares against that.
void Arm32Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    cmp(rn, shifter_op, cond);
  } else if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
    cmn(rn, shifter_op, cond);
  } else {
    movw(IP, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      movt(IP, value_high, cond);
    }
    cmp(rn, ShifterOperand(IP), cond);
  }
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07001403
// Loads an arbitrary 32-bit constant into rd with the shortest sequence:
// single MOV if the value encodes, single MVN if its complement does,
// otherwise a movw/movt pair (movt skipped when the high half is zero).
void Arm32Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    mov(rd, shifter_op, cond);
  } else if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
    mvn(rd, shifter_op, cond);
  } else {
    movw(rd, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      movt(rd, value_high, cond);
    }
  }
}
1418
1419
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm.
//
// Loads `reg` from [base + offset] with the width/signedness given by `type`.
// If the offset does not fit the addressing mode, the effective address is
// first computed into IP (so `base` must not be IP in that case).
void Arm32Assembler::LoadFromOffset(LoadOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(type, offset)) {
    CHECK(base != IP);  // IP is needed as scratch for the address computation.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
1459
1460
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
//
// Loads single-precision `reg` from [base + offset], computing the address
// into IP first when the offset does not fit VLDR's addressing mode.
void Arm32Assembler::LoadSFromOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadSWord, offset)) {
    CHECK_NE(base, IP);  // IP is needed as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
1477
1478
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
//
// Loads double-precision `reg` from [base + offset], computing the address
// into IP first when the offset does not fit VLDR's addressing mode.
void Arm32Assembler::LoadDFromOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadDWord, offset)) {
    CHECK_NE(base, IP);  // IP is needed as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
1495
1496
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm.
//
// Stores `reg` to [base + offset] with the width given by `type`. If the
// offset does not fit the addressing mode, the effective address is first
// computed into IP (so neither `reg` nor `base` may be IP in that case).
void Arm32Assembler::StoreToOffset(StoreOperandType type,
                                   Register reg,
                                   Register base,
                                   int32_t offset,
                                   Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(type, offset)) {
    CHECK(reg != IP);   // IP would be clobbered before the store.
    CHECK(base != IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
1531
1532
1533// Implementation note: this method must emit at most one instruction when
1534// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreToOffset.
1535void Arm32Assembler::StoreSToOffset(SRegister reg,
1536 Register base,
1537 int32_t offset,
1538 Condition cond) {
1539 if (!Address::CanHoldStoreOffsetArm(kStoreSWord, offset)) {
1540 CHECK_NE(base, IP);
1541 LoadImmediate(IP, offset, cond);
1542 add(IP, IP, ShifterOperand(base), cond);
1543 base = IP;
1544 offset = 0;
1545 }
1546 CHECK(Address::CanHoldStoreOffsetArm(kStoreSWord, offset));
1547 vstrs(reg, Address(base, offset), cond);
1548}
1549
1550
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreSToOffset.
//
// Stores double-precision `reg` to [base + offset], computing the address
// into IP first when the offset does not fit VSTR's addressing mode.
void Arm32Assembler::StoreDToOffset(DRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreDWord, offset)) {
    CHECK_NE(base, IP);  // IP is needed as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
1567
1568
// Emits a full-system data memory barrier. The scratch register is part of
// the portable interface but unused here; it is pinned to R12 by contract.
void Arm32Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
1573
1574
// Emits DMB with the given option; `flavor` fills the option field of the
// fixed unconditional DMB encoding.
void Arm32Assembler::dmb(DmbOptions flavor) {
  int32_t encoding = 0xf57ff05f;  // dmb
  Emit(encoding | flavor);
}
1579
1580
// CBZ is a Thumb-only instruction; see CompareAndBranchIfZero for the
// ARM32 cmp+b equivalent.
void Arm32Assembler::cbz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "cbz is not supported on ARM32";
}
1584
1585
// CBNZ is a Thumb-only instruction; see CompareAndBranchIfNonZero for the
// ARM32 cmp+b equivalent.
void Arm32Assembler::cbnz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "cbnz is not supported on ARM32";
}
1589
1590
1591void Arm32Assembler::CompareAndBranchIfZero(Register r, Label* label) {
1592 cmp(r, ShifterOperand(0));
1593 b(label, EQ);
1594}
1595
1596
1597void Arm32Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
1598 cmp(r, ShifterOperand(0));
1599 b(label, NE);
1600}
1601
// Jump tables are not supported by the ARM32 assembler; always aborts.
JumpTable* Arm32Assembler::CreateJumpTable(std::vector<Label*>&& labels ATTRIBUTE_UNUSED,
                                           Register base_reg ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "CreateJumpTable is not supported on ARM32";
  UNREACHABLE();
}
1607
// Jump tables are not supported by the ARM32 assembler; always aborts.
void Arm32Assembler::EmitJumpTableDispatch(JumpTable* jump_table ATTRIBUTE_UNUSED,
                                           Register displacement_reg ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "EmitJumpTableDispatch is not supported on ARM32";
  UNREACHABLE();
}
1613
// Finalizes the generated code via the base class.
void Arm32Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  // Currently the arm32 assembler does not support fixups, and thus no tracking. We must not call
  // FinalizeTrackedLabels(), which would lead to an abort.
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07001619
1620} // namespace arm
1621} // namespace art