blob: b2a354b63c5fa92a267d3ce0cf808b46b06bcf87 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Nicolas Geoffray96f89a22014-07-11 10:57:49 +010017#include <dirent.h>
Andreas Gampefd114702015-05-13 17:00:41 -070018#include <errno.h>
Dave Allison65fcc2c2014-04-28 13:45:27 -070019#include <fstream>
Nicolas Geoffray96f89a22014-07-11 10:57:49 +010020#include <map>
Andreas Gampefd114702015-05-13 17:00:41 -070021#include <string.h>
22#include <sys/types.h>
Dave Allison65fcc2c2014-04-28 13:45:27 -070023
24#include "gtest/gtest.h"
25#include "utils/arm/assembler_thumb2.h"
26#include "base/hex_dump.h"
27#include "common_runtime_test.h"
28
29namespace art {
30namespace arm {
31
32// Include results file (generated manually)
33#include "assembler_thumb_test_expected.cc.inc"
34
#ifndef __ANDROID__
// Host-only: checking requires the target toolchain (as/objcopy/objdump),
// which does not exist on the device.
//
// This controls whether the results are printed to the
// screen or compared against the expected output.
// To generate new expected output, set this to true and
// copy the output into the .cc.inc file in the form
// of the other results.
//
// When this is false, the results are not printed to the
// output, but are compared against the expected results
// in the .cc.inc file.
static constexpr bool kPrintResults = false;
#endif
Dave Allison65fcc2c2014-04-28 13:45:27 -070047
// Ensures the ANDROID_DATA environment variable is set, defaulting it to
// "/tmp" when absent. An already-set value is left untouched.
void SetAndroidData() {
  if (getenv("ANDROID_DATA") == nullptr) {
    setenv("ANDROID_DATA", "/tmp", 1);
  }
}
54
// Compares two C strings, treating any run of whitespace (including none)
// as equivalent. Returns 0 when the strings match under that rule, otherwise
// the difference between the first mismatching characters (strcmp-style
// sign). Used to compare expected disassembly lines against objdump output,
// whose column spacing varies between binutils versions.
int CompareIgnoringSpace(const char* s1, const char* s2) {
  while (*s1 != '\0') {
    // isspace() requires an unsigned char (or EOF) value; passing a plain
    // char that happens to be negative is undefined behavior.
    while (isspace(static_cast<unsigned char>(*s1))) ++s1;
    while (isspace(static_cast<unsigned char>(*s2))) ++s2;
    if (*s1 == '\0' || *s1 != *s2) {
      break;
    }
    ++s1;
    ++s2;
  }
  // Skip trailing whitespace so that e.g. a newline left by fgets() on only
  // one side does not report a spurious mismatch.
  while (isspace(static_cast<unsigned char>(*s1))) ++s1;
  while (isspace(static_cast<unsigned char>(*s2))) ++s2;
  return *s1 - *s2;
}
67
// Lazily populates the global test_results map (declared in
// assembler_thumb_test_expected.cc.inc) on first use; subsequent calls are
// no-ops.
void InitResults() {
  if (test_results.empty()) {
    setup_results();
  }
}
73
// Returns the directory prefix for the target toolchain binaries (as,
// objcopy, objdump), caching it in a function-local static. The first call
// also loads the expected-results table and ensures ANDROID_DATA is set.
// On the device this returns an empty string (no toolchain available).
std::string GetToolsDir() {
#ifndef __ANDROID__
  // This will only work on the host. There is no as, objcopy or objdump on the device.
  static std::string toolsdir;

  if (toolsdir.empty()) {
    setup_results();
    toolsdir = CommonRuntimeTest::GetAndroidTargetToolsDir(kThumb2);
    SetAndroidData();
  }

  return toolsdir;
#else
  return std::string();
#endif
}
90
91void DumpAndCheck(std::vector<uint8_t>& code, const char* testname, const char* const* results) {
Andreas Gampec60e1b72015-07-30 08:57:50 -070092#ifndef __ANDROID__
Vladimir Markocf93a5c2015-06-16 11:33:24 +000093 static std::string toolsdir = GetToolsDir();
94
Dave Allison65fcc2c2014-04-28 13:45:27 -070095 ScratchFile file;
96
97 const char* filename = file.GetFilename().c_str();
98
99 std::ofstream out(filename);
100 if (out) {
101 out << ".section \".text\"\n";
102 out << ".syntax unified\n";
103 out << ".arch armv7-a\n";
104 out << ".thumb\n";
105 out << ".thumb_func\n";
106 out << ".type " << testname << ", #function\n";
107 out << ".global " << testname << "\n";
108 out << testname << ":\n";
109 out << ".fnstart\n";
110
111 for (uint32_t i = 0 ; i < code.size(); ++i) {
112 out << ".byte " << (static_cast<int>(code[i]) & 0xff) << "\n";
113 }
114 out << ".fnend\n";
115 out << ".size " << testname << ", .-" << testname << "\n";
116 }
117 out.close();
118
Andreas Gampe4470c1d2014-07-21 18:32:59 -0700119 char cmd[1024];
Dave Allison65fcc2c2014-04-28 13:45:27 -0700120
121 // Assemble the .S
David Srbecky3e52aa42015-04-12 07:45:18 +0100122 snprintf(cmd, sizeof(cmd), "%sas %s -o %s.o", toolsdir.c_str(), filename, filename);
Andreas Gampefd114702015-05-13 17:00:41 -0700123 int cmd_result = system(cmd);
124 ASSERT_EQ(cmd_result, 0) << strerror(errno);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700125
126 // Remove the $d symbols to prevent the disassembler dumping the instructions
127 // as .word
David Srbecky3e52aa42015-04-12 07:45:18 +0100128 snprintf(cmd, sizeof(cmd), "%sobjcopy -N '$d' %s.o %s.oo", toolsdir.c_str(), filename, filename);
Andreas Gampefd114702015-05-13 17:00:41 -0700129 int cmd_result2 = system(cmd);
130 ASSERT_EQ(cmd_result2, 0) << strerror(errno);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700131
132 // Disassemble.
133
David Srbecky3e52aa42015-04-12 07:45:18 +0100134 snprintf(cmd, sizeof(cmd), "%sobjdump -d %s.oo | grep '^ *[0-9a-f][0-9a-f]*:'",
135 toolsdir.c_str(), filename);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700136 if (kPrintResults) {
137 // Print the results only, don't check. This is used to generate new output for inserting
138 // into the .inc file.
Andreas Gampefd114702015-05-13 17:00:41 -0700139 int cmd_result3 = system(cmd);
140 ASSERT_EQ(cmd_result3, 0) << strerror(errno);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700141 } else {
142 // Check the results match the appropriate results in the .inc file.
143 FILE *fp = popen(cmd, "r");
144 ASSERT_TRUE(fp != nullptr);
145
Dave Allison65fcc2c2014-04-28 13:45:27 -0700146 uint32_t lineindex = 0;
147
148 while (!feof(fp)) {
149 char testline[256];
150 char *s = fgets(testline, sizeof(testline), fp);
151 if (s == nullptr) {
152 break;
153 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000154 if (CompareIgnoringSpace(results[lineindex], testline) != 0) {
Dave Allison45fdb932014-06-25 12:37:10 -0700155 LOG(FATAL) << "Output is not as expected at line: " << lineindex
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000156 << results[lineindex] << "/" << testline;
Dave Allison45fdb932014-06-25 12:37:10 -0700157 }
Dave Allison65fcc2c2014-04-28 13:45:27 -0700158 ++lineindex;
159 }
160 // Check that we are at the end.
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000161 ASSERT_TRUE(results[lineindex] == nullptr);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700162 fclose(fp);
163 }
164
165 char buf[FILENAME_MAX];
166 snprintf(buf, sizeof(buf), "%s.o", filename);
167 unlink(buf);
168
169 snprintf(buf, sizeof(buf), "%s.oo", filename);
170 unlink(buf);
171#endif
172}
173
174#define __ assembler->
175
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000176void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname,
177 const char* const* results) {
178 __ FinalizeCode();
179 size_t cs = __ CodeSize();
180 std::vector<uint8_t> managed_code(cs);
181 MemoryRegion code(&managed_code[0], managed_code.size());
182 __ FinalizeInstructions(code);
183
184 DumpAndCheck(managed_code, testname, results);
185}
186
187void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname) {
188 InitResults();
189 std::map<std::string, const char* const*>::iterator results = test_results.find(testname);
190 ASSERT_NE(results, test_results.end());
191
192 EmitAndCheck(assembler, testname, results->second);
193}
194
#undef __

// The tests below emit through a local assembler object, so the shorthand
// dereferences with '.' instead of '->'.
#define __ assembler.
198
// Register- and immediate-form MOVs with low (R0-R7) and high (R8+)
// registers; output is checked against the expected disassembly in the .inc.
TEST(Thumb2AssemblerTest, SimpleMov) {
  arm::Thumb2Assembler assembler;

  __ movs(R0, ShifterOperand(R1));
  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  // Immediate forms.
  __ mov(R0, ShifterOperand(1));
  __ mov(R8, ShifterOperand(9));

  EmitAndCheck(&assembler, "SimpleMov");
}
211
// Same MOVs as above but with Force32Bit(), so the assembler must pick the
// 32-bit encodings even where 16-bit ones exist.
TEST(Thumb2AssemblerTest, SimpleMov32) {
  arm::Thumb2Assembler assembler;
  __ Force32Bit();

  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  EmitAndCheck(&assembler, "SimpleMov32");
}
221
// A minimal mov/add mix: register move, flag-setting add, and add with a
// zero immediate.
TEST(Thumb2AssemblerTest, SimpleMovAdd) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(R1));
  __ adds(R0, R1, ShifterOperand(R2));
  __ add(R0, R1, ShifterOperand(0));

  EmitAndCheck(&assembler, "SimpleMovAdd");
}
231
// Exercises register-operand data-processing instructions across the 16- vs
// 32-bit encoding selection rules: register class (low/high), flag-setting
// suffix, explicit kCcKeep, IT-block context, and the default kCcDontCare.
TEST(Thumb2AssemblerTest, DataProcessingRegister) {
  arm::Thumb2Assembler assembler;

  // 32 bit variants using low registers.
  __ mvn(R0, ShifterOperand(R1), AL, kCcKeep);
  __ add(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ sub(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ and_(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ orr(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ eor(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ bic(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ adc(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ sbc(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ rsb(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ teq(R0, ShifterOperand(R1));

  // 16 bit variants using low registers.
  __ movs(R0, ShifterOperand(R1));
  __ mov(R0, ShifterOperand(R1), AL, kCcKeep);
  __ mvns(R0, ShifterOperand(R1));
  __ add(R0, R0, ShifterOperand(R1), AL, kCcKeep);
  __ adds(R0, R1, ShifterOperand(R2));
  __ subs(R0, R1, ShifterOperand(R2));
  __ adcs(R0, R0, ShifterOperand(R1));
  __ sbcs(R0, R0, ShifterOperand(R1));
  __ ands(R0, R0, ShifterOperand(R1));
  __ orrs(R0, R0, ShifterOperand(R1));
  __ eors(R0, R0, ShifterOperand(R1));
  __ bics(R0, R0, ShifterOperand(R1));
  __ tst(R0, ShifterOperand(R1));
  __ cmp(R0, ShifterOperand(R1));
  __ cmn(R0, ShifterOperand(R1));

  // 16-bit variants using high registers.
  __ mov(R1, ShifterOperand(R8), AL, kCcKeep);
  __ mov(R9, ShifterOperand(R0), AL, kCcKeep);
  __ mov(R8, ShifterOperand(R9), AL, kCcKeep);
  __ add(R1, R1, ShifterOperand(R8), AL, kCcKeep);
  __ add(R9, R9, ShifterOperand(R0), AL, kCcKeep);
  __ add(R8, R8, ShifterOperand(R9), AL, kCcKeep);
  __ cmp(R0, ShifterOperand(R9));
  __ cmp(R8, ShifterOperand(R1));
  __ cmp(R9, ShifterOperand(R8));

  // The 16-bit RSBS Rd, Rn, #0, also known as NEGS Rd, Rn is specified using
  // an immediate (0) but emitted without any, so we test it here.
  __ rsbs(R0, R1, ShifterOperand(0));
  __ rsbs(R0, R0, ShifterOperand(0));  // Check Rd == Rn code path.

  // 32 bit variants using high registers that would be 16-bit if using low registers.
  __ movs(R0, ShifterOperand(R8));
  __ mvns(R0, ShifterOperand(R8));
  __ add(R0, R1, ShifterOperand(R8), AL, kCcKeep);
  __ adds(R0, R1, ShifterOperand(R8));
  __ subs(R0, R1, ShifterOperand(R8));
  __ adcs(R0, R0, ShifterOperand(R8));
  __ sbcs(R0, R0, ShifterOperand(R8));
  __ ands(R0, R0, ShifterOperand(R8));
  __ orrs(R0, R0, ShifterOperand(R8));
  __ eors(R0, R0, ShifterOperand(R8));
  __ bics(R0, R0, ShifterOperand(R8));
  __ tst(R0, ShifterOperand(R8));
  __ cmn(R0, ShifterOperand(R8));
  __ rsbs(R0, R8, ShifterOperand(0));  // Check that this is not emitted as 16-bit.
  __ rsbs(R8, R8, ShifterOperand(0));  // Check that this is not emitted as 16-bit (Rd == Rn).

  // 32-bit variants of instructions that would be 16-bit outside IT block.
  __ it(arm::EQ);
  __ mvns(R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ adds(R0, R1, ShifterOperand(R2), arm::EQ);
  __ it(arm::EQ);
  __ subs(R0, R1, ShifterOperand(R2), arm::EQ);
  __ it(arm::EQ);
  __ adcs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ sbcs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ ands(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ orrs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ eors(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ bics(R0, R0, ShifterOperand(R1), arm::EQ);

  // 16-bit variants of instructions that would be 32-bit outside IT block.
  __ it(arm::EQ);
  __ mvn(R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ add(R0, R1, ShifterOperand(R2), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ sub(R0, R1, ShifterOperand(R2), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ adc(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ sbc(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ and_(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ orr(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ eor(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ bic(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);

  // 16 bit variants selected for the default kCcDontCare.
  __ mov(R0, ShifterOperand(R1));
  __ mvn(R0, ShifterOperand(R1));
  __ add(R0, R0, ShifterOperand(R1));
  __ add(R0, R1, ShifterOperand(R2));
  __ sub(R0, R1, ShifterOperand(R2));
  __ adc(R0, R0, ShifterOperand(R1));
  __ sbc(R0, R0, ShifterOperand(R1));
  __ and_(R0, R0, ShifterOperand(R1));
  __ orr(R0, R0, ShifterOperand(R1));
  __ eor(R0, R0, ShifterOperand(R1));
  __ bic(R0, R0, ShifterOperand(R1));
  __ mov(R1, ShifterOperand(R8));
  __ mov(R9, ShifterOperand(R0));
  __ mov(R8, ShifterOperand(R9));
  __ add(R1, R1, ShifterOperand(R8));
  __ add(R9, R9, ShifterOperand(R0));
  __ add(R8, R8, ShifterOperand(R9));
  __ rsb(R0, R1, ShifterOperand(0));
  __ rsb(R0, R0, ShifterOperand(0));

  // And an arbitrary 32-bit instruction using IP.
  __ add(R12, R1, ShifterOperand(R0), AL, kCcKeep);

  EmitAndCheck(&assembler, "DataProcessingRegister");
}
364
// Data-processing instructions with small immediate operands, including the
// flag-setting (s-suffix) and compare/test forms.
TEST(Thumb2AssemblerTest, DataProcessingImmediate) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x55));
  __ mvn(R0, ShifterOperand(0x55));
  __ add(R0, R1, ShifterOperand(0x55));
  __ sub(R0, R1, ShifterOperand(0x55));
  __ and_(R0, R1, ShifterOperand(0x55));
  __ orr(R0, R1, ShifterOperand(0x55));
  __ eor(R0, R1, ShifterOperand(0x55));
  __ bic(R0, R1, ShifterOperand(0x55));
  __ adc(R0, R1, ShifterOperand(0x55));
  __ sbc(R0, R1, ShifterOperand(0x55));
  __ rsb(R0, R1, ShifterOperand(0x55));

  // Compare/test forms (no destination register).
  __ tst(R0, ShifterOperand(0x55));
  __ teq(R0, ShifterOperand(0x55));
  __ cmp(R0, ShifterOperand(0x55));
  __ cmn(R0, ShifterOperand(0x55));

  // Small immediates.
  __ add(R0, R1, ShifterOperand(5));
  __ sub(R0, R1, ShifterOperand(5));

  // Flag-setting variants.
  __ movs(R0, ShifterOperand(0x55));
  __ mvns(R0, ShifterOperand(0x55));

  __ adds(R0, R1, ShifterOperand(5));
  __ subs(R0, R1, ShifterOperand(5));

  EmitAndCheck(&assembler, "DataProcessingImmediate");
}
396
// Data-processing instructions whose immediate (0x550055) requires the
// Thumb2 modified-immediate encoding.
TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediate) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x550055));
  __ mvn(R0, ShifterOperand(0x550055));
  __ add(R0, R1, ShifterOperand(0x550055));
  __ sub(R0, R1, ShifterOperand(0x550055));
  __ and_(R0, R1, ShifterOperand(0x550055));
  __ orr(R0, R1, ShifterOperand(0x550055));
  __ eor(R0, R1, ShifterOperand(0x550055));
  __ bic(R0, R1, ShifterOperand(0x550055));
  __ adc(R0, R1, ShifterOperand(0x550055));
  __ sbc(R0, R1, ShifterOperand(0x550055));
  __ rsb(R0, R1, ShifterOperand(0x550055));

  // Compare/test forms with the same modified immediate.
  __ tst(R0, ShifterOperand(0x550055));
  __ teq(R0, ShifterOperand(0x550055));
  __ cmp(R0, ShifterOperand(0x550055));
  __ cmn(R0, ShifterOperand(0x550055));

  EmitAndCheck(&assembler, "DataProcessingModifiedImmediate");
}
419
420
// Covers the distinct modified-immediate patterns: replicated byte/halfword
// forms and rotated 8-bit values at boundary rotation positions.
TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediates) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x550055));
  __ mov(R0, ShifterOperand(0x55005500));
  __ mov(R0, ShifterOperand(0x55555555));
  __ mov(R0, ShifterOperand(0xd5000000));  // rotated to first position
  __ mov(R0, ShifterOperand(0x6a000000));  // rotated to second position
  __ mov(R0, ShifterOperand(0x350));       // rotated to 2nd last position
  __ mov(R0, ShifterOperand(0x1a8));       // rotated to last position

  EmitAndCheck(&assembler, "DataProcessingModifiedImmediates");
}
434
// Shifted-register operands (LSL/LSR/ASR/ROR/RRX) across 16-bit, 32-bit,
// and high-register encodings.
TEST(Thumb2AssemblerTest, DataProcessingShiftedRegister) {
  arm::Thumb2Assembler assembler;

  // 16-bit variants.
  __ movs(R3, ShifterOperand(R4, LSL, 4));
  __ movs(R3, ShifterOperand(R4, LSR, 5));
  __ movs(R3, ShifterOperand(R4, ASR, 6));

  // 32-bit ROR because ROR immediate doesn't have the same 16-bit version as other shifts.
  __ movs(R3, ShifterOperand(R4, ROR, 7));

  // 32-bit RRX because RRX has no 16-bit version.
  __ movs(R3, ShifterOperand(R4, RRX));

  // 32 bit variants (not setting condition codes).
  __ mov(R3, ShifterOperand(R4, LSL, 4), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, LSR, 5), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, ASR, 6), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, ROR, 7), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, RRX), AL, kCcKeep);

  // 32 bit variants (high registers).
  __ movs(R8, ShifterOperand(R4, LSL, 4));
  __ movs(R8, ShifterOperand(R4, LSR, 5));
  __ movs(R8, ShifterOperand(R4, ASR, 6));
  __ movs(R8, ShifterOperand(R4, ROR, 7));
  __ movs(R8, ShifterOperand(R4, RRX));

  EmitAndCheck(&assembler, "DataProcessingShiftedRegister");
}
465
466
// Basic immediate-offset loads of every width, with low registers, SP-based
// addressing, and the 32-bit high-register forms.
TEST(Thumb2AssemblerTest, BasicLoad) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, 24));
  __ ldrb(R3, Address(R4, 24));
  __ ldrh(R3, Address(R4, 24));
  __ ldrsb(R3, Address(R4, 24));
  __ ldrsh(R3, Address(R4, 24));

  // SP-relative load.
  __ ldr(R3, Address(SP, 24));

  // 32 bit variants
  __ ldr(R8, Address(R4, 24));
  __ ldrb(R8, Address(R4, 24));
  __ ldrh(R8, Address(R4, 24));
  __ ldrsb(R8, Address(R4, 24));
  __ ldrsh(R8, Address(R4, 24));

  EmitAndCheck(&assembler, "BasicLoad");
}
487
488
// Basic immediate-offset stores of every width, with low registers, SP-based
// addressing, and the 32-bit high-register forms.
TEST(Thumb2AssemblerTest, BasicStore) {
  arm::Thumb2Assembler assembler;

  __ str(R3, Address(R4, 24));
  __ strb(R3, Address(R4, 24));
  __ strh(R3, Address(R4, 24));

  // SP-relative store.
  __ str(R3, Address(SP, 24));

  // 32 bit variants.
  __ str(R8, Address(R4, 24));
  __ strb(R8, Address(R4, 24));
  __ strh(R8, Address(R4, 24));

  EmitAndCheck(&assembler, "BasicStore");
}
505
// Loads of every width through all six addressing modes (offset, pre/post
// indexed, and their negative counterparts) with a positive displacement.
TEST(Thumb2AssemblerTest, ComplexLoad) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexLoad");
}
546
547
// Stores of every width through all six addressing modes with a positive
// displacement.
TEST(Thumb2AssemblerTest, ComplexStore) {
  arm::Thumb2Assembler assembler;

  __ str(R3, Address(R4, 24, Address::Mode::Offset));
  __ str(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, 24, Address::Mode::Offset));
  __ strb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, 24, Address::Mode::Offset));
  __ strh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexStore");
}
574
// Loads and stores with a negative displacement (-24) through all six
// addressing modes, exercising sign handling in the encoders.
TEST(Thumb2AssemblerTest, NegativeLoadStore) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ str(R3, Address(R4, -24, Address::Mode::Offset));
  __ str(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, -24, Address::Mode::Offset));
  __ strb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, -24, Address::Mode::Offset));
  __ strh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "NegativeLoadStore");
}
636
// Minimal dual-register (64-bit) store/load pair with an offset address.
TEST(Thumb2AssemblerTest, SimpleLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));

  EmitAndCheck(&assembler, "SimpleLoadStoreDual");
}
645
// Dual-register store/load through all six addressing modes with a positive
// displacement.
TEST(Thumb2AssemblerTest, ComplexLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ strd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexLoadStoreDual");
}
665
// Dual-register store/load through all six addressing modes with a negative
// displacement.
TEST(Thumb2AssemblerTest, NegativeLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, -24, Address::Mode::Offset));
  __ strd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, -24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "NegativeLoadStoreDual");
}
685
// Branch resolution: backward and forward branches, unconditional and
// conditional, plus a label with two pending (linked) branch sites.
TEST(Thumb2AssemblerTest, SimpleBranch) {
  arm::Thumb2Assembler assembler;

  // Backward unconditional branch.
  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);
  // Forward unconditional branch.
  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  // Backward conditional branch.
  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  // Forward conditional branch.
  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  EmitAndCheck(&assembler, "SimpleBranch");
}
723
// Same branch patterns as SimpleBranch but with Force32Bit(), so all
// branches must use the 32-bit encodings.
TEST(Thumb2AssemblerTest, LongBranch) {
  arm::Thumb2Assembler assembler;
  __ Force32Bit();
  // 32 bit branches.
  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);

  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  EmitAndCheck(&assembler, "LongBranch");
}
763
// Verifies ldm encoding selection: the 16-bit form for low registers, the
// 32-bit form when high registers (LR/R11) appear in the register list, and
// the single-register special case.
TEST(Thumb2AssemblerTest, LoadMultiple) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ ldm(DB_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ ldm(DB_W, R4, (1 << LR | 1 << R11));
  __ ldm(DB, R4, (1 << LR | 1 << R11));

  // Single reg is converted to ldr
  __ ldm(DB_W, R4, (1 << R5));

  EmitAndCheck(&assembler, "LoadMultiple");
}
779
// Verifies stm encoding selection: the 16-bit form for low registers, the
// 32-bit form when high registers (LR/R11) appear in the register list, and
// the single-register special case (with and without writeback).
TEST(Thumb2AssemblerTest, StoreMultiple) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ stm(IA_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ stm(IA_W, R4, (1 << LR | 1 << R11));
  __ stm(IA, R4, (1 << LR | 1 << R11));

  // Single reg is converted to str
  __ stm(IA_W, R4, (1 << R5));
  __ stm(IA, R4, (1 << R5));

  EmitAndCheck(&assembler, "StoreMultiple");
}
796
// Verifies movw encoding selection (16-bit for small immediates into low
// registers, 32-bit otherwise) and movt, which only has a 32-bit encoding.
TEST(Thumb2AssemblerTest, MovWMovT) {
  arm::Thumb2Assembler assembler;

  __ movw(R4, 0);         // 16 bit.
  __ movw(R4, 0x34);      // 16 bit.
  __ movw(R9, 0x34);      // 32 bit due to high register.
  __ movw(R3, 0x1234);    // 32 bit due to large value.
  __ movw(R9, 0xffff);    // 32 bit due to large value and high register.

  // Always 32 bit.
  __ movt(R0, 0);
  __ movt(R0, 0x1234);
  __ movt(R1, 0xffff);

  EmitAndCheck(&assembler, "MovWMovT");
}
813
// Verifies the SP-relative add/sub special forms: 16-bit encodings for small
// immediates and low destination registers, 32-bit when the immediate is too
// large or a high register is involved.
TEST(Thumb2AssemblerTest, SpecialAddSub) {
  arm::Thumb2Assembler assembler;

  __ add(R2, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(SP, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ add(R2, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ add(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.

  __ sub(SP, SP, ShifterOperand(0x50));   // 16 bit
  __ sub(R0, SP, ShifterOperand(0x50));   // 32 bit
  __ sub(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ sub(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size

  EmitAndCheck(&assembler, "SpecialAddSub");
}
832
// Verifies the StoreToOffset macro-instruction for word/halfword/byte sizes,
// including an offset too large for a single store (forcing the assembler to
// synthesize it) and stores through base register R12.
TEST(Thumb2AssemblerTest, StoreToOffset) {
  arm::Thumb2Assembler assembler;

  __ StoreToOffset(kStoreWord, R2, R4, 12);      // Simple
  __ StoreToOffset(kStoreWord, R2, R4, 0x2000);  // Offset too big.
  __ StoreToOffset(kStoreWord, R0, R12, 12);
  __ StoreToOffset(kStoreHalfword, R0, R12, 12);
  __ StoreToOffset(kStoreByte, R2, R12, 12);

  EmitAndCheck(&assembler, "StoreToOffset");
}
844
845
// Verifies IT (if-then) block emission for every supported then/else pattern,
// from a single conditional instruction up to a four-instruction TTTE block.
// Each instruction after an it() carries the condition the IT block dictates.
TEST(Thumb2AssemblerTest, IfThen) {
  arm::Thumb2Assembler assembler;

  // IT: one then.
  __ it(EQ);
  __ mov(R1, ShifterOperand(1), EQ);

  // ITT: then, then.
  __ it(EQ, kItThen);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);

  // ITE: then, else.
  __ it(EQ, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);

  // ITTE: then, then, else.
  __ it(EQ, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), NE);

  // ITEE: then, else, else.
  __ it(EQ, kItElse, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);
  __ mov(R3, ShifterOperand(3), NE);

  // ITTTE: then, then, then, else.
  __ it(EQ, kItThen, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), EQ);
  __ mov(R4, ShifterOperand(4), NE);

  EmitAndCheck(&assembler, "IfThen");
}
878
879TEST(Thumb2AssemblerTest, CbzCbnz) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000880 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700881
882 Label l1;
883 __ cbz(R2, &l1);
884 __ mov(R1, ShifterOperand(3));
885 __ mov(R2, ShifterOperand(3));
886 __ Bind(&l1);
887 __ mov(R2, ShifterOperand(4));
888
889 Label l2;
890 __ cbnz(R2, &l2);
891 __ mov(R8, ShifterOperand(3));
892 __ mov(R2, ShifterOperand(3));
893 __ Bind(&l2);
894 __ mov(R2, ShifterOperand(4));
895
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000896 EmitAndCheck(&assembler, "CbzCbnz");
Dave Allison65fcc2c2014-04-28 13:45:27 -0700897}
898
// Verifies multiply instruction encodings (mul, mla, mls, umull) with both
// low-register and high-register operand combinations.
TEST(Thumb2AssemblerTest, Multiply) {
  arm::Thumb2Assembler assembler;

  __ mul(R0, R1, R0);
  __ mul(R0, R1, R2);
  __ mul(R8, R9, R8);
  __ mul(R8, R9, R10);

  __ mla(R0, R1, R2, R3);
  __ mla(R8, R9, R8, R9);

  __ mls(R0, R1, R2, R3);
  __ mls(R8, R9, R8, R9);

  __ umull(R0, R1, R2, R3);
  __ umull(R8, R9, R10, R11);

  EmitAndCheck(&assembler, "Multiply");
}
918
// Verifies signed and unsigned divide encodings with low and high registers.
TEST(Thumb2AssemblerTest, Divide) {
  arm::Thumb2Assembler assembler;

  __ sdiv(R0, R1, R2);
  __ sdiv(R8, R9, R10);

  __ udiv(R0, R1, R2);
  __ udiv(R8, R9, R10);

  EmitAndCheck(&assembler, "Divide");
}
930
// Verifies VFP vmov encodings: immediate into single/double registers and
// register-to-register moves.
TEST(Thumb2AssemblerTest, VMov) {
  arm::Thumb2Assembler assembler;

  __ vmovs(S1, 1.0);
  __ vmovd(D1, 1.0);

  __ vmovs(S1, S2);
  __ vmovd(D1, D2);

  EmitAndCheck(&assembler, "VMov");
}
942
943
// Verifies the basic VFP arithmetic encodings (add, sub, mul, mla, mls, div,
// abs, neg, sqrt) in both single- and double-precision forms.
TEST(Thumb2AssemblerTest, BasicFloatingPoint) {
  arm::Thumb2Assembler assembler;

  // Single precision.
  __ vadds(S0, S1, S2);
  __ vsubs(S0, S1, S2);
  __ vmuls(S0, S1, S2);
  __ vmlas(S0, S1, S2);
  __ vmlss(S0, S1, S2);
  __ vdivs(S0, S1, S2);
  __ vabss(S0, S1);
  __ vnegs(S0, S1);
  __ vsqrts(S0, S1);

  // Double precision.
  __ vaddd(D0, D1, D2);
  __ vsubd(D0, D1, D2);
  __ vmuld(D0, D1, D2);
  __ vmlad(D0, D1, D2);
  __ vmlsd(D0, D1, D2);
  __ vdivd(D0, D1, D2);
  __ vabsd(D0, D1);
  __ vnegd(D0, D1);
  __ vsqrtd(D0, D1);

  EmitAndCheck(&assembler, "BasicFloatingPoint");
}
969
// Verifies VFP conversion encodings: single<->double, and float<->integer
// (signed and unsigned) in both directions and both precisions.
TEST(Thumb2AssemblerTest, FloatingPointConversions) {
  arm::Thumb2Assembler assembler;

  // Between precisions.
  __ vcvtsd(S2, D2);
  __ vcvtds(D2, S2);

  // Single <-> signed int.
  __ vcvtis(S1, S2);
  __ vcvtsi(S1, S2);

  // Double <-> signed int.
  __ vcvtid(S1, D2);
  __ vcvtdi(D1, S2);

  // Single <-> unsigned int.
  __ vcvtus(S1, S2);
  __ vcvtsu(S1, S2);

  // Double <-> unsigned int.
  __ vcvtud(S1, D2);
  __ vcvtdu(D1, S2);

  EmitAndCheck(&assembler, "FloatingPointConversions");
}
990
// Verifies VFP compare encodings: register-register and compare-with-zero,
// single and double precision.
TEST(Thumb2AssemblerTest, FloatingPointComparisons) {
  arm::Thumb2Assembler assembler;

  __ vcmps(S0, S1);
  __ vcmpd(D0, D1);

  __ vcmpsz(S2);
  __ vcmpdz(D2);

  EmitAndCheck(&assembler, "FloatingPointComparisons");
}
1002
// Verifies the register branch/call encodings blx and bx through LR.
TEST(Thumb2AssemblerTest, Calls) {
  arm::Thumb2Assembler assembler;

  __ blx(LR);
  __ bx(LR);

  EmitAndCheck(&assembler, "Calls");
}
1011
// Verifies the bkpt encoding with immediate 0.
TEST(Thumb2AssemblerTest, Breakpoint) {
  arm::Thumb2Assembler assembler;

  __ bkpt(0);

  EmitAndCheck(&assembler, "Breakpoint");
}
1019
// Verifies SP-relative str of R1 with a small offset (68) and a large
// offset (1068).
TEST(Thumb2AssemblerTest, StrR1) {
  arm::Thumb2Assembler assembler;

  __ str(R1, Address(SP, 68));
  __ str(R1, Address(SP, 1068));

  EmitAndCheck(&assembler, "StrR1");
}
1028
// Verifies vpush/vpop encodings for runs of 4 single and 4 double registers
// starting at S2/D2.
TEST(Thumb2AssemblerTest, VPushPop) {
  arm::Thumb2Assembler assembler;

  __ vpushs(S2, 4);
  __ vpushd(D2, 4);

  __ vpops(S2, 4);
  __ vpopd(D2, 4);

  EmitAndCheck(&assembler, "VPushPop");
}
1040
1041TEST(Thumb2AssemblerTest, Max16BitBranch) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001042 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001043
1044 Label l1;
1045 __ b(&l1);
1046 for (int i = 0 ; i < (1 << 11) ; i += 2) {
1047 __ mov(R3, ShifterOperand(i & 0xff));
1048 }
1049 __ Bind(&l1);
1050 __ mov(R1, ShifterOperand(R2));
1051
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001052 EmitAndCheck(&assembler, "Max16BitBranch");
Dave Allison65fcc2c2014-04-28 13:45:27 -07001053}
1054
1055TEST(Thumb2AssemblerTest, Branch32) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001056 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001057
1058 Label l1;
1059 __ b(&l1);
1060 for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
1061 __ mov(R3, ShifterOperand(i & 0xff));
1062 }
1063 __ Bind(&l1);
1064 __ mov(R1, ShifterOperand(R2));
1065
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001066 EmitAndCheck(&assembler, "Branch32");
Dave Allison65fcc2c2014-04-28 13:45:27 -07001067}
1068
1069TEST(Thumb2AssemblerTest, CompareAndBranchMax) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001070 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001071
1072 Label l1;
1073 __ cbz(R4, &l1);
1074 for (int i = 0 ; i < (1 << 7) ; i += 2) {
1075 __ mov(R3, ShifterOperand(i & 0xff));
1076 }
1077 __ Bind(&l1);
1078 __ mov(R1, ShifterOperand(R2));
1079
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001080 EmitAndCheck(&assembler, "CompareAndBranchMax");
Dave Allison65fcc2c2014-04-28 13:45:27 -07001081}
1082
1083TEST(Thumb2AssemblerTest, CompareAndBranchRelocation16) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001084 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001085
1086 Label l1;
1087 __ cbz(R4, &l1);
1088 for (int i = 0 ; i < (1 << 7) + 2 ; i += 2) {
1089 __ mov(R3, ShifterOperand(i & 0xff));
1090 }
1091 __ Bind(&l1);
1092 __ mov(R1, ShifterOperand(R2));
1093
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001094 EmitAndCheck(&assembler, "CompareAndBranchRelocation16");
Dave Allison65fcc2c2014-04-28 13:45:27 -07001095}
1096
1097TEST(Thumb2AssemblerTest, CompareAndBranchRelocation32) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001098 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001099
1100 Label l1;
1101 __ cbz(R4, &l1);
1102 for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
1103 __ mov(R3, ShifterOperand(i & 0xff));
1104 }
1105 __ Bind(&l1);
1106 __ mov(R1, ShifterOperand(R2));
1107
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001108 EmitAndCheck(&assembler, "CompareAndBranchRelocation32");
Dave Allison65fcc2c2014-04-28 13:45:27 -07001109}
1110
1111TEST(Thumb2AssemblerTest, MixedBranch32) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001112 arm::Thumb2Assembler assembler;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001113
1114 Label l1;
1115 Label l2;
1116 __ b(&l1); // Forwards.
1117 __ Bind(&l2);
1118
1119 // Space to force relocation.
1120 for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
1121 __ mov(R3, ShifterOperand(i & 0xff));
1122 }
1123 __ b(&l2); // Backwards.
1124 __ Bind(&l1);
1125 __ mov(R1, ShifterOperand(R2));
1126
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001127 EmitAndCheck(&assembler, "MixedBranch32");
Dave Allison65fcc2c2014-04-28 13:45:27 -07001128}
1129
// Verifies shift instruction (LSL/LSR/ASR/ROR) encoding selection: 16-bit
// forms where the flag-setting behavior, registers and operands allow it,
// 32-bit forms for kCcKeep, ROR-immediate, high registers, or distinct
// Rd/Rn with a register shift amount.
TEST(Thumb2AssemblerTest, Shifts) {
  arm::Thumb2Assembler assembler;

  // 16 bit selected for CcDontCare.
  __ Lsl(R0, R1, 5);
  __ Lsr(R0, R1, 5);
  __ Asr(R0, R1, 5);

  __ Lsl(R0, R0, R1);
  __ Lsr(R0, R0, R1);
  __ Asr(R0, R0, R1);
  __ Ror(R0, R0, R1);

  // 16 bit with kCcSet.
  __ Lsls(R0, R1, 5);
  __ Lsrs(R0, R1, 5);
  __ Asrs(R0, R1, 5);

  __ Lsls(R0, R0, R1);
  __ Lsrs(R0, R0, R1);
  __ Asrs(R0, R0, R1);
  __ Rors(R0, R0, R1);

  // 32-bit with kCcKeep.
  __ Lsl(R0, R1, 5, AL, kCcKeep);
  __ Lsr(R0, R1, 5, AL, kCcKeep);
  __ Asr(R0, R1, 5, AL, kCcKeep);

  __ Lsl(R0, R0, R1, AL, kCcKeep);
  __ Lsr(R0, R0, R1, AL, kCcKeep);
  __ Asr(R0, R0, R1, AL, kCcKeep);
  __ Ror(R0, R0, R1, AL, kCcKeep);

  // 32-bit because ROR immediate doesn't have a 16-bit version like the other shifts.
  __ Ror(R0, R1, 5);
  __ Rors(R0, R1, 5);
  __ Ror(R0, R1, 5, AL, kCcKeep);

  // 32 bit due to high registers.
  __ Lsl(R8, R1, 5);
  __ Lsr(R0, R8, 5);
  __ Asr(R8, R1, 5);
  __ Ror(R0, R8, 5);

  // 32 bit due to different Rd and Rn.
  __ Lsl(R0, R1, R2);
  __ Lsr(R0, R1, R2);
  __ Asr(R0, R1, R2);
  __ Ror(R0, R1, R2);

  // 32 bit due to use of high registers.
  __ Lsl(R8, R1, R2);
  __ Lsr(R0, R8, R2);
  __ Asr(R0, R1, R8);

  // S bit (all 32 bit)

  // 32 bit due to high registers.
  __ Lsls(R8, R1, 5);
  __ Lsrs(R0, R8, 5);
  __ Asrs(R8, R1, 5);
  __ Rors(R0, R8, 5);

  // 32 bit due to different Rd and Rn.
  __ Lsls(R0, R1, R2);
  __ Lsrs(R0, R1, R2);
  __ Asrs(R0, R1, R2);
  __ Rors(R0, R1, R2);

  // 32 bit due to use of high registers.
  __ Lsls(R8, R1, R2);
  __ Lsrs(R0, R8, R2);
  __ Asrs(R0, R1, R8);

  EmitAndCheck(&assembler, "Shifts");
}
1206
// Verifies register-offset ldr/str encoding selection: the 16-bit form for
// low registers with no shift, 32-bit when a shifted index or a high
// register is used.
TEST(Thumb2AssemblerTest, LoadStoreRegOffset) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ ldr(R0, Address(R1, R2));
  __ str(R0, Address(R1, R2));

  // 32 bit due to shift.
  __ ldr(R0, Address(R1, R2, LSL, 1));
  __ str(R0, Address(R1, R2, LSL, 1));

  __ ldr(R0, Address(R1, R2, LSL, 3));
  __ str(R0, Address(R1, R2, LSL, 3));

  // 32 bit due to high register use.
  __ ldr(R8, Address(R1, R2));
  __ str(R8, Address(R1, R2));

  __ ldr(R1, Address(R8, R2));
  __ str(R2, Address(R8, R2));

  __ ldr(R0, Address(R1, R8));
  __ str(R0, Address(R1, R8));

  EmitAndCheck(&assembler, "LoadStoreRegOffset");
}
1233
// Verifies PC-relative (literal) ldr/str encodings: positive and negative
// offsets, and the 10-bit (16-bit encoding) vs 11-bit (32-bit encoding)
// offset limits.
TEST(Thumb2AssemblerTest, LoadStoreLiteral) {
  arm::Thumb2Assembler assembler;

  __ ldr(R0, Address(4));
  __ str(R0, Address(4));

  __ ldr(R0, Address(-8));
  __ str(R0, Address(-8));

  // Limits.
  __ ldr(R0, Address(0x3ff));  // 10 bits (16 bit).
  __ ldr(R0, Address(0x7ff));  // 11 bits (32 bit).
  __ str(R0, Address(0x3ff));  // 32 bit (no 16 bit str(literal)).
  __ str(R0, Address(0x7ff));  // 11 bits (32 bit).

  EmitAndCheck(&assembler, "LoadStoreLiteral");
}
1251
// Verifies the immediate-offset boundary between 16-bit and 32-bit
// load/store encodings for each access size (the 16-bit offset limit scales
// with the access size: 124 for words, 62 for halfwords, 31 for bytes).
// Sign-extending loads (ldrsb/ldrsh) are always 32 bit.
TEST(Thumb2AssemblerTest, LoadStoreLimits) {
  arm::Thumb2Assembler assembler;

  __ ldr(R0, Address(R4, 124));    // 16 bit.
  __ ldr(R0, Address(R4, 128));    // 32 bit.

  __ ldrb(R0, Address(R4, 31));    // 16 bit.
  __ ldrb(R0, Address(R4, 32));    // 32 bit.

  __ ldrh(R0, Address(R4, 62));    // 16 bit.
  __ ldrh(R0, Address(R4, 64));    // 32 bit.

  __ ldrsb(R0, Address(R4, 31));   // 32 bit.
  __ ldrsb(R0, Address(R4, 32));   // 32 bit.

  __ ldrsh(R0, Address(R4, 62));   // 32 bit.
  __ ldrsh(R0, Address(R4, 64));   // 32 bit.

  __ str(R0, Address(R4, 124));    // 16 bit.
  __ str(R0, Address(R4, 128));    // 32 bit.

  __ strb(R0, Address(R4, 31));    // 16 bit.
  __ strb(R0, Address(R4, 32));    // 32 bit.

  __ strh(R0, Address(R4, 62));    // 16 bit.
  __ strh(R0, Address(R4, 64));    // 32 bit.

  EmitAndCheck(&assembler, "LoadStoreLimits");
}
1281
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00001282TEST(Thumb2AssemblerTest, CompareAndBranch) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001283 arm::Thumb2Assembler assembler;
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00001284
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001285 Label label;
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00001286 __ CompareAndBranchIfZero(arm::R0, &label);
1287 __ CompareAndBranchIfZero(arm::R11, &label);
1288 __ CompareAndBranchIfNonZero(arm::R0, &label);
1289 __ CompareAndBranchIfNonZero(arm::R11, &label);
1290 __ Bind(&label);
1291
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001292 EmitAndCheck(&assembler, "CompareAndBranch");
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00001293}
1294
Dave Allison65fcc2c2014-04-28 13:45:27 -07001295#undef __
1296} // namespace arm
1297} // namespace art