blob: 5ae2cc28a27b7ef9151749aae6b367336ce212c5 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Nicolas Geoffray96f89a22014-07-11 10:57:49 +010017#include <dirent.h>
Andreas Gampefd114702015-05-13 17:00:41 -070018#include <errno.h>
Dave Allison65fcc2c2014-04-28 13:45:27 -070019#include <fstream>
Nicolas Geoffray96f89a22014-07-11 10:57:49 +010020#include <map>
Andreas Gampefd114702015-05-13 17:00:41 -070021#include <string.h>
22#include <sys/types.h>
Dave Allison65fcc2c2014-04-28 13:45:27 -070023
24#include "gtest/gtest.h"
25#include "utils/arm/assembler_thumb2.h"
26#include "base/hex_dump.h"
27#include "common_runtime_test.h"
28
29namespace art {
30namespace arm {
31
32// Include results file (generated manually)
33#include "assembler_thumb_test_expected.cc.inc"
34
#ifndef __ANDROID__
// This controls whether the results are printed to the
// screen or compared against the expected output.
// To generate new expected output, set this to true and
// copy the output into the .cc.inc file in the form
// of the other results.
//
// When this is false, the results are not printed to the
// output, but are compared against the expected results
// in the .cc.inc file.
//
// Host-only: the checking path needs as/objcopy/objdump, so the flag is
// compiled out entirely for device builds.
static constexpr bool kPrintResults = false;
#endif
Dave Allison65fcc2c2014-04-28 13:45:27 -070047
// Make sure ANDROID_DATA is defined for the runtime under test.
// If the variable is already present in the environment it is left untouched;
// otherwise it is pointed at /tmp.
void SetAndroidData() {
  if (getenv("ANDROID_DATA") == nullptr) {
    // Not set in this environment: fall back to a writable default.
    setenv("ANDROID_DATA", "/tmp", 1);
  }
}
54
// Compare two C strings, treating any run of whitespace as insignificant.
// Returns 0 when the strings are equal modulo whitespace; otherwise the
// difference between the first mismatching characters (strcmp-style sign).
//
// Fixes over the previous version:
//  - isspace() is given an unsigned char; passing a negative plain char
//    is undefined behavior per the C standard.
//  - Trailing whitespace on either string (e.g. the '\n' left by fgets())
//    is now skipped, so "abc" and "abc \n" compare equal.
int CompareIgnoringSpace(const char* s1, const char* s2) {
  while (*s1 != '\0') {
    while (isspace(static_cast<unsigned char>(*s1))) ++s1;
    while (isspace(static_cast<unsigned char>(*s2))) ++s2;
    if (*s1 == '\0' || *s1 != *s2) {
      break;
    }
    ++s1;
    ++s2;
  }
  // Skip trailing whitespace so strings differing only in their tails match.
  // (After a mismatch break both pointers sit on non-space chars, so these
  // loops are no-ops in that case.)
  while (isspace(static_cast<unsigned char>(*s1))) ++s1;
  while (isspace(static_cast<unsigned char>(*s2))) ++s2;
  return *s1 - *s2;
}
67
Vladimir Markocf93a5c2015-06-16 11:33:24 +000068void InitResults() {
69 if (test_results.empty()) {
70 setup_results();
71 }
72}
73
74std::string GetToolsDir() {
Andreas Gampec60e1b72015-07-30 08:57:50 -070075#ifndef __ANDROID__
Vladimir Markocf93a5c2015-06-16 11:33:24 +000076 // This will only work on the host. There is no as, objcopy or objdump on the device.
Dave Allison65fcc2c2014-04-28 13:45:27 -070077 static std::string toolsdir;
78
Vladimir Markocf93a5c2015-06-16 11:33:24 +000079 if (toolsdir.empty()) {
Dave Allison65fcc2c2014-04-28 13:45:27 -070080 setup_results();
David Srbecky3e52aa42015-04-12 07:45:18 +010081 toolsdir = CommonRuntimeTest::GetAndroidTargetToolsDir(kThumb2);
Dave Allison65fcc2c2014-04-28 13:45:27 -070082 SetAndroidData();
Dave Allison65fcc2c2014-04-28 13:45:27 -070083 }
84
Vladimir Markocf93a5c2015-06-16 11:33:24 +000085 return toolsdir;
86#else
87 return std::string();
88#endif
89}
90
91void DumpAndCheck(std::vector<uint8_t>& code, const char* testname, const char* const* results) {
Andreas Gampec60e1b72015-07-30 08:57:50 -070092#ifndef __ANDROID__
Vladimir Markocf93a5c2015-06-16 11:33:24 +000093 static std::string toolsdir = GetToolsDir();
94
Dave Allison65fcc2c2014-04-28 13:45:27 -070095 ScratchFile file;
96
97 const char* filename = file.GetFilename().c_str();
98
99 std::ofstream out(filename);
100 if (out) {
101 out << ".section \".text\"\n";
102 out << ".syntax unified\n";
103 out << ".arch armv7-a\n";
104 out << ".thumb\n";
105 out << ".thumb_func\n";
106 out << ".type " << testname << ", #function\n";
107 out << ".global " << testname << "\n";
108 out << testname << ":\n";
109 out << ".fnstart\n";
110
111 for (uint32_t i = 0 ; i < code.size(); ++i) {
112 out << ".byte " << (static_cast<int>(code[i]) & 0xff) << "\n";
113 }
114 out << ".fnend\n";
115 out << ".size " << testname << ", .-" << testname << "\n";
116 }
117 out.close();
118
Andreas Gampe4470c1d2014-07-21 18:32:59 -0700119 char cmd[1024];
Dave Allison65fcc2c2014-04-28 13:45:27 -0700120
121 // Assemble the .S
David Srbecky3e52aa42015-04-12 07:45:18 +0100122 snprintf(cmd, sizeof(cmd), "%sas %s -o %s.o", toolsdir.c_str(), filename, filename);
Andreas Gampefd114702015-05-13 17:00:41 -0700123 int cmd_result = system(cmd);
124 ASSERT_EQ(cmd_result, 0) << strerror(errno);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700125
126 // Remove the $d symbols to prevent the disassembler dumping the instructions
127 // as .word
David Srbecky3e52aa42015-04-12 07:45:18 +0100128 snprintf(cmd, sizeof(cmd), "%sobjcopy -N '$d' %s.o %s.oo", toolsdir.c_str(), filename, filename);
Andreas Gampefd114702015-05-13 17:00:41 -0700129 int cmd_result2 = system(cmd);
130 ASSERT_EQ(cmd_result2, 0) << strerror(errno);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700131
132 // Disassemble.
133
David Srbecky3e52aa42015-04-12 07:45:18 +0100134 snprintf(cmd, sizeof(cmd), "%sobjdump -d %s.oo | grep '^ *[0-9a-f][0-9a-f]*:'",
135 toolsdir.c_str(), filename);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700136 if (kPrintResults) {
137 // Print the results only, don't check. This is used to generate new output for inserting
138 // into the .inc file.
Andreas Gampefd114702015-05-13 17:00:41 -0700139 int cmd_result3 = system(cmd);
140 ASSERT_EQ(cmd_result3, 0) << strerror(errno);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700141 } else {
142 // Check the results match the appropriate results in the .inc file.
143 FILE *fp = popen(cmd, "r");
144 ASSERT_TRUE(fp != nullptr);
145
Dave Allison65fcc2c2014-04-28 13:45:27 -0700146 uint32_t lineindex = 0;
147
148 while (!feof(fp)) {
149 char testline[256];
150 char *s = fgets(testline, sizeof(testline), fp);
151 if (s == nullptr) {
152 break;
153 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000154 if (CompareIgnoringSpace(results[lineindex], testline) != 0) {
Dave Allison45fdb932014-06-25 12:37:10 -0700155 LOG(FATAL) << "Output is not as expected at line: " << lineindex
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000156 << results[lineindex] << "/" << testline;
Dave Allison45fdb932014-06-25 12:37:10 -0700157 }
Dave Allison65fcc2c2014-04-28 13:45:27 -0700158 ++lineindex;
159 }
160 // Check that we are at the end.
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000161 ASSERT_TRUE(results[lineindex] == nullptr);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700162 fclose(fp);
163 }
164
165 char buf[FILENAME_MAX];
166 snprintf(buf, sizeof(buf), "%s.o", filename);
167 unlink(buf);
168
169 snprintf(buf, sizeof(buf), "%s.oo", filename);
170 unlink(buf);
171#endif
172}
173
174#define __ assembler->
175
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000176void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname,
177 const char* const* results) {
178 __ FinalizeCode();
179 size_t cs = __ CodeSize();
180 std::vector<uint8_t> managed_code(cs);
181 MemoryRegion code(&managed_code[0], managed_code.size());
182 __ FinalizeInstructions(code);
183
184 DumpAndCheck(managed_code, testname, results);
185}
186
187void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname) {
188 InitResults();
189 std::map<std::string, const char* const*>::iterator results = test_results.find(testname);
190 ASSERT_NE(results, test_results.end());
191
192 EmitAndCheck(assembler, testname, results->second);
193}
194
195#undef __
196
197#define __ assembler.
198
// Register-to-register and immediate moves, low (R0/R1) and high (R8/R9)
// registers; encodings are checked against the golden disassembly.
TEST(Thumb2AssemblerTest, SimpleMov) {
  arm::Thumb2Assembler assembler;

  __ movs(R0, ShifterOperand(R1));
  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  __ mov(R0, ShifterOperand(1));
  __ mov(R8, ShifterOperand(9));

  EmitAndCheck(&assembler, "SimpleMov");
}
211
// Same moves as SimpleMov but with Force32Bit(), so all instructions must be
// emitted in their 32-bit encodings.
TEST(Thumb2AssemblerTest, SimpleMov32) {
  arm::Thumb2Assembler assembler;
  __ Force32Bit();

  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  EmitAndCheck(&assembler, "SimpleMov32");
}
221
// A small mixed sequence: mov plus flag-setting and non-flag-setting adds.
TEST(Thumb2AssemblerTest, SimpleMovAdd) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(R1));
  __ adds(R0, R1, ShifterOperand(R2));
  __ add(R0, R1, ShifterOperand(0));

  EmitAndCheck(&assembler, "SimpleMovAdd");
}
231
// Register-operand data-processing instructions: exercises 16-bit vs 32-bit
// encoding selection across low/high registers, flag-setting (s-suffix)
// variants, kCcKeep/kCcDontCare, and encoding flips inside IT blocks.
TEST(Thumb2AssemblerTest, DataProcessingRegister) {
  arm::Thumb2Assembler assembler;

  // 32 bit variants using low registers.
  __ mvn(R0, ShifterOperand(R1), AL, kCcKeep);
  __ add(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ sub(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ and_(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ orr(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ orn(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ eor(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ bic(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ adc(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ sbc(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ rsb(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ teq(R0, ShifterOperand(R1));

  // 16 bit variants using low registers.
  __ movs(R0, ShifterOperand(R1));
  __ mov(R0, ShifterOperand(R1), AL, kCcKeep);
  __ mvns(R0, ShifterOperand(R1));
  __ add(R0, R0, ShifterOperand(R1), AL, kCcKeep);
  __ adds(R0, R1, ShifterOperand(R2));
  __ subs(R0, R1, ShifterOperand(R2));
  __ adcs(R0, R0, ShifterOperand(R1));
  __ sbcs(R0, R0, ShifterOperand(R1));
  __ ands(R0, R0, ShifterOperand(R1));
  __ orrs(R0, R0, ShifterOperand(R1));
  __ eors(R0, R0, ShifterOperand(R1));
  __ bics(R0, R0, ShifterOperand(R1));
  __ tst(R0, ShifterOperand(R1));
  __ cmp(R0, ShifterOperand(R1));
  __ cmn(R0, ShifterOperand(R1));

  // 16-bit variants using high registers.
  __ mov(R1, ShifterOperand(R8), AL, kCcKeep);
  __ mov(R9, ShifterOperand(R0), AL, kCcKeep);
  __ mov(R8, ShifterOperand(R9), AL, kCcKeep);
  __ add(R1, R1, ShifterOperand(R8), AL, kCcKeep);
  __ add(R9, R9, ShifterOperand(R0), AL, kCcKeep);
  __ add(R8, R8, ShifterOperand(R9), AL, kCcKeep);
  __ cmp(R0, ShifterOperand(R9));
  __ cmp(R8, ShifterOperand(R1));
  __ cmp(R9, ShifterOperand(R8));

  // The 16-bit RSBS Rd, Rn, #0, also known as NEGS Rd, Rn is specified using
  // an immediate (0) but emitted without any, so we test it here.
  __ rsbs(R0, R1, ShifterOperand(0));
  __ rsbs(R0, R0, ShifterOperand(0));  // Check Rd == Rn code path.

  // 32 bit variants using high registers that would be 16-bit if using low registers.
  __ movs(R0, ShifterOperand(R8));
  __ mvns(R0, ShifterOperand(R8));
  __ add(R0, R1, ShifterOperand(R8), AL, kCcKeep);
  __ adds(R0, R1, ShifterOperand(R8));
  __ subs(R0, R1, ShifterOperand(R8));
  __ adcs(R0, R0, ShifterOperand(R8));
  __ sbcs(R0, R0, ShifterOperand(R8));
  __ ands(R0, R0, ShifterOperand(R8));
  __ orrs(R0, R0, ShifterOperand(R8));
  __ eors(R0, R0, ShifterOperand(R8));
  __ bics(R0, R0, ShifterOperand(R8));
  __ tst(R0, ShifterOperand(R8));
  __ cmn(R0, ShifterOperand(R8));
  __ rsbs(R0, R8, ShifterOperand(0));  // Check that this is not emitted as 16-bit.
  __ rsbs(R8, R8, ShifterOperand(0));  // Check that this is not emitted as 16-bit (Rd == Rn).

  // 32-bit variants of instructions that would be 16-bit outside IT block.
  __ it(arm::EQ);
  __ mvns(R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ adds(R0, R1, ShifterOperand(R2), arm::EQ);
  __ it(arm::EQ);
  __ subs(R0, R1, ShifterOperand(R2), arm::EQ);
  __ it(arm::EQ);
  __ adcs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ sbcs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ ands(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ orrs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ eors(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ bics(R0, R0, ShifterOperand(R1), arm::EQ);

  // 16-bit variants of instructions that would be 32-bit outside IT block.
  __ it(arm::EQ);
  __ mvn(R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ add(R0, R1, ShifterOperand(R2), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ sub(R0, R1, ShifterOperand(R2), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ adc(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ sbc(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ and_(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ orr(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ eor(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ bic(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);

  // 16 bit variants selected for the default kCcDontCare.
  __ mov(R0, ShifterOperand(R1));
  __ mvn(R0, ShifterOperand(R1));
  __ add(R0, R0, ShifterOperand(R1));
  __ add(R0, R1, ShifterOperand(R2));
  __ sub(R0, R1, ShifterOperand(R2));
  __ adc(R0, R0, ShifterOperand(R1));
  __ sbc(R0, R0, ShifterOperand(R1));
  __ and_(R0, R0, ShifterOperand(R1));
  __ orr(R0, R0, ShifterOperand(R1));
  __ eor(R0, R0, ShifterOperand(R1));
  __ bic(R0, R0, ShifterOperand(R1));
  __ mov(R1, ShifterOperand(R8));
  __ mov(R9, ShifterOperand(R0));
  __ mov(R8, ShifterOperand(R9));
  __ add(R1, R1, ShifterOperand(R8));
  __ add(R9, R9, ShifterOperand(R0));
  __ add(R8, R8, ShifterOperand(R9));
  __ rsb(R0, R1, ShifterOperand(0));
  __ rsb(R0, R0, ShifterOperand(0));

  // And an arbitrary 32-bit instruction using IP.
  __ add(R12, R1, ShifterOperand(R0), AL, kCcKeep);

  EmitAndCheck(&assembler, "DataProcessingRegister");
}
365
// Data-processing with small immediates (0x55 fits a modified-immediate
// encoding; 5 fits the 16-bit add/sub immediate forms).
TEST(Thumb2AssemblerTest, DataProcessingImmediate) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x55));
  __ mvn(R0, ShifterOperand(0x55));
  __ add(R0, R1, ShifterOperand(0x55));
  __ sub(R0, R1, ShifterOperand(0x55));
  __ and_(R0, R1, ShifterOperand(0x55));
  __ orr(R0, R1, ShifterOperand(0x55));
  __ orn(R0, R1, ShifterOperand(0x55));
  __ eor(R0, R1, ShifterOperand(0x55));
  __ bic(R0, R1, ShifterOperand(0x55));
  __ adc(R0, R1, ShifterOperand(0x55));
  __ sbc(R0, R1, ShifterOperand(0x55));
  __ rsb(R0, R1, ShifterOperand(0x55));

  __ tst(R0, ShifterOperand(0x55));
  __ teq(R0, ShifterOperand(0x55));
  __ cmp(R0, ShifterOperand(0x55));
  __ cmn(R0, ShifterOperand(0x55));

  __ add(R0, R1, ShifterOperand(5));
  __ sub(R0, R1, ShifterOperand(5));

  __ movs(R0, ShifterOperand(0x55));
  __ mvns(R0, ShifterOperand(0x55));

  __ adds(R0, R1, ShifterOperand(5));
  __ subs(R0, R1, ShifterOperand(5));

  EmitAndCheck(&assembler, "DataProcessingImmediate");
}
398
// Data-processing with a modified immediate (0x550055: a byte replicated in
// two half-words), which requires the 32-bit encodings.
TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediate) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x550055));
  __ mvn(R0, ShifterOperand(0x550055));
  __ add(R0, R1, ShifterOperand(0x550055));
  __ sub(R0, R1, ShifterOperand(0x550055));
  __ and_(R0, R1, ShifterOperand(0x550055));
  __ orr(R0, R1, ShifterOperand(0x550055));
  __ orn(R0, R1, ShifterOperand(0x550055));
  __ eor(R0, R1, ShifterOperand(0x550055));
  __ bic(R0, R1, ShifterOperand(0x550055));
  __ adc(R0, R1, ShifterOperand(0x550055));
  __ sbc(R0, R1, ShifterOperand(0x550055));
  __ rsb(R0, R1, ShifterOperand(0x550055));

  __ tst(R0, ShifterOperand(0x550055));
  __ teq(R0, ShifterOperand(0x550055));
  __ cmp(R0, ShifterOperand(0x550055));
  __ cmn(R0, ShifterOperand(0x550055));

  EmitAndCheck(&assembler, "DataProcessingModifiedImmediate");
}
422
423
// Covers the distinct modified-immediate patterns: replicated bytes and
// rotated 8-bit constants at the extreme rotation positions.
TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediates) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x550055));
  __ mov(R0, ShifterOperand(0x55005500));
  __ mov(R0, ShifterOperand(0x55555555));
  __ mov(R0, ShifterOperand(0xd5000000));  // rotated to first position
  __ mov(R0, ShifterOperand(0x6a000000));  // rotated to second position
  __ mov(R0, ShifterOperand(0x350));  // rotated to 2nd last position
  __ mov(R0, ShifterOperand(0x1a8));  // rotated to last position

  EmitAndCheck(&assembler, "DataProcessingModifiedImmediates");
}
437
// Shifted-register operands via mov: 16-bit forms for LSL/LSR/ASR on low
// registers, and the 32-bit forms required for ROR, RRX, kCcKeep and high
// destination registers.
TEST(Thumb2AssemblerTest, DataProcessingShiftedRegister) {
  arm::Thumb2Assembler assembler;

  // 16-bit variants.
  __ movs(R3, ShifterOperand(R4, LSL, 4));
  __ movs(R3, ShifterOperand(R4, LSR, 5));
  __ movs(R3, ShifterOperand(R4, ASR, 6));

  // 32-bit ROR because ROR immediate doesn't have the same 16-bit version as other shifts.
  __ movs(R3, ShifterOperand(R4, ROR, 7));

  // 32-bit RRX because RRX has no 16-bit version.
  __ movs(R3, ShifterOperand(R4, RRX));

  // 32 bit variants (not setting condition codes).
  __ mov(R3, ShifterOperand(R4, LSL, 4), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, LSR, 5), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, ASR, 6), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, ROR, 7), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, RRX), AL, kCcKeep);

  // 32 bit variants (high registers).
  __ movs(R8, ShifterOperand(R4, LSL, 4));
  __ movs(R8, ShifterOperand(R4, LSR, 5));
  __ movs(R8, ShifterOperand(R4, ASR, 6));
  __ movs(R8, ShifterOperand(R4, ROR, 7));
  __ movs(R8, ShifterOperand(R4, RRX));

  EmitAndCheck(&assembler, "DataProcessingShiftedRegister");
}
468
// Same encodings as DataProcessingShiftedRegister, produced through the
// dedicated shift helpers (Lsl/Lsr/Asr/Ror/Rrx) instead of mov() — the golden
// output must match that test's byte-for-byte.
TEST(Thumb2AssemblerTest, ShiftImmediate) {
  // Note: This test produces the same results as DataProcessingShiftedRegister
  // but it does so using shift functions instead of mov().
  arm::Thumb2Assembler assembler;

  // 16-bit variants.
  __ Lsl(R3, R4, 4);
  __ Lsr(R3, R4, 5);
  __ Asr(R3, R4, 6);

  // 32-bit ROR because ROR immediate doesn't have the same 16-bit version as other shifts.
  __ Ror(R3, R4, 7);

  // 32-bit RRX because RRX has no 16-bit version.
  __ Rrx(R3, R4);

  // 32 bit variants (not setting condition codes).
  __ Lsl(R3, R4, 4, AL, kCcKeep);
  __ Lsr(R3, R4, 5, AL, kCcKeep);
  __ Asr(R3, R4, 6, AL, kCcKeep);
  __ Ror(R3, R4, 7, AL, kCcKeep);
  __ Rrx(R3, R4, AL, kCcKeep);

  // 32 bit variants (high registers).
  __ Lsls(R8, R4, 4);
  __ Lsrs(R8, R4, 5);
  __ Asrs(R8, R4, 6);
  __ Rors(R8, R4, 7);
  __ Rrxs(R8, R4);

  EmitAndCheck(&assembler, "ShiftImmediate");
}
Dave Allison65fcc2c2014-04-28 13:45:27 -0700501
// Positive-offset loads of every width; low registers plus SP-relative form,
// then the 32-bit encodings forced by a high destination register.
TEST(Thumb2AssemblerTest, BasicLoad) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, 24));
  __ ldrb(R3, Address(R4, 24));
  __ ldrh(R3, Address(R4, 24));
  __ ldrsb(R3, Address(R4, 24));
  __ ldrsh(R3, Address(R4, 24));

  __ ldr(R3, Address(SP, 24));

  // 32 bit variants
  __ ldr(R8, Address(R4, 24));
  __ ldrb(R8, Address(R4, 24));
  __ ldrh(R8, Address(R4, 24));
  __ ldrsb(R8, Address(R4, 24));
  __ ldrsh(R8, Address(R4, 24));

  EmitAndCheck(&assembler, "BasicLoad");
}
522
523
// Positive-offset stores (word/byte/half-word); low registers, SP-relative,
// and the 32-bit encodings forced by a high source register.
TEST(Thumb2AssemblerTest, BasicStore) {
  arm::Thumb2Assembler assembler;

  __ str(R3, Address(R4, 24));
  __ strb(R3, Address(R4, 24));
  __ strh(R3, Address(R4, 24));

  __ str(R3, Address(SP, 24));

  // 32 bit variants.
  __ str(R8, Address(R4, 24));
  __ strb(R8, Address(R4, 24));
  __ strh(R8, Address(R4, 24));

  EmitAndCheck(&assembler, "BasicStore");
}
540
// Every load width crossed with all six addressing modes
// (offset/pre-index/post-index, each in positive and negative form).
TEST(Thumb2AssemblerTest, ComplexLoad) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexLoad");
}
581
582
// Every store width crossed with all six addressing modes
// (offset/pre-index/post-index, each in positive and negative form).
TEST(Thumb2AssemblerTest, ComplexStore) {
  arm::Thumb2Assembler assembler;

  __ str(R3, Address(R4, 24, Address::Mode::Offset));
  __ str(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, 24, Address::Mode::Offset));
  __ strb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, 24, Address::Mode::Offset));
  __ strh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexStore");
}
609
// Same load/store x addressing-mode matrix as ComplexLoad/ComplexStore, but
// with a negative immediate offset (-24), exercising sign handling per mode.
TEST(Thumb2AssemblerTest, NegativeLoadStore) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ str(R3, Address(R4, -24, Address::Mode::Offset));
  __ str(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, -24, Address::Mode::Offset));
  __ strb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, -24, Address::Mode::Offset));
  __ strh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "NegativeLoadStore");
}
671
// Minimal strd/ldrd pair with a plain positive offset.
TEST(Thumb2AssemblerTest, SimpleLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));

  EmitAndCheck(&assembler, "SimpleLoadStoreDual");
}
680
// Dual-register load/store across all six addressing modes, positive offset.
TEST(Thumb2AssemblerTest, ComplexLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ strd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexLoadStoreDual");
}
700
// Dual-register load/store across all six addressing modes with a negative
// immediate offset (-24).
TEST(Thumb2AssemblerTest, NegativeLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, -24, Address::Mode::Offset));
  __ strd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, -24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "NegativeLoadStoreDual");
}
720
// Short-range branches: backward and forward, unconditional and conditional,
// plus two branches linked to a single label (the assembler chains unresolved
// uses until the label is bound).
TEST(Thumb2AssemblerTest, SimpleBranch) {
  arm::Thumb2Assembler assembler;

  // Backward unconditional branch to an already-bound label.
  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);
  // Forward unconditional branch, label bound later.
  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  // Backward conditional branch.
  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  // Forward conditional branch.
  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  EmitAndCheck(&assembler, "SimpleBranch");
}
758
// Same branch patterns as SimpleBranch, but with Force32Bit() so every
// instruction (including the branches) uses the 32-bit Thumb-2 encoding.
TEST(Thumb2AssemblerTest, LongBranch) {
  arm::Thumb2Assembler assembler;
  __ Force32Bit();
  // 32 bit branches.
  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);

  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  EmitAndCheck(&assembler, "LongBranch");
}
798
// ldm encoding selection: 16-bit form for low registers, 32-bit form when
// high registers (R11/LR) are in the list, and the single-register special
// case which the assembler converts to a plain ldr.
TEST(Thumb2AssemblerTest, LoadMultiple) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ ldm(DB_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ ldm(DB_W, R4, (1 << LR | 1 << R11));
  __ ldm(DB, R4, (1 << LR | 1 << R11));

  // Single reg is converted to ldr
  __ ldm(DB_W, R4, (1 << R5));

  EmitAndCheck(&assembler, "LoadMultiple");
}
814
// stm counterpart of LoadMultiple: 16-bit vs 32-bit register lists and the
// single-register list that is converted to str.
TEST(Thumb2AssemblerTest, StoreMultiple) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ stm(IA_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ stm(IA_W, R4, (1 << LR | 1 << R11));
  __ stm(IA, R4, (1 << LR | 1 << R11));

  // Single reg is converted to str
  __ stm(IA_W, R4, (1 << R5));
  __ stm(IA, R4, (1 << R5));

  EmitAndCheck(&assembler, "StoreMultiple");
}
831
// movw/movt immediate moves across low/high registers and the full 16-bit
// immediate range (0, small, mid, 0xffff).
TEST(Thumb2AssemblerTest, MovWMovT) {
  arm::Thumb2Assembler assembler;

  // Always 32 bit.
  __ movw(R4, 0);
  __ movw(R4, 0x34);
  __ movw(R9, 0x34);
  __ movw(R3, 0x1234);
  __ movw(R9, 0xffff);

  // Always 32 bit.
  __ movt(R0, 0);
  __ movt(R0, 0x1234);
  __ movt(R1, 0xffff);

  EmitAndCheck(&assembler, "MovWMovT");
}
849
// add/sub with SP as a source or destination: covers the dedicated 16-bit
// SP-relative encodings and the fallbacks to 32-bit forms when the immediate
// is too large or a high register is involved.
TEST(Thumb2AssemblerTest, SpecialAddSub) {
  arm::Thumb2Assembler assembler;

  __ add(R2, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(SP, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ add(R2, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ add(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ add(SP, SP, ShifterOperand(0xffc));  // 32 bit due to imm size; encoding T4.

  __ sub(SP, SP, ShifterOperand(0x50));   // 16 bit
  __ sub(R0, SP, ShifterOperand(0x50));   // 32 bit
  __ sub(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ sub(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size
  __ sub(SP, SP, ShifterOperand(0xffc));  // 32 bit due to imm size; encoding T4.

  EmitAndCheck(&assembler, "SpecialAddSub");
}
870
// LoadFromOffset macro-instruction: for each load size, offsets that fit the
// immediate field directly and offsets that force the assembler to materialize
// the address (including the Rd == Rn base-register case and negative offsets).
TEST(Thumb2AssemblerTest, LoadFromOffset) {
  arm::Thumb2Assembler assembler;

  __ LoadFromOffset(kLoadWord, R2, R4, 12);
  __ LoadFromOffset(kLoadWord, R2, R4, 0xfff);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x1000);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x1000a4);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x101000);
  __ LoadFromOffset(kLoadWord, R4, R4, 0x101000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 12);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0xfff);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000a4);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x101000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R4, R4, 0x101000);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 12);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x3fc);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400a4);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x40400);
  __ LoadFromOffset(kLoadWordPair, R4, R4, 0x40400);

  __ LoadFromOffset(kLoadWord, R0, R12, 12);  // 32-bit because of R12.
  __ LoadFromOffset(kLoadWord, R2, R4, 0xa4 - 0x100000);

  __ LoadFromOffset(kLoadSignedByte, R2, R4, 12);
  __ LoadFromOffset(kLoadUnsignedByte, R2, R4, 12);
  __ LoadFromOffset(kLoadSignedHalfword, R2, R4, 12);

  EmitAndCheck(&assembler, "LoadFromOffset");
}
902
// StoreToOffset macro-instruction: mirror of LoadFromOffset for stores,
// including out-of-range offsets, Rd == Rn, a high base register (R12), and a
// negative offset.
TEST(Thumb2AssemblerTest, StoreToOffset) {
  arm::Thumb2Assembler assembler;

  __ StoreToOffset(kStoreWord, R2, R4, 12);
  __ StoreToOffset(kStoreWord, R2, R4, 0xfff);
  __ StoreToOffset(kStoreWord, R2, R4, 0x1000);
  __ StoreToOffset(kStoreWord, R2, R4, 0x1000a4);
  __ StoreToOffset(kStoreWord, R2, R4, 0x101000);
  __ StoreToOffset(kStoreWord, R4, R4, 0x101000);
  __ StoreToOffset(kStoreHalfword, R2, R4, 12);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0xfff);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000a4);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x101000);
  __ StoreToOffset(kStoreHalfword, R4, R4, 0x101000);
  __ StoreToOffset(kStoreWordPair, R2, R4, 12);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x3fc);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x400);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x400a4);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x40400);
  __ StoreToOffset(kStoreWordPair, R4, R4, 0x40400);

  __ StoreToOffset(kStoreWord, R0, R12, 12);  // 32-bit because of R12.
  __ StoreToOffset(kStoreWord, R2, R4, 0xa4 - 0x100000);

  __ StoreToOffset(kStoreByte, R2, R4, 12);

  EmitAndCheck(&assembler, "StoreToOffset");
}
932
// IT (if-then) blocks from one to four instructions, covering every
// then/else pattern used; each instruction in the block carries the condition
// implied by its IT slot.
TEST(Thumb2AssemblerTest, IfThen) {
  arm::Thumb2Assembler assembler;

  // IT: single conditional instruction.
  __ it(EQ);
  __ mov(R1, ShifterOperand(1), EQ);

  // ITT.
  __ it(EQ, kItThen);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);

  // ITE.
  __ it(EQ, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);

  // ITTE.
  __ it(EQ, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), NE);

  // ITEE.
  __ it(EQ, kItElse, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);
  __ mov(R3, ShifterOperand(3), NE);

  // ITTTE: full four-slot block.
  __ it(EQ, kItThen, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), EQ);
  __ mov(R4, ShifterOperand(4), NE);

  EmitAndCheck(&assembler, "IfThen");
}
965
// cbz/cbnz (compare-and-branch-on-zero) with short forward targets; the
// second sequence includes a high-register mov (R8) between the branch and
// its target.
TEST(Thumb2AssemblerTest, CbzCbnz) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R2, &l1);
  __ mov(R1, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l1);
  __ mov(R2, ShifterOperand(4));

  Label l2;
  __ cbnz(R2, &l2);
  __ mov(R8, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l2);
  __ mov(R2, ShifterOperand(4));

  EmitAndCheck(&assembler, "CbzCbnz");
}
985
// Multiply family: mul (low and high registers, Rd == Rn and distinct),
// multiply-accumulate (mla), multiply-subtract (mls), and 64-bit unsigned
// multiply (umull).
TEST(Thumb2AssemblerTest, Multiply) {
  arm::Thumb2Assembler assembler;

  __ mul(R0, R1, R0);
  __ mul(R0, R1, R2);
  __ mul(R8, R9, R8);
  __ mul(R8, R9, R10);

  __ mla(R0, R1, R2, R3);
  __ mla(R8, R9, R8, R9);

  __ mls(R0, R1, R2, R3);
  __ mls(R8, R9, R8, R9);

  __ umull(R0, R1, R2, R3);
  __ umull(R8, R9, R10, R11);

  EmitAndCheck(&assembler, "Multiply");
}
1005
// Signed and unsigned hardware divide (sdiv/udiv) with low and high registers.
TEST(Thumb2AssemblerTest, Divide) {
  arm::Thumb2Assembler assembler;

  __ sdiv(R0, R1, R2);
  __ sdiv(R8, R9, R10);

  __ udiv(R0, R1, R2);
  __ udiv(R8, R9, R10);

  EmitAndCheck(&assembler, "Divide");
}
1017
// VFP moves: immediate (vmov.f32/.f64 with encodable constant 1.0) and
// register-to-register, in both single and double precision.
TEST(Thumb2AssemblerTest, VMov) {
  arm::Thumb2Assembler assembler;

  __ vmovs(S1, 1.0);
  __ vmovd(D1, 1.0);

  __ vmovs(S1, S2);
  __ vmovd(D1, D2);

  EmitAndCheck(&assembler, "VMov");
}
1029
1030
// Basic VFP arithmetic (add/sub/mul/mla/mls/div/abs/neg/sqrt) in single
// precision, then the identical set in double precision.
TEST(Thumb2AssemblerTest, BasicFloatingPoint) {
  arm::Thumb2Assembler assembler;

  // Single precision.
  __ vadds(S0, S1, S2);
  __ vsubs(S0, S1, S2);
  __ vmuls(S0, S1, S2);
  __ vmlas(S0, S1, S2);
  __ vmlss(S0, S1, S2);
  __ vdivs(S0, S1, S2);
  __ vabss(S0, S1);
  __ vnegs(S0, S1);
  __ vsqrts(S0, S1);

  // Double precision.
  __ vaddd(D0, D1, D2);
  __ vsubd(D0, D1, D2);
  __ vmuld(D0, D1, D2);
  __ vmlad(D0, D1, D2);
  __ vmlsd(D0, D1, D2);
  __ vdivd(D0, D1, D2);
  __ vabsd(D0, D1);
  __ vnegd(D0, D1);
  __ vsqrtd(D0, D1);

  EmitAndCheck(&assembler, "BasicFloatingPoint");
}
1056
// VFP conversions: single<->double, and float<->signed/unsigned integer in
// both precisions.
TEST(Thumb2AssemblerTest, FloatingPointConversions) {
  arm::Thumb2Assembler assembler;

  // Between precisions.
  __ vcvtsd(S2, D2);
  __ vcvtds(D2, S2);

  // Single <-> signed int.
  __ vcvtis(S1, S2);
  __ vcvtsi(S1, S2);

  // Double <-> signed int.
  __ vcvtid(S1, D2);
  __ vcvtdi(D1, S2);

  // Single <-> unsigned int.
  __ vcvtus(S1, S2);
  __ vcvtsu(S1, S2);

  // Double <-> unsigned int.
  __ vcvtud(S1, D2);
  __ vcvtdu(D1, S2);

  EmitAndCheck(&assembler, "FloatingPointConversions");
}
1077
// VFP comparisons: register-register (vcmps/vcmpd) and compare-against-zero
// (vcmpsz/vcmpdz) in both precisions.
TEST(Thumb2AssemblerTest, FloatingPointComparisons) {
  arm::Thumb2Assembler assembler;

  __ vcmps(S0, S1);
  __ vcmpd(D0, D1);

  __ vcmpsz(S2);
  __ vcmpdz(D2);

  EmitAndCheck(&assembler, "FloatingPointComparisons");
}
1089
// Register-indirect call (blx) and return (bx) through LR.
TEST(Thumb2AssemblerTest, Calls) {
  arm::Thumb2Assembler assembler;

  __ blx(LR);
  __ bx(LR);

  EmitAndCheck(&assembler, "Calls");
}
1098
// Software breakpoint instruction with immediate 0.
TEST(Thumb2AssemblerTest, Breakpoint) {
  arm::Thumb2Assembler assembler;

  __ bkpt(0);

  EmitAndCheck(&assembler, "Breakpoint");
}
1106
// SP-relative str of R1 with a small offset (68) and one past the 16-bit
// encoding range (1068).
TEST(Thumb2AssemblerTest, StrR1) {
  arm::Thumb2Assembler assembler;

  __ str(R1, Address(SP, 68));
  __ str(R1, Address(SP, 1068));

  EmitAndCheck(&assembler, "StrR1");
}
1115
// vpush/vpop of 4 consecutive VFP registers, single (from S2) and double
// (from D2) precision.
TEST(Thumb2AssemblerTest, VPushPop) {
  arm::Thumb2Assembler assembler;

  __ vpushs(S2, 4);
  __ vpushd(D2, 4);

  __ vpops(S2, 4);
  __ vpopd(D2, 4);

  EmitAndCheck(&assembler, "VPushPop");
}
1127
// Forward branch over exactly (1 << 11) bytes of filler — the maximum range
// still encodable as a 16-bit unconditional branch.
TEST(Thumb2AssemblerTest, Max16BitBranch) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ b(&l1);
  // Each 16-bit mov is 2 bytes; this emits (1 << 11) bytes of filler.
  for (int i = 0 ; i < (1 << 11) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "Max16BitBranch");
}
1141
// Forward branch just past the 16-bit range ((1 << 11) + 2 bytes of filler),
// forcing the assembler to use the 32-bit branch encoding.
TEST(Thumb2AssemblerTest, Branch32) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ b(&l1);
  // Two bytes more filler than Max16BitBranch: exceeds the 16-bit range.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "Branch32");
}
1155
// cbz at the maximum distance ((1 << 7) bytes) that still fits the cbz
// immediate field, so no relocation is required.
TEST(Thumb2AssemblerTest, CompareAndBranchMax) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R4, &l1);
  // (1 << 7) bytes of 2-byte filler instructions.
  for (int i = 0 ; i < (1 << 7) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "CompareAndBranchMax");
}
1169
// cbz target just beyond the cbz range: the assembler must rewrite it, here
// still reachable with a 16-bit branch after relocation.
TEST(Thumb2AssemblerTest, CompareAndBranchRelocation16) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R4, &l1);
  // Two bytes more filler than CompareAndBranchMax: out of cbz range.
  for (int i = 0 ; i < (1 << 7) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "CompareAndBranchRelocation16");
}
1183
// cbz target beyond even the 16-bit branch range, so relocation must fall
// back to a 32-bit branch.
TEST(Thumb2AssemblerTest, CompareAndBranchRelocation32) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R4, &l1);
  // (1 << 11) + 2 bytes of filler: past both cbz and 16-bit branch ranges.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "CompareAndBranchRelocation32");
}
1197
// Forward and backward branches crossing the same out-of-range filler block,
// so both directions require the 32-bit encoding.
TEST(Thumb2AssemblerTest, MixedBranch32) {
  arm::Thumb2Assembler assembler;

  Label l1;
  Label l2;
  __ b(&l1);   // Forwards.
  __ Bind(&l2);

  // Space to force relocation.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ b(&l2);   // Backwards.
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "MixedBranch32");
}
1216
// Shift and rotate operations (Lsl/Lsr/Asr/Ror), covering the 16-bit vs
// 32-bit encoding choice driven by: the condition-code setting mode
// (kCcDontCare / kCcSet via the *s forms / kCcKeep), high-register operands,
// Rd != Rn for register-shift forms, and ROR-immediate having no 16-bit
// encoding.
TEST(Thumb2AssemblerTest, Shifts) {
  arm::Thumb2Assembler assembler;

  // 16 bit selected for CcDontCare.
  __ Lsl(R0, R1, 5);
  __ Lsr(R0, R1, 5);
  __ Asr(R0, R1, 5);

  __ Lsl(R0, R0, R1);
  __ Lsr(R0, R0, R1);
  __ Asr(R0, R0, R1);
  __ Ror(R0, R0, R1);

  // 16 bit with kCcSet.
  __ Lsls(R0, R1, 5);
  __ Lsrs(R0, R1, 5);
  __ Asrs(R0, R1, 5);

  __ Lsls(R0, R0, R1);
  __ Lsrs(R0, R0, R1);
  __ Asrs(R0, R0, R1);
  __ Rors(R0, R0, R1);

  // 32-bit with kCcKeep.
  __ Lsl(R0, R1, 5, AL, kCcKeep);
  __ Lsr(R0, R1, 5, AL, kCcKeep);
  __ Asr(R0, R1, 5, AL, kCcKeep);

  __ Lsl(R0, R0, R1, AL, kCcKeep);
  __ Lsr(R0, R0, R1, AL, kCcKeep);
  __ Asr(R0, R0, R1, AL, kCcKeep);
  __ Ror(R0, R0, R1, AL, kCcKeep);

  // 32-bit because ROR immediate doesn't have a 16-bit version like the other shifts.
  __ Ror(R0, R1, 5);
  __ Rors(R0, R1, 5);
  __ Ror(R0, R1, 5, AL, kCcKeep);

  // 32 bit due to high registers.
  __ Lsl(R8, R1, 5);
  __ Lsr(R0, R8, 5);
  __ Asr(R8, R1, 5);
  __ Ror(R0, R8, 5);

  // 32 bit due to different Rd and Rn.
  __ Lsl(R0, R1, R2);
  __ Lsr(R0, R1, R2);
  __ Asr(R0, R1, R2);
  __ Ror(R0, R1, R2);

  // 32 bit due to use of high registers.
  __ Lsl(R8, R1, R2);
  __ Lsr(R0, R8, R2);
  __ Asr(R0, R1, R8);

  // S bit (all 32 bit)

  // 32 bit due to high registers.
  __ Lsls(R8, R1, 5);
  __ Lsrs(R0, R8, 5);
  __ Asrs(R8, R1, 5);
  __ Rors(R0, R8, 5);

  // 32 bit due to different Rd and Rn.
  __ Lsls(R0, R1, R2);
  __ Lsrs(R0, R1, R2);
  __ Asrs(R0, R1, R2);
  __ Rors(R0, R1, R2);

  // 32 bit due to use of high registers.
  __ Lsls(R8, R1, R2);
  __ Lsrs(R0, R8, R2);
  __ Asrs(R0, R1, R8);

  EmitAndCheck(&assembler, "Shifts");
}
1293
// Register-offset ldr/str: 16-bit form for plain low-register [Rn, Rm], and
// the 32-bit form when a shifted index or a high register forces it.
TEST(Thumb2AssemblerTest, LoadStoreRegOffset) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ ldr(R0, Address(R1, R2));
  __ str(R0, Address(R1, R2));

  // 32 bit due to shift.
  __ ldr(R0, Address(R1, R2, LSL, 1));
  __ str(R0, Address(R1, R2, LSL, 1));

  __ ldr(R0, Address(R1, R2, LSL, 3));
  __ str(R0, Address(R1, R2, LSL, 3));

  // 32 bit due to high register use.
  __ ldr(R8, Address(R1, R2));
  __ str(R8, Address(R1, R2));

  __ ldr(R1, Address(R8, R2));
  __ str(R2, Address(R8, R2));

  __ ldr(R0, Address(R1, R8));
  __ str(R0, Address(R1, R8));

  EmitAndCheck(&assembler, "LoadStoreRegOffset");
}
1320
// PC-relative (literal) ldr/str: positive and negative offsets, plus the
// encoding-size limits (10-bit 16-bit form vs 11-bit 32-bit form; str has no
// 16-bit literal form at all).
TEST(Thumb2AssemblerTest, LoadStoreLiteral) {
  arm::Thumb2Assembler assembler;

  __ ldr(R0, Address(4));
  __ str(R0, Address(4));

  __ ldr(R0, Address(-8));
  __ str(R0, Address(-8));

  // Limits.
  __ ldr(R0, Address(0x3ff));  // 10 bits (16 bit).
  __ ldr(R0, Address(0x7ff));  // 11 bits (32 bit).
  __ str(R0, Address(0x3ff));  // 32 bit (no 16 bit str(literal)).
  __ str(R0, Address(0x7ff));  // 11 bits (32 bit).

  EmitAndCheck(&assembler, "LoadStoreLiteral");
}
1338
// Immediate-offset load/store size boundaries: for each access width, the
// largest offset that still fits the 16-bit encoding and the first one that
// requires 32 bits (signed loads are always 32-bit).
TEST(Thumb2AssemblerTest, LoadStoreLimits) {
  arm::Thumb2Assembler assembler;

  __ ldr(R0, Address(R4, 124));     // 16 bit.
  __ ldr(R0, Address(R4, 128));     // 32 bit.

  __ ldrb(R0, Address(R4, 31));     // 16 bit.
  __ ldrb(R0, Address(R4, 32));     // 32 bit.

  __ ldrh(R0, Address(R4, 62));     // 16 bit.
  __ ldrh(R0, Address(R4, 64));     // 32 bit.

  __ ldrsb(R0, Address(R4, 31));    // 32 bit.
  __ ldrsb(R0, Address(R4, 32));    // 32 bit.

  __ ldrsh(R0, Address(R4, 62));    // 32 bit.
  __ ldrsh(R0, Address(R4, 64));    // 32 bit.

  __ str(R0, Address(R4, 124));     // 16 bit.
  __ str(R0, Address(R4, 128));     // 32 bit.

  __ strb(R0, Address(R4, 31));     // 16 bit.
  __ strb(R0, Address(R4, 32));     // 32 bit.

  __ strh(R0, Address(R4, 62));     // 16 bit.
  __ strh(R0, Address(R4, 64));     // 32 bit.

  EmitAndCheck(&assembler, "LoadStoreLimits");
}
1368
// CompareAndBranchIfZero / CompareAndBranchIfNonZero macro-instructions with
// low (R0) and high (R11) registers, all targeting the same forward label.
TEST(Thumb2AssemblerTest, CompareAndBranch) {
  arm::Thumb2Assembler assembler;

  Label label;
  __ CompareAndBranchIfZero(arm::R0, &label);
  __ CompareAndBranchIfZero(arm::R11, &label);
  __ CompareAndBranchIfNonZero(arm::R0, &label);
  __ CompareAndBranchIfNonZero(arm::R11, &label);
  __ Bind(&label);

  EmitAndCheck(&assembler, "CompareAndBranch");
}
1381
Dave Allison65fcc2c2014-04-28 13:45:27 -07001382#undef __
1383} // namespace arm
1384} // namespace art