/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <fstream>

#include "gtest/gtest.h"
#include "utils/arm/assembler_thumb2.h"
#include "base/hex_dump.h"
#include "common_runtime_test.h"

namespace art {
namespace arm {

// Include results file (generated manually).
#include "assembler_thumb_test_expected.cc.inc"
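// The .inc file is expected to define setup_results() and a
// std::map<std::string, const char**> test_results mapping each test name to its
// expected, nullptr-terminated list of objdump output lines (see dump() below).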

#ifndef HAVE_ANDROID_OS
static constexpr bool kPrintResults = false;
#endif

void SetAndroidData() {
  const char* data = getenv("ANDROID_DATA");
  if (data == nullptr) {
    setenv("ANDROID_DATA", "/tmp", 1);
  }
}

std::string GetAndroidToolsDir() {
  std::string root;
  const char* android_build_top = getenv("ANDROID_BUILD_TOP");
  if (android_build_top != nullptr) {
    root += android_build_top;
  } else {
    // Not set by build server, so default to current directory.
    char* cwd = getcwd(nullptr, 0);
    setenv("ANDROID_BUILD_TOP", cwd, 1);
    root += cwd;
    free(cwd);
  }

  // Look for "prebuilts".
  std::string toolsdir = root;
  struct stat st;
  while (toolsdir != "") {
    std::string prebuilts = toolsdir + "/prebuilts";
    if (stat(prebuilts.c_str(), &st) == 0) {
      // Found prebuilts.
      toolsdir += "/prebuilts/gcc/linux-x86/arm";
      break;
    }
    // Not present, move up one dir.
    size_t slash = toolsdir.rfind('/');
    if (slash == std::string::npos) {
      toolsdir = "";
    } else {
      // Strip the last path component (substr up to, but not including, the slash).
      toolsdir = toolsdir.substr(0, slash);
    }
  }
  bool statok = stat(toolsdir.c_str(), &st) == 0;
  if (!statok) {
    return "";  // Use path.
  }

  DIR* dir = opendir(toolsdir.c_str());
  if (dir == nullptr) {
    return "";  // Use path.
  }

  struct dirent* entry;
  std::string founddir;
  double maxversion = 0;

  // Find the latest version of the arm-eabi tools (biggest version number).
  // Suffix on toolsdir will be something like "arm-eabi-4.8".
  while ((entry = readdir(dir)) != nullptr) {
    std::string subdir = toolsdir + std::string("/") + std::string(entry->d_name);
    size_t eabi = subdir.find("arm-eabi-");
    if (eabi != std::string::npos) {
      // Skip past "arm-eabi-" (sizeof would also count the trailing NUL and skip a digit).
      std::string suffix = subdir.substr(eabi + strlen("arm-eabi-"));
      double version = strtod(suffix.c_str(), nullptr);
      if (version > maxversion) {
        maxversion = version;
        founddir = subdir;
      }
    }
  }
  closedir(dir);
  bool found = founddir != "";
  if (!found) {
    return "";  // Use path.
  }

  return founddir + "/bin/";
}

void dump(std::vector<uint8_t>& code, const char* testname) {
  // This will only work on the host.  There is no as, objcopy or objdump on the
  // device.
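  // The flow below: emit the generated code as a .S file of raw .byte directives,
  // assemble it with arm-eabi-as, strip the "$d" mapping symbols with objcopy so
  // objdump decodes real instructions, disassemble with objdump, and either print
  // the result (kPrintResults) or compare it line-by-line against the entry for
  // 'testname' in assembler_thumb_test_expected.cc.inc.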
#ifndef HAVE_ANDROID_OS
  static bool results_ok = false;
  static std::string toolsdir;

  if (!results_ok) {
    setup_results();
    toolsdir = GetAndroidToolsDir();
    SetAndroidData();
    results_ok = true;
  }

  ScratchFile file;

  const char* filename = file.GetFilename().c_str();

  std::ofstream out(filename);
  if (out) {
    out << ".section \".text\"\n";
    out << ".syntax unified\n";
    out << ".arch armv7-a\n";
    out << ".thumb\n";
    out << ".thumb_func\n";
    out << ".type " << testname << ", #function\n";
    out << ".global " << testname << "\n";
    out << testname << ":\n";
    out << ".fnstart\n";

    for (uint32_t i = 0 ; i < code.size(); ++i) {
      out << ".byte " << (static_cast<int>(code[i]) & 0xff) << "\n";
    }
    out << ".fnend\n";
    out << ".size " << testname << ", .-" << testname << "\n";
  }
  out.close();

  char cmd[256];

  // Assemble the .S.
  snprintf(cmd, sizeof(cmd), "%sarm-eabi-as %s -o %s.o", toolsdir.c_str(), filename, filename);
  system(cmd);

  // Remove the $d symbols to prevent the disassembler dumping the instructions
  // as .word.
  snprintf(cmd, sizeof(cmd), "%sarm-eabi-objcopy -N '$d' %s.o %s.oo", toolsdir.c_str(),
           filename, filename);
  system(cmd);

  // Disassemble.
  snprintf(cmd, sizeof(cmd), "%sarm-eabi-objdump -d %s.oo | grep '^ *[0-9a-f][0-9a-f]*:'",
           toolsdir.c_str(), filename);
  if (kPrintResults) {
    // Print the results only, don't check.  This is used to generate new output for inserting
    // into the .inc file.
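    // When regenerating, paste the printed lines into the .inc file.  An entry there is
    // assumed to be a nullptr-terminated array of the exact objdump lines, roughly like
    // (the name and column layout here are illustrative only):
    //   const char* SimpleMovResults[] = {
    //     "   0:  4608        mov  r0, r1\n",
    //     ...,
    //     nullptr
    //   };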
    system(cmd);
  } else {
    // Check the results match the appropriate results in the .inc file.
    FILE *fp = popen(cmd, "r");
    ASSERT_TRUE(fp != nullptr);

    std::map<std::string, const char**>::iterator results = test_results.find(testname);
    ASSERT_NE(results, test_results.end());

    uint32_t lineindex = 0;

    while (!feof(fp)) {
      char testline[256];
      char *s = fgets(testline, sizeof(testline), fp);
      if (s == nullptr) {
        break;
      }
      ASSERT_EQ(strcmp(results->second[lineindex], testline), 0);
      ++lineindex;
    }
    // Check that we are at the end.
    ASSERT_TRUE(results->second[lineindex] == nullptr);
    pclose(fp);  // Streams opened with popen() must be closed with pclose().
  }

  char buf[FILENAME_MAX];
  snprintf(buf, sizeof(buf), "%s.o", filename);
  unlink(buf);

  snprintf(buf, sizeof(buf), "%s.oo", filename);
  unlink(buf);
#endif
}

#define __ assembler->

TEST(Thumb2AssemblerTest, SimpleMov) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  __ mov(R0, ShifterOperand(1));
  __ mov(R8, ShifterOperand(9));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "SimpleMov");
  delete assembler;
}

TEST(Thumb2AssemblerTest, SimpleMov32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
  assembler->Force32Bit();

  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "SimpleMov32");
  delete assembler;
}

TEST(Thumb2AssemblerTest, SimpleMovAdd) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R0, ShifterOperand(R1));
  __ add(R0, R1, ShifterOperand(R2));
  __ add(R0, R1, ShifterOperand());

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "SimpleMovAdd");
  delete assembler;
}

TEST(Thumb2AssemblerTest, DataProcessingRegister) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R0, ShifterOperand(R1));
  __ mvn(R0, ShifterOperand(R1));

  // 32 bit variants.
  __ add(R0, R1, ShifterOperand(R2));
  __ sub(R0, R1, ShifterOperand(R2));
  __ and_(R0, R1, ShifterOperand(R2));
  __ orr(R0, R1, ShifterOperand(R2));
  __ eor(R0, R1, ShifterOperand(R2));
  __ bic(R0, R1, ShifterOperand(R2));
  __ adc(R0, R1, ShifterOperand(R2));
  __ sbc(R0, R1, ShifterOperand(R2));
  __ rsb(R0, R1, ShifterOperand(R2));

  // 16 bit variants.
  __ add(R0, R1, ShifterOperand());
  __ sub(R0, R1, ShifterOperand());
  __ and_(R0, R1, ShifterOperand());
  __ orr(R0, R1, ShifterOperand());
  __ eor(R0, R1, ShifterOperand());
  __ bic(R0, R1, ShifterOperand());
  __ adc(R0, R1, ShifterOperand());
  __ sbc(R0, R1, ShifterOperand());
  __ rsb(R0, R1, ShifterOperand());

  __ tst(R0, ShifterOperand(R1));
  __ teq(R0, ShifterOperand(R1));
  __ cmp(R0, ShifterOperand(R1));
  __ cmn(R0, ShifterOperand(R1));

  __ movs(R0, ShifterOperand(R1));
  __ mvns(R0, ShifterOperand(R1));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "DataProcessingRegister");
  delete assembler;
}

TEST(Thumb2AssemblerTest, DataProcessingImmediate) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R0, ShifterOperand(0x55));
  __ mvn(R0, ShifterOperand(0x55));
  __ add(R0, R1, ShifterOperand(0x55));
  __ sub(R0, R1, ShifterOperand(0x55));
  __ and_(R0, R1, ShifterOperand(0x55));
  __ orr(R0, R1, ShifterOperand(0x55));
  __ eor(R0, R1, ShifterOperand(0x55));
  __ bic(R0, R1, ShifterOperand(0x55));
  __ adc(R0, R1, ShifterOperand(0x55));
  __ sbc(R0, R1, ShifterOperand(0x55));
  __ rsb(R0, R1, ShifterOperand(0x55));

  __ tst(R0, ShifterOperand(0x55));
  __ teq(R0, ShifterOperand(0x55));
  __ cmp(R0, ShifterOperand(0x55));
  __ cmn(R0, ShifterOperand(0x55));

  __ add(R0, R1, ShifterOperand(5));
  __ sub(R0, R1, ShifterOperand(5));

  __ movs(R0, ShifterOperand(0x55));
  __ mvns(R0, ShifterOperand(0x55));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "DataProcessingImmediate");
  delete assembler;
}

TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediate) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R0, ShifterOperand(0x550055));
  __ mvn(R0, ShifterOperand(0x550055));
  __ add(R0, R1, ShifterOperand(0x550055));
  __ sub(R0, R1, ShifterOperand(0x550055));
  __ and_(R0, R1, ShifterOperand(0x550055));
  __ orr(R0, R1, ShifterOperand(0x550055));
  __ eor(R0, R1, ShifterOperand(0x550055));
  __ bic(R0, R1, ShifterOperand(0x550055));
  __ adc(R0, R1, ShifterOperand(0x550055));
  __ sbc(R0, R1, ShifterOperand(0x550055));
  __ rsb(R0, R1, ShifterOperand(0x550055));

  __ tst(R0, ShifterOperand(0x550055));
  __ teq(R0, ShifterOperand(0x550055));
  __ cmp(R0, ShifterOperand(0x550055));
  __ cmn(R0, ShifterOperand(0x550055));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "DataProcessingModifiedImmediate");
  delete assembler;
}


TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediates) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R0, ShifterOperand(0x550055));
  __ mov(R0, ShifterOperand(0x55005500));
  __ mov(R0, ShifterOperand(0x55555555));
  __ mov(R0, ShifterOperand(0xd5000000));  // rotated to first position
  __ mov(R0, ShifterOperand(0x6a000000));  // rotated to second position
  __ mov(R0, ShifterOperand(0x350));       // rotated to 2nd last position
  __ mov(R0, ShifterOperand(0x1a8));       // rotated to last position
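  // Note on the four "rotated" cases above (a sketch of the Thumb2 modified-immediate
  // rules; see ThumbExpandImm in the ARM ARM for the authoritative definition): apart
  // from the replicated-byte patterns, a modified immediate is an 8-bit value with its
  // top bit set, rotated right by 8..31.  0xd5000000 is 0xd5 ROR 8 (smallest rotation),
  // 0x6a000000 is 0xd4 ROR 9, 0x350 is 0xd4 ROR 30, and 0x1a8 is 0xd4 ROR 31 (largest).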

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "DataProcessingModifiedImmediates");
  delete assembler;
}

TEST(Thumb2AssemblerTest, DataProcessingShiftedRegister) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mov(R3, ShifterOperand(R4, LSL, 4));
  __ mov(R3, ShifterOperand(R4, LSR, 5));
  __ mov(R3, ShifterOperand(R4, ASR, 6));
  __ mov(R3, ShifterOperand(R4, ROR, 7));
  __ mov(R3, ShifterOperand(R4, ROR));

  // 32 bit variants.
  __ mov(R8, ShifterOperand(R4, LSL, 4));
  __ mov(R8, ShifterOperand(R4, LSR, 5));
  __ mov(R8, ShifterOperand(R4, ASR, 6));
  __ mov(R8, ShifterOperand(R4, ROR, 7));
  __ mov(R8, ShifterOperand(R4, RRX));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "DataProcessingShiftedRegister");
  delete assembler;
}


TEST(Thumb2AssemblerTest, BasicLoad) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ ldr(R3, Address(R4, 24));
  __ ldrb(R3, Address(R4, 24));
  __ ldrh(R3, Address(R4, 24));
  __ ldrsb(R3, Address(R4, 24));
  __ ldrsh(R3, Address(R4, 24));

  __ ldr(R3, Address(SP, 24));

  // 32 bit variants.
  __ ldr(R8, Address(R4, 24));
  __ ldrb(R8, Address(R4, 24));
  __ ldrh(R8, Address(R4, 24));
  __ ldrsb(R8, Address(R4, 24));
  __ ldrsh(R8, Address(R4, 24));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "BasicLoad");
  delete assembler;
}


TEST(Thumb2AssemblerTest, BasicStore) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ str(R3, Address(R4, 24));
  __ strb(R3, Address(R4, 24));
  __ strh(R3, Address(R4, 24));

  __ str(R3, Address(SP, 24));

  // 32 bit variants.
  __ str(R8, Address(R4, 24));
  __ strb(R8, Address(R4, 24));
  __ strh(R8, Address(R4, 24));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "BasicStore");
  delete assembler;
}

TEST(Thumb2AssemblerTest, ComplexLoad) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ ldr(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "ComplexLoad");
  delete assembler;
}


TEST(Thumb2AssemblerTest, ComplexStore) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ str(R3, Address(R4, 24, Address::Mode::Offset));
  __ str(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, 24, Address::Mode::Offset));
  __ strb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, 24, Address::Mode::Offset));
  __ strh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "ComplexStore");
  delete assembler;
}

TEST(Thumb2AssemblerTest, NegativeLoadStore) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ ldr(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ str(R3, Address(R4, -24, Address::Mode::Offset));
  __ str(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, -24, Address::Mode::Offset));
  __ strb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, -24, Address::Mode::Offset));
  __ strh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "NegativeLoadStore");
  delete assembler;
}

TEST(Thumb2AssemblerTest, SimpleLoadStoreDual) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "SimpleLoadStoreDual");
  delete assembler;
}

TEST(Thumb2AssemblerTest, ComplexLoadStoreDual) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ strd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "ComplexLoadStoreDual");
  delete assembler;
}

TEST(Thumb2AssemblerTest, NegativeLoadStoreDual) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ strd(R2, Address(R0, -24, Address::Mode::Offset));
  __ strd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, -24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "NegativeLoadStoreDual");
  delete assembler;
}

TEST(Thumb2AssemblerTest, SimpleBranch) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);
  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "SimpleBranch");
  delete assembler;
}

TEST(Thumb2AssemblerTest, LongBranch) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
  assembler->Force32Bit();
  // 32 bit branches.
  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);

  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "LongBranch");
  delete assembler;
}

TEST(Thumb2AssemblerTest, LoadMultiple) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // 16 bit.
  __ ldm(DB_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ ldm(DB_W, R4, (1 << LR | 1 << R11));
  __ ldm(DB, R4, (1 << LR | 1 << R11));

  // Single reg is converted to ldr.
  __ ldm(DB_W, R4, (1 << R5));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "LoadMultiple");
  delete assembler;
}

TEST(Thumb2AssemblerTest, StoreMultiple) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // 16 bit.
  __ stm(IA_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ stm(IA_W, R4, (1 << LR | 1 << R11));
  __ stm(IA, R4, (1 << LR | 1 << R11));

  // Single reg is converted to str.
  __ stm(IA_W, R4, (1 << R5));
  __ stm(IA, R4, (1 << R5));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "StoreMultiple");
  delete assembler;
}

TEST(Thumb2AssemblerTest, MovWMovT) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ movw(R4, 0);       // 16 bit.
  __ movw(R4, 0x34);    // 16 bit.
  __ movw(R9, 0x34);    // 32 bit due to high register.
  __ movw(R3, 0x1234);  // 32 bit due to large value.
  __ movw(R9, 0xffff);  // 32 bit due to large value and high register.

  // Always 32 bit.
  __ movt(R0, 0);
  __ movt(R0, 0x1234);
  __ movt(R1, 0xffff);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "MovWMovT");
  delete assembler;
}

TEST(Thumb2AssemblerTest, SpecialAddSub) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ add(R2, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(SP, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ add(R2, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ add(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.

  __ sub(SP, SP, ShifterOperand(0x50));   // 16 bit.
  __ sub(R0, SP, ShifterOperand(0x50));   // 32 bit.
  __ sub(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ sub(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "SpecialAddSub");
  delete assembler;
}

TEST(Thumb2AssemblerTest, StoreToOffset) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ StoreToOffset(kStoreWord, R2, R4, 12);      // Simple.
  __ StoreToOffset(kStoreWord, R2, R4, 0x2000);  // Offset too big.

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "StoreToOffset");
  delete assembler;
}


TEST(Thumb2AssemblerTest, IfThen) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

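  // it(cond, ...) opens an IT block of up to four following instructions.  The extra
  // kItThen/kItElse arguments select the condition of the second, third and fourth
  // slots: Then uses the block's condition, Else its inverse.  The condition passed to
  // each instruction below is expected to match its IT slot; in Thumb the condition is
  // carried by the IT instruction's mask, not by the 16-bit instructions themselves.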
  __ it(EQ);
  __ mov(R1, ShifterOperand(1), EQ);

  __ it(EQ, kItThen);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);

  __ it(EQ, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);

  __ it(EQ, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), NE);

  __ it(EQ, kItElse, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);
  __ mov(R3, ShifterOperand(3), NE);

  __ it(EQ, kItThen, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), EQ);
  __ mov(R4, ShifterOperand(4), NE);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "IfThen");
  delete assembler;
}

TEST(Thumb2AssemblerTest, CbzCbnz) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R2, &l1);
  __ mov(R1, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l1);
  __ mov(R2, ShifterOperand(4));

  Label l2;
  __ cbnz(R2, &l2);
  __ mov(R8, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l2);
  __ mov(R2, ShifterOperand(4));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CbzCbnz");
  delete assembler;
}

TEST(Thumb2AssemblerTest, Multiply) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mul(R0, R1, R0);
  __ mul(R0, R1, R2);
  __ mul(R8, R9, R8);
  __ mul(R8, R9, R10);

  __ mla(R0, R1, R2, R3);
  __ mla(R8, R9, R8, R9);

  __ mls(R0, R1, R2, R3);
  __ mls(R8, R9, R8, R9);

  __ umull(R0, R1, R2, R3);
  __ umull(R8, R9, R10, R11);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Multiply");
  delete assembler;
}

TEST(Thumb2AssemblerTest, Divide) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ sdiv(R0, R1, R2);
  __ sdiv(R8, R9, R10);

  __ udiv(R0, R1, R2);
  __ udiv(R8, R9, R10);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Divide");
  delete assembler;
}

TEST(Thumb2AssemblerTest, VMov) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ vmovs(S1, 1.0);
  __ vmovd(D1, 1.0);

  __ vmovs(S1, S2);
  __ vmovd(D1, D2);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "VMov");
  delete assembler;
}


TEST(Thumb2AssemblerTest, BasicFloatingPoint) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ vadds(S0, S1, S2);
  __ vsubs(S0, S1, S2);
  __ vmuls(S0, S1, S2);
  __ vmlas(S0, S1, S2);
  __ vmlss(S0, S1, S2);
  __ vdivs(S0, S1, S2);
  __ vabss(S0, S1);
  __ vnegs(S0, S1);
  __ vsqrts(S0, S1);

  __ vaddd(D0, D1, D2);
  __ vsubd(D0, D1, D2);
  __ vmuld(D0, D1, D2);
  __ vmlad(D0, D1, D2);
  __ vmlsd(D0, D1, D2);
  __ vdivd(D0, D1, D2);
  __ vabsd(D0, D1);
  __ vnegd(D0, D1);
  __ vsqrtd(D0, D1);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "BasicFloatingPoint");
  delete assembler;
}

TEST(Thumb2AssemblerTest, FloatingPointConversions) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ vcvtsd(S2, D2);
  __ vcvtds(D2, S2);

  __ vcvtis(S1, S2);
  __ vcvtsi(S1, S2);

  __ vcvtid(S1, D2);
  __ vcvtdi(D1, S2);

  __ vcvtus(S1, S2);
  __ vcvtsu(S1, S2);

  __ vcvtud(S1, D2);
  __ vcvtdu(D1, S2);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "FloatingPointConversions");
  delete assembler;
}

TEST(Thumb2AssemblerTest, FloatingPointComparisons) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ vcmps(S0, S1);
  __ vcmpd(D0, D1);

  __ vcmpsz(S2);
  __ vcmpdz(D2);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "FloatingPointComparisons");
  delete assembler;
}

TEST(Thumb2AssemblerTest, Calls) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ blx(LR);
  __ bx(LR);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Calls");
  delete assembler;
}

TEST(Thumb2AssemblerTest, Breakpoint) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ bkpt(0);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Breakpoint");
  delete assembler;
}

TEST(Thumb2AssemblerTest, StrR1) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ str(R1, Address(SP, 68));
  __ str(R1, Address(SP, 1068));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "StrR1");
  delete assembler;
}

TEST(Thumb2AssemblerTest, VPushPop) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ vpushs(S2, 4);
  __ vpushd(D2, 4);

  __ vpops(S2, 4);
  __ vpopd(D2, 4);

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "VPushPop");
  delete assembler;
}

TEST(Thumb2AssemblerTest, Max16BitBranch) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ b(&l1);
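  // Pad with 1024 16-bit movs (2048 bytes), assumed to be exactly the maximum forward
  // reach of the 16-bit unconditional branch encoding (11-bit halfword offset, roughly
  // +/-2KB), so the branch above should still be emitted in its 16-bit form.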
  for (int i = 0 ; i < (1 << 11) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Max16BitBranch");
  delete assembler;
}

TEST(Thumb2AssemblerTest, Branch32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ b(&l1);
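  // Same padding plus one extra 16-bit instruction, pushing the target just beyond the
  // 16-bit branch range so the assembler has to fall back to the 32-bit branch encoding.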
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Branch32");
  delete assembler;
}

TEST(Thumb2AssemblerTest, CompareAndBranchMax) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R4, &l1);
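  // 64 16-bit movs (128 bytes) of padding: assumed to be the maximum forward offset
  // (126 bytes from the PC) that cbz/cbnz can encode, so the cbz above stays as is.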
  for (int i = 0 ; i < (1 << 7) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CompareAndBranchMax");
  delete assembler;
}

TEST(Thumb2AssemblerTest, CompareAndBranchRelocation16) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R4, &l1);
  for (int i = 0 ; i < (1 << 7) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CompareAndBranchRelocation16");
  delete assembler;
}

TEST(Thumb2AssemblerTest, CompareAndBranchRelocation32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R4, &l1);
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CompareAndBranchRelocation32");
  delete assembler;
}

TEST(Thumb2AssemblerTest, MixedBranch32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  Label l2;
  __ b(&l1);  // Forwards.
  __ Bind(&l2);

  // Space to force relocation.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ b(&l2);  // Backwards.
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "MixedBranch32");
  delete assembler;
}

#undef __
}  // namespace arm
}  // namespace art