blob: 1e3e569d8b26f88bcdf3763b34015567b1510608 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <fstream>
18
19#include "gtest/gtest.h"
20#include "utils/arm/assembler_thumb2.h"
21#include "base/hex_dump.h"
22#include "common_runtime_test.h"
23
24namespace art {
25namespace arm {
26
27// Include results file (generated manually)
28#include "assembler_thumb_test_expected.cc.inc"
29
#ifndef HAVE_ANDROID_OS
// When true, dump() prints the disassembly to stdout so that the output
// can be pasted into assembler_thumb_test_expected.cc.inc as new
// expected results (in the same form as the existing entries).
//
// When false (the normal setting), dump() does not print anything and
// instead compares the disassembly against the expected results stored
// in the .cc.inc file.
static constexpr bool kPrintResults = false;
#endif
Dave Allison65fcc2c2014-04-28 13:45:27 -070042
// Ensures the ANDROID_DATA environment variable is defined, defaulting
// it to "/tmp" when it is not already set. Existing values are kept.
void SetAndroidData() {
  if (getenv("ANDROID_DATA") == nullptr) {
    setenv("ANDROID_DATA", "/tmp", 1);
  }
}
49
// Compares two C strings, treating runs of whitespace as insignificant.
// Returns 0 when the strings are equal ignoring whitespace, otherwise
// the difference between the first pair of mismatching characters.
//
// Fixes over the previous version:
//  - trailing whitespace on either side no longer causes a spurious
//    mismatch (previously "a" vs "a " compared unequal),
//  - char is cast to unsigned char before isspace() to avoid undefined
//    behavior on negative char values.
int CompareIgnoringSpace(const char* s1, const char* s2) {
  while (*s1 != '\0') {
    while (isspace(static_cast<unsigned char>(*s1))) ++s1;
    while (isspace(static_cast<unsigned char>(*s2))) ++s2;
    if (*s1 == '\0' || *s1 != *s2) {
      break;
    }
    ++s1;
    ++s2;
  }
  // Skip any trailing whitespace so both strings can end together.
  while (isspace(static_cast<unsigned char>(*s1))) ++s1;
  while (isspace(static_cast<unsigned char>(*s2))) ++s2;
  return *s1 - *s2;
}
62
// Locates the newest prebuilt arm-eabi toolchain under the source tree.
// Walks up from ANDROID_BUILD_TOP (or the current directory when unset)
// until a "prebuilts" directory is found, then picks the arm-eabi-*
// subdirectory with the largest version number.
// Returns the toolchain "bin/" directory with a trailing '/', or "" so
// callers fall back to $PATH.
std::string GetAndroidToolsDir() {
  std::string root;
  const char* android_build_top = getenv("ANDROID_BUILD_TOP");
  if (android_build_top != nullptr) {
    root += android_build_top;
  } else {
    // Not set by build server, so default to current directory.
    char* cwd = getcwd(nullptr, 0);
    setenv("ANDROID_BUILD_TOP", cwd, 1);
    root += cwd;
    free(cwd);
  }

  // Look for "prebuilts", moving up one directory at a time.
  std::string toolsdir = root;
  struct stat st;
  while (toolsdir != "") {
    std::string prebuilts = toolsdir + "/prebuilts";
    if (stat(prebuilts.c_str(), &st) == 0) {
      // Found prebuilts.
      toolsdir += "/prebuilts/gcc/linux-x86/arm";
      break;
    }
    // Not present, move up one dir.
    size_t slash = toolsdir.rfind('/');
    if (slash == std::string::npos) {
      toolsdir = "";
    } else {
      // Keep everything strictly before the slash. The previous code used
      // substr(0, slash - 1), which also dropped the last character of the
      // parent directory name and could therefore never find "prebuilts".
      toolsdir = toolsdir.substr(0, slash);
    }
  }
  if (stat(toolsdir.c_str(), &st) != 0) {
    return "";  // Use path.
  }

  DIR* dir = opendir(toolsdir.c_str());
  if (dir == nullptr) {
    return "";  // Use path.
  }

  struct dirent* entry;
  std::string founddir;
  double maxversion = 0;

  // Find the latest version of the arm-eabi tools (biggest version number).
  // Suffix on toolsdir will be something like "arm-eabi-4.8".
  static const char kPrefix[] = "arm-eabi-";
  while ((entry = readdir(dir)) != nullptr) {
    std::string subdir = toolsdir + std::string("/") + std::string(entry->d_name);
    size_t eabi = subdir.find(kPrefix);
    if (eabi != std::string::npos) {
      // sizeof(kPrefix) - 1 == strlen(kPrefix). The previous code used
      // sizeof("arm-eabi-") (which counts the NUL terminator), skipping the
      // first digit of the version and mis-ordering e.g. 10.x vs 4.x.
      std::string suffix = subdir.substr(eabi + sizeof(kPrefix) - 1);
      double version = strtod(suffix.c_str(), nullptr);
      if (version > maxversion) {
        maxversion = version;
        founddir = subdir;
      }
    }
  }
  closedir(dir);
  if (founddir.empty()) {
    return "";  // Use path.
  }

  return founddir + "/bin/";
}
130
131void dump(std::vector<uint8_t>& code, const char* testname) {
132 // This will only work on the host. There is no as, objcopy or objdump on the
133 // device.
134#ifndef HAVE_ANDROID_OS
135 static bool results_ok = false;
136 static std::string toolsdir;
137
138 if (!results_ok) {
139 setup_results();
140 toolsdir = GetAndroidToolsDir();
141 SetAndroidData();
142 results_ok = true;
143 }
144
145 ScratchFile file;
146
147 const char* filename = file.GetFilename().c_str();
148
149 std::ofstream out(filename);
150 if (out) {
151 out << ".section \".text\"\n";
152 out << ".syntax unified\n";
153 out << ".arch armv7-a\n";
154 out << ".thumb\n";
155 out << ".thumb_func\n";
156 out << ".type " << testname << ", #function\n";
157 out << ".global " << testname << "\n";
158 out << testname << ":\n";
159 out << ".fnstart\n";
160
161 for (uint32_t i = 0 ; i < code.size(); ++i) {
162 out << ".byte " << (static_cast<int>(code[i]) & 0xff) << "\n";
163 }
164 out << ".fnend\n";
165 out << ".size " << testname << ", .-" << testname << "\n";
166 }
167 out.close();
168
169 char cmd[256];
170
171 // Assemble the .S
Dave Allisonc819e0d2014-06-05 13:58:56 -0700172 snprintf(cmd, sizeof(cmd), "%sarm-eabi-as %s -o %s.o", toolsdir.c_str(), filename, filename);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700173 system(cmd);
174
175 // Remove the $d symbols to prevent the disassembler dumping the instructions
176 // as .word
Dave Allisonc819e0d2014-06-05 13:58:56 -0700177 snprintf(cmd, sizeof(cmd), "%sarm-eabi-objcopy -N '$d' %s.o %s.oo", toolsdir.c_str(),
Dave Allison65fcc2c2014-04-28 13:45:27 -0700178 filename, filename);
179 system(cmd);
180
181 // Disassemble.
182
Dave Allisonc819e0d2014-06-05 13:58:56 -0700183 snprintf(cmd, sizeof(cmd), "%sarm-eabi-objdump -d %s.oo | grep '^ *[0-9a-f][0-9a-f]*:'",
Dave Allison65fcc2c2014-04-28 13:45:27 -0700184 toolsdir.c_str(), filename);
185 if (kPrintResults) {
186 // Print the results only, don't check. This is used to generate new output for inserting
187 // into the .inc file.
188 system(cmd);
189 } else {
190 // Check the results match the appropriate results in the .inc file.
191 FILE *fp = popen(cmd, "r");
192 ASSERT_TRUE(fp != nullptr);
193
194 std::map<std::string, const char**>::iterator results = test_results.find(testname);
195 ASSERT_NE(results, test_results.end());
196
197 uint32_t lineindex = 0;
198
199 while (!feof(fp)) {
200 char testline[256];
201 char *s = fgets(testline, sizeof(testline), fp);
202 if (s == nullptr) {
203 break;
204 }
Dave Allison45fdb932014-06-25 12:37:10 -0700205 if (CompareIgnoringSpace(results->second[lineindex], testline) != 0) {
206 LOG(FATAL) << "Output is not as expected at line: " << lineindex
207 << results->second[lineindex] << "/" << testline;
208 }
Dave Allison65fcc2c2014-04-28 13:45:27 -0700209 ++lineindex;
210 }
211 // Check that we are at the end.
212 ASSERT_TRUE(results->second[lineindex] == nullptr);
213 fclose(fp);
214 }
215
216 char buf[FILENAME_MAX];
217 snprintf(buf, sizeof(buf), "%s.o", filename);
218 unlink(buf);
219
220 snprintf(buf, sizeof(buf), "%s.oo", filename);
221 unlink(buf);
222#endif
223}
224
225#define __ assembler->
226
227TEST(Thumb2AssemblerTest, SimpleMov) {
228 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
229
230 __ mov(R0, ShifterOperand(R1));
231 __ mov(R8, ShifterOperand(R9));
232
233 __ mov(R0, ShifterOperand(1));
234 __ mov(R8, ShifterOperand(9));
235
236 size_t cs = __ CodeSize();
237 std::vector<uint8_t> managed_code(cs);
238 MemoryRegion code(&managed_code[0], managed_code.size());
239 __ FinalizeInstructions(code);
240 dump(managed_code, "SimpleMov");
241 delete assembler;
242}
243
244TEST(Thumb2AssemblerTest, SimpleMov32) {
245 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
246 assembler->Force32Bit();
247
248 __ mov(R0, ShifterOperand(R1));
249 __ mov(R8, ShifterOperand(R9));
250
251 size_t cs = __ CodeSize();
252 std::vector<uint8_t> managed_code(cs);
253 MemoryRegion code(&managed_code[0], managed_code.size());
254 __ FinalizeInstructions(code);
255 dump(managed_code, "SimpleMov32");
256 delete assembler;
257}
258
259TEST(Thumb2AssemblerTest, SimpleMovAdd) {
260 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
261
262 __ mov(R0, ShifterOperand(R1));
263 __ add(R0, R1, ShifterOperand(R2));
264 __ add(R0, R1, ShifterOperand());
265
266 size_t cs = __ CodeSize();
267 std::vector<uint8_t> managed_code(cs);
268 MemoryRegion code(&managed_code[0], managed_code.size());
269 __ FinalizeInstructions(code);
270 dump(managed_code, "SimpleMovAdd");
271 delete assembler;
272}
273
274TEST(Thumb2AssemblerTest, DataProcessingRegister) {
275 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
276
277 __ mov(R0, ShifterOperand(R1));
278 __ mvn(R0, ShifterOperand(R1));
279
280 // 32 bit variants.
281 __ add(R0, R1, ShifterOperand(R2));
282 __ sub(R0, R1, ShifterOperand(R2));
283 __ and_(R0, R1, ShifterOperand(R2));
284 __ orr(R0, R1, ShifterOperand(R2));
285 __ eor(R0, R1, ShifterOperand(R2));
286 __ bic(R0, R1, ShifterOperand(R2));
287 __ adc(R0, R1, ShifterOperand(R2));
288 __ sbc(R0, R1, ShifterOperand(R2));
289 __ rsb(R0, R1, ShifterOperand(R2));
290
291 // 16 bit variants.
292 __ add(R0, R1, ShifterOperand());
293 __ sub(R0, R1, ShifterOperand());
294 __ and_(R0, R1, ShifterOperand());
295 __ orr(R0, R1, ShifterOperand());
296 __ eor(R0, R1, ShifterOperand());
297 __ bic(R0, R1, ShifterOperand());
298 __ adc(R0, R1, ShifterOperand());
299 __ sbc(R0, R1, ShifterOperand());
300 __ rsb(R0, R1, ShifterOperand());
301
302 __ tst(R0, ShifterOperand(R1));
303 __ teq(R0, ShifterOperand(R1));
304 __ cmp(R0, ShifterOperand(R1));
305 __ cmn(R0, ShifterOperand(R1));
306
307 __ movs(R0, ShifterOperand(R1));
308 __ mvns(R0, ShifterOperand(R1));
309
310 size_t cs = __ CodeSize();
311 std::vector<uint8_t> managed_code(cs);
312 MemoryRegion code(&managed_code[0], managed_code.size());
313 __ FinalizeInstructions(code);
314 dump(managed_code, "DataProcessingRegister");
315 delete assembler;
316}
317
318TEST(Thumb2AssemblerTest, DataProcessingImmediate) {
319 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
320
321 __ mov(R0, ShifterOperand(0x55));
322 __ mvn(R0, ShifterOperand(0x55));
323 __ add(R0, R1, ShifterOperand(0x55));
324 __ sub(R0, R1, ShifterOperand(0x55));
325 __ and_(R0, R1, ShifterOperand(0x55));
326 __ orr(R0, R1, ShifterOperand(0x55));
327 __ eor(R0, R1, ShifterOperand(0x55));
328 __ bic(R0, R1, ShifterOperand(0x55));
329 __ adc(R0, R1, ShifterOperand(0x55));
330 __ sbc(R0, R1, ShifterOperand(0x55));
331 __ rsb(R0, R1, ShifterOperand(0x55));
332
333 __ tst(R0, ShifterOperand(0x55));
334 __ teq(R0, ShifterOperand(0x55));
335 __ cmp(R0, ShifterOperand(0x55));
336 __ cmn(R0, ShifterOperand(0x55));
337
338 __ add(R0, R1, ShifterOperand(5));
339 __ sub(R0, R1, ShifterOperand(5));
340
341 __ movs(R0, ShifterOperand(0x55));
342 __ mvns(R0, ShifterOperand(0x55));
343
344 size_t cs = __ CodeSize();
345 std::vector<uint8_t> managed_code(cs);
346 MemoryRegion code(&managed_code[0], managed_code.size());
347 __ FinalizeInstructions(code);
348 dump(managed_code, "DataProcessingImmediate");
349 delete assembler;
350}
351
352TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediate) {
353 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
354
355 __ mov(R0, ShifterOperand(0x550055));
356 __ mvn(R0, ShifterOperand(0x550055));
357 __ add(R0, R1, ShifterOperand(0x550055));
358 __ sub(R0, R1, ShifterOperand(0x550055));
359 __ and_(R0, R1, ShifterOperand(0x550055));
360 __ orr(R0, R1, ShifterOperand(0x550055));
361 __ eor(R0, R1, ShifterOperand(0x550055));
362 __ bic(R0, R1, ShifterOperand(0x550055));
363 __ adc(R0, R1, ShifterOperand(0x550055));
364 __ sbc(R0, R1, ShifterOperand(0x550055));
365 __ rsb(R0, R1, ShifterOperand(0x550055));
366
367 __ tst(R0, ShifterOperand(0x550055));
368 __ teq(R0, ShifterOperand(0x550055));
369 __ cmp(R0, ShifterOperand(0x550055));
370 __ cmn(R0, ShifterOperand(0x550055));
371
372 size_t cs = __ CodeSize();
373 std::vector<uint8_t> managed_code(cs);
374 MemoryRegion code(&managed_code[0], managed_code.size());
375 __ FinalizeInstructions(code);
376 dump(managed_code, "DataProcessingModifiedImmediate");
377 delete assembler;
378}
379
380
381TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediates) {
382 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
383
384 __ mov(R0, ShifterOperand(0x550055));
385 __ mov(R0, ShifterOperand(0x55005500));
386 __ mov(R0, ShifterOperand(0x55555555));
387 __ mov(R0, ShifterOperand(0xd5000000)); // rotated to first position
388 __ mov(R0, ShifterOperand(0x6a000000)); // rotated to second position
389 __ mov(R0, ShifterOperand(0x350)); // rotated to 2nd last position
390 __ mov(R0, ShifterOperand(0x1a8)); // rotated to last position
391
392 size_t cs = __ CodeSize();
393 std::vector<uint8_t> managed_code(cs);
394 MemoryRegion code(&managed_code[0], managed_code.size());
395 __ FinalizeInstructions(code);
396 dump(managed_code, "DataProcessingModifiedImmediates");
397 delete assembler;
398}
399
400TEST(Thumb2AssemblerTest, DataProcessingShiftedRegister) {
401 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
402
403 __ mov(R3, ShifterOperand(R4, LSL, 4));
404 __ mov(R3, ShifterOperand(R4, LSR, 5));
405 __ mov(R3, ShifterOperand(R4, ASR, 6));
406 __ mov(R3, ShifterOperand(R4, ROR, 7));
407 __ mov(R3, ShifterOperand(R4, ROR));
408
409 // 32 bit variants.
410 __ mov(R8, ShifterOperand(R4, LSL, 4));
411 __ mov(R8, ShifterOperand(R4, LSR, 5));
412 __ mov(R8, ShifterOperand(R4, ASR, 6));
413 __ mov(R8, ShifterOperand(R4, ROR, 7));
414 __ mov(R8, ShifterOperand(R4, RRX));
415
416 size_t cs = __ CodeSize();
417 std::vector<uint8_t> managed_code(cs);
418 MemoryRegion code(&managed_code[0], managed_code.size());
419 __ FinalizeInstructions(code);
420 dump(managed_code, "DataProcessingShiftedRegister");
421 delete assembler;
422}
423
424
425TEST(Thumb2AssemblerTest, BasicLoad) {
426 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
427
428 __ ldr(R3, Address(R4, 24));
429 __ ldrb(R3, Address(R4, 24));
430 __ ldrh(R3, Address(R4, 24));
431 __ ldrsb(R3, Address(R4, 24));
432 __ ldrsh(R3, Address(R4, 24));
433
434 __ ldr(R3, Address(SP, 24));
435
436 // 32 bit variants
437 __ ldr(R8, Address(R4, 24));
438 __ ldrb(R8, Address(R4, 24));
439 __ ldrh(R8, Address(R4, 24));
440 __ ldrsb(R8, Address(R4, 24));
441 __ ldrsh(R8, Address(R4, 24));
442
443 size_t cs = __ CodeSize();
444 std::vector<uint8_t> managed_code(cs);
445 MemoryRegion code(&managed_code[0], managed_code.size());
446 __ FinalizeInstructions(code);
447 dump(managed_code, "BasicLoad");
448 delete assembler;
449}
450
451
452TEST(Thumb2AssemblerTest, BasicStore) {
453 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
454
455 __ str(R3, Address(R4, 24));
456 __ strb(R3, Address(R4, 24));
457 __ strh(R3, Address(R4, 24));
458
459 __ str(R3, Address(SP, 24));
460
461 // 32 bit variants.
462 __ str(R8, Address(R4, 24));
463 __ strb(R8, Address(R4, 24));
464 __ strh(R8, Address(R4, 24));
465
466 size_t cs = __ CodeSize();
467 std::vector<uint8_t> managed_code(cs);
468 MemoryRegion code(&managed_code[0], managed_code.size());
469 __ FinalizeInstructions(code);
470 dump(managed_code, "BasicStore");
471 delete assembler;
472}
473
474TEST(Thumb2AssemblerTest, ComplexLoad) {
475 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
476
477 __ ldr(R3, Address(R4, 24, Address::Mode::Offset));
478 __ ldr(R3, Address(R4, 24, Address::Mode::PreIndex));
479 __ ldr(R3, Address(R4, 24, Address::Mode::PostIndex));
480 __ ldr(R3, Address(R4, 24, Address::Mode::NegOffset));
481 __ ldr(R3, Address(R4, 24, Address::Mode::NegPreIndex));
482 __ ldr(R3, Address(R4, 24, Address::Mode::NegPostIndex));
483
484 __ ldrb(R3, Address(R4, 24, Address::Mode::Offset));
485 __ ldrb(R3, Address(R4, 24, Address::Mode::PreIndex));
486 __ ldrb(R3, Address(R4, 24, Address::Mode::PostIndex));
487 __ ldrb(R3, Address(R4, 24, Address::Mode::NegOffset));
488 __ ldrb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
489 __ ldrb(R3, Address(R4, 24, Address::Mode::NegPostIndex));
490
491 __ ldrh(R3, Address(R4, 24, Address::Mode::Offset));
492 __ ldrh(R3, Address(R4, 24, Address::Mode::PreIndex));
493 __ ldrh(R3, Address(R4, 24, Address::Mode::PostIndex));
494 __ ldrh(R3, Address(R4, 24, Address::Mode::NegOffset));
495 __ ldrh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
496 __ ldrh(R3, Address(R4, 24, Address::Mode::NegPostIndex));
497
498 __ ldrsb(R3, Address(R4, 24, Address::Mode::Offset));
499 __ ldrsb(R3, Address(R4, 24, Address::Mode::PreIndex));
500 __ ldrsb(R3, Address(R4, 24, Address::Mode::PostIndex));
501 __ ldrsb(R3, Address(R4, 24, Address::Mode::NegOffset));
502 __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
503 __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPostIndex));
504
505 __ ldrsh(R3, Address(R4, 24, Address::Mode::Offset));
506 __ ldrsh(R3, Address(R4, 24, Address::Mode::PreIndex));
507 __ ldrsh(R3, Address(R4, 24, Address::Mode::PostIndex));
508 __ ldrsh(R3, Address(R4, 24, Address::Mode::NegOffset));
509 __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
510 __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPostIndex));
511
512 size_t cs = __ CodeSize();
513 std::vector<uint8_t> managed_code(cs);
514 MemoryRegion code(&managed_code[0], managed_code.size());
515 __ FinalizeInstructions(code);
516 dump(managed_code, "ComplexLoad");
517 delete assembler;
518}
519
520
521TEST(Thumb2AssemblerTest, ComplexStore) {
522 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
523
524 __ str(R3, Address(R4, 24, Address::Mode::Offset));
525 __ str(R3, Address(R4, 24, Address::Mode::PreIndex));
526 __ str(R3, Address(R4, 24, Address::Mode::PostIndex));
527 __ str(R3, Address(R4, 24, Address::Mode::NegOffset));
528 __ str(R3, Address(R4, 24, Address::Mode::NegPreIndex));
529 __ str(R3, Address(R4, 24, Address::Mode::NegPostIndex));
530
531 __ strb(R3, Address(R4, 24, Address::Mode::Offset));
532 __ strb(R3, Address(R4, 24, Address::Mode::PreIndex));
533 __ strb(R3, Address(R4, 24, Address::Mode::PostIndex));
534 __ strb(R3, Address(R4, 24, Address::Mode::NegOffset));
535 __ strb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
536 __ strb(R3, Address(R4, 24, Address::Mode::NegPostIndex));
537
538 __ strh(R3, Address(R4, 24, Address::Mode::Offset));
539 __ strh(R3, Address(R4, 24, Address::Mode::PreIndex));
540 __ strh(R3, Address(R4, 24, Address::Mode::PostIndex));
541 __ strh(R3, Address(R4, 24, Address::Mode::NegOffset));
542 __ strh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
543 __ strh(R3, Address(R4, 24, Address::Mode::NegPostIndex));
544
545 size_t cs = __ CodeSize();
546 std::vector<uint8_t> managed_code(cs);
547 MemoryRegion code(&managed_code[0], managed_code.size());
548 __ FinalizeInstructions(code);
549 dump(managed_code, "ComplexStore");
550 delete assembler;
551}
552
553TEST(Thumb2AssemblerTest, NegativeLoadStore) {
554 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
555
556 __ ldr(R3, Address(R4, -24, Address::Mode::Offset));
557 __ ldr(R3, Address(R4, -24, Address::Mode::PreIndex));
558 __ ldr(R3, Address(R4, -24, Address::Mode::PostIndex));
559 __ ldr(R3, Address(R4, -24, Address::Mode::NegOffset));
560 __ ldr(R3, Address(R4, -24, Address::Mode::NegPreIndex));
561 __ ldr(R3, Address(R4, -24, Address::Mode::NegPostIndex));
562
563 __ ldrb(R3, Address(R4, -24, Address::Mode::Offset));
564 __ ldrb(R3, Address(R4, -24, Address::Mode::PreIndex));
565 __ ldrb(R3, Address(R4, -24, Address::Mode::PostIndex));
566 __ ldrb(R3, Address(R4, -24, Address::Mode::NegOffset));
567 __ ldrb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
568 __ ldrb(R3, Address(R4, -24, Address::Mode::NegPostIndex));
569
570 __ ldrh(R3, Address(R4, -24, Address::Mode::Offset));
571 __ ldrh(R3, Address(R4, -24, Address::Mode::PreIndex));
572 __ ldrh(R3, Address(R4, -24, Address::Mode::PostIndex));
573 __ ldrh(R3, Address(R4, -24, Address::Mode::NegOffset));
574 __ ldrh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
575 __ ldrh(R3, Address(R4, -24, Address::Mode::NegPostIndex));
576
577 __ ldrsb(R3, Address(R4, -24, Address::Mode::Offset));
578 __ ldrsb(R3, Address(R4, -24, Address::Mode::PreIndex));
579 __ ldrsb(R3, Address(R4, -24, Address::Mode::PostIndex));
580 __ ldrsb(R3, Address(R4, -24, Address::Mode::NegOffset));
581 __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
582 __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPostIndex));
583
584 __ ldrsh(R3, Address(R4, -24, Address::Mode::Offset));
585 __ ldrsh(R3, Address(R4, -24, Address::Mode::PreIndex));
586 __ ldrsh(R3, Address(R4, -24, Address::Mode::PostIndex));
587 __ ldrsh(R3, Address(R4, -24, Address::Mode::NegOffset));
588 __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
589 __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPostIndex));
590
591 __ str(R3, Address(R4, -24, Address::Mode::Offset));
592 __ str(R3, Address(R4, -24, Address::Mode::PreIndex));
593 __ str(R3, Address(R4, -24, Address::Mode::PostIndex));
594 __ str(R3, Address(R4, -24, Address::Mode::NegOffset));
595 __ str(R3, Address(R4, -24, Address::Mode::NegPreIndex));
596 __ str(R3, Address(R4, -24, Address::Mode::NegPostIndex));
597
598 __ strb(R3, Address(R4, -24, Address::Mode::Offset));
599 __ strb(R3, Address(R4, -24, Address::Mode::PreIndex));
600 __ strb(R3, Address(R4, -24, Address::Mode::PostIndex));
601 __ strb(R3, Address(R4, -24, Address::Mode::NegOffset));
602 __ strb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
603 __ strb(R3, Address(R4, -24, Address::Mode::NegPostIndex));
604
605 __ strh(R3, Address(R4, -24, Address::Mode::Offset));
606 __ strh(R3, Address(R4, -24, Address::Mode::PreIndex));
607 __ strh(R3, Address(R4, -24, Address::Mode::PostIndex));
608 __ strh(R3, Address(R4, -24, Address::Mode::NegOffset));
609 __ strh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
610 __ strh(R3, Address(R4, -24, Address::Mode::NegPostIndex));
611
612 size_t cs = __ CodeSize();
613 std::vector<uint8_t> managed_code(cs);
614 MemoryRegion code(&managed_code[0], managed_code.size());
615 __ FinalizeInstructions(code);
616 dump(managed_code, "NegativeLoadStore");
617 delete assembler;
618}
619
620TEST(Thumb2AssemblerTest, SimpleLoadStoreDual) {
621 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
622
623 __ strd(R2, Address(R0, 24, Address::Mode::Offset));
624 __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));
625
626 size_t cs = __ CodeSize();
627 std::vector<uint8_t> managed_code(cs);
628 MemoryRegion code(&managed_code[0], managed_code.size());
629 __ FinalizeInstructions(code);
630 dump(managed_code, "SimpleLoadStoreDual");
631 delete assembler;
632}
633
634TEST(Thumb2AssemblerTest, ComplexLoadStoreDual) {
635 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
636
637 __ strd(R2, Address(R0, 24, Address::Mode::Offset));
638 __ strd(R2, Address(R0, 24, Address::Mode::PreIndex));
639 __ strd(R2, Address(R0, 24, Address::Mode::PostIndex));
640 __ strd(R2, Address(R0, 24, Address::Mode::NegOffset));
641 __ strd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
642 __ strd(R2, Address(R0, 24, Address::Mode::NegPostIndex));
643
644 __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));
645 __ ldrd(R2, Address(R0, 24, Address::Mode::PreIndex));
646 __ ldrd(R2, Address(R0, 24, Address::Mode::PostIndex));
647 __ ldrd(R2, Address(R0, 24, Address::Mode::NegOffset));
648 __ ldrd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
649 __ ldrd(R2, Address(R0, 24, Address::Mode::NegPostIndex));
650
651 size_t cs = __ CodeSize();
652 std::vector<uint8_t> managed_code(cs);
653 MemoryRegion code(&managed_code[0], managed_code.size());
654 __ FinalizeInstructions(code);
655 dump(managed_code, "ComplexLoadStoreDual");
656 delete assembler;
657}
658
659TEST(Thumb2AssemblerTest, NegativeLoadStoreDual) {
660 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
661
662 __ strd(R2, Address(R0, -24, Address::Mode::Offset));
663 __ strd(R2, Address(R0, -24, Address::Mode::PreIndex));
664 __ strd(R2, Address(R0, -24, Address::Mode::PostIndex));
665 __ strd(R2, Address(R0, -24, Address::Mode::NegOffset));
666 __ strd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
667 __ strd(R2, Address(R0, -24, Address::Mode::NegPostIndex));
668
669 __ ldrd(R2, Address(R0, -24, Address::Mode::Offset));
670 __ ldrd(R2, Address(R0, -24, Address::Mode::PreIndex));
671 __ ldrd(R2, Address(R0, -24, Address::Mode::PostIndex));
672 __ ldrd(R2, Address(R0, -24, Address::Mode::NegOffset));
673 __ ldrd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
674 __ ldrd(R2, Address(R0, -24, Address::Mode::NegPostIndex));
675
676 size_t cs = __ CodeSize();
677 std::vector<uint8_t> managed_code(cs);
678 MemoryRegion code(&managed_code[0], managed_code.size());
679 __ FinalizeInstructions(code);
680 dump(managed_code, "NegativeLoadStoreDual");
681 delete assembler;
682}
683
684TEST(Thumb2AssemblerTest, SimpleBranch) {
685 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
686
687 Label l1;
688 __ mov(R0, ShifterOperand(2));
689 __ Bind(&l1);
690 __ mov(R1, ShifterOperand(1));
691 __ b(&l1);
692 Label l2;
693 __ b(&l2);
694 __ mov(R1, ShifterOperand(2));
695 __ Bind(&l2);
696 __ mov(R0, ShifterOperand(3));
697
698 Label l3;
699 __ mov(R0, ShifterOperand(2));
700 __ Bind(&l3);
701 __ mov(R1, ShifterOperand(1));
702 __ b(&l3, EQ);
703
704 Label l4;
705 __ b(&l4, EQ);
706 __ mov(R1, ShifterOperand(2));
707 __ Bind(&l4);
708 __ mov(R0, ShifterOperand(3));
709
710 // 2 linked labels.
711 Label l5;
712 __ b(&l5);
713 __ mov(R1, ShifterOperand(4));
714 __ b(&l5);
715 __ mov(R1, ShifterOperand(5));
716 __ Bind(&l5);
717 __ mov(R0, ShifterOperand(6));
718
719 size_t cs = __ CodeSize();
720 std::vector<uint8_t> managed_code(cs);
721 MemoryRegion code(&managed_code[0], managed_code.size());
722 __ FinalizeInstructions(code);
723 dump(managed_code, "SimpleBranch");
724 delete assembler;
725}
726
727TEST(Thumb2AssemblerTest, LongBranch) {
728 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
729 assembler->Force32Bit();
730 // 32 bit branches.
731 Label l1;
732 __ mov(R0, ShifterOperand(2));
733 __ Bind(&l1);
734 __ mov(R1, ShifterOperand(1));
735 __ b(&l1);
736
737 Label l2;
738 __ b(&l2);
739 __ mov(R1, ShifterOperand(2));
740 __ Bind(&l2);
741 __ mov(R0, ShifterOperand(3));
742
743 Label l3;
744 __ mov(R0, ShifterOperand(2));
745 __ Bind(&l3);
746 __ mov(R1, ShifterOperand(1));
747 __ b(&l3, EQ);
748
749 Label l4;
750 __ b(&l4, EQ);
751 __ mov(R1, ShifterOperand(2));
752 __ Bind(&l4);
753 __ mov(R0, ShifterOperand(3));
754
755 // 2 linked labels.
756 Label l5;
757 __ b(&l5);
758 __ mov(R1, ShifterOperand(4));
759 __ b(&l5);
760 __ mov(R1, ShifterOperand(5));
761 __ Bind(&l5);
762 __ mov(R0, ShifterOperand(6));
763
764 size_t cs = __ CodeSize();
765 std::vector<uint8_t> managed_code(cs);
766 MemoryRegion code(&managed_code[0], managed_code.size());
767 __ FinalizeInstructions(code);
768 dump(managed_code, "LongBranch");
769 delete assembler;
770}
771
772TEST(Thumb2AssemblerTest, LoadMultiple) {
773 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
774
775 // 16 bit.
776 __ ldm(DB_W, R4, (1 << R0 | 1 << R3));
777
778 // 32 bit.
779 __ ldm(DB_W, R4, (1 << LR | 1 << R11));
780 __ ldm(DB, R4, (1 << LR | 1 << R11));
781
782 // Single reg is converted to ldr
783 __ ldm(DB_W, R4, (1 << R5));
784
785 size_t cs = __ CodeSize();
786 std::vector<uint8_t> managed_code(cs);
787 MemoryRegion code(&managed_code[0], managed_code.size());
788 __ FinalizeInstructions(code);
789 dump(managed_code, "LoadMultiple");
790 delete assembler;
791}
792
793TEST(Thumb2AssemblerTest, StoreMultiple) {
794 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
795
796 // 16 bit.
797 __ stm(IA_W, R4, (1 << R0 | 1 << R3));
798
799 // 32 bit.
800 __ stm(IA_W, R4, (1 << LR | 1 << R11));
801 __ stm(IA, R4, (1 << LR | 1 << R11));
802
803 // Single reg is converted to str
804 __ stm(IA_W, R4, (1 << R5));
805 __ stm(IA, R4, (1 << R5));
806
807 size_t cs = __ CodeSize();
808 std::vector<uint8_t> managed_code(cs);
809 MemoryRegion code(&managed_code[0], managed_code.size());
810 __ FinalizeInstructions(code);
811 dump(managed_code, "StoreMultiple");
812 delete assembler;
813}
814
815TEST(Thumb2AssemblerTest, MovWMovT) {
816 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
817
818 __ movw(R4, 0); // 16 bit.
819 __ movw(R4, 0x34); // 16 bit.
820 __ movw(R9, 0x34); // 32 bit due to high register.
821 __ movw(R3, 0x1234); // 32 bit due to large value.
822 __ movw(R9, 0xffff); // 32 bit due to large value and high register.
823
824 // Always 32 bit.
825 __ movt(R0, 0);
826 __ movt(R0, 0x1234);
827 __ movt(R1, 0xffff);
828
829 size_t cs = __ CodeSize();
830 std::vector<uint8_t> managed_code(cs);
831 MemoryRegion code(&managed_code[0], managed_code.size());
832 __ FinalizeInstructions(code);
833 dump(managed_code, "MovWMovT");
834 delete assembler;
835}
836
837TEST(Thumb2AssemblerTest, SpecialAddSub) {
838 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
839
840 __ add(R2, SP, ShifterOperand(0x50)); // 16 bit.
841 __ add(SP, SP, ShifterOperand(0x50)); // 16 bit.
842 __ add(R8, SP, ShifterOperand(0x50)); // 32 bit.
843
844 __ add(R2, SP, ShifterOperand(0xf00)); // 32 bit due to imm size.
845 __ add(SP, SP, ShifterOperand(0xf00)); // 32 bit due to imm size.
846
847 __ sub(SP, SP, ShifterOperand(0x50)); // 16 bit
848 __ sub(R0, SP, ShifterOperand(0x50)); // 32 bit
849 __ sub(R8, SP, ShifterOperand(0x50)); // 32 bit.
850
851 __ sub(SP, SP, ShifterOperand(0xf00)); // 32 bit due to imm size
852
853 size_t cs = __ CodeSize();
854 std::vector<uint8_t> managed_code(cs);
855 MemoryRegion code(&managed_code[0], managed_code.size());
856 __ FinalizeInstructions(code);
857 dump(managed_code, "SpecialAddSub");
858 delete assembler;
859}
860
861TEST(Thumb2AssemblerTest, StoreToOffset) {
862 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
863
864 __ StoreToOffset(kStoreWord, R2, R4, 12); // Simple
865 __ StoreToOffset(kStoreWord, R2, R4, 0x2000); // Offset too big.
866
867 size_t cs = __ CodeSize();
868 std::vector<uint8_t> managed_code(cs);
869 MemoryRegion code(&managed_code[0], managed_code.size());
870 __ FinalizeInstructions(code);
871 dump(managed_code, "StoreToOffset");
872 delete assembler;
873}
874
875
// Checks encodings of the IT (if-then) instruction with every supported
// then/else pattern (up to three condition slots after the first), followed
// by conditional movs that fill the IT block.
TEST(Thumb2AssemblerTest, IfThen) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // IT with a single conditional instruction.
  __ it(EQ);
  __ mov(R1, ShifterOperand(1), EQ);

  // ITT: two "then" slots.
  __ it(EQ, kItThen);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);

  // ITE: then + else.
  __ it(EQ, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);

  // ITTE: then + then + else.
  __ it(EQ, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), NE);

  // ITEE: then + else + else.
  __ it(EQ, kItElse, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);
  __ mov(R3, ShifterOperand(3), NE);

  // ITTTE: then + then + then + else.
  __ it(EQ, kItThen, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), EQ);
  __ mov(R4, ShifterOperand(4), NE);

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "IfThen");
  delete assembler;
}
913
// Checks cbz/cbnz with short forward branches to a bound label. The cbnz
// sequence writes to R8 in between, which needs a 32-bit mov encoding.
TEST(Thumb2AssemblerTest, CbzCbnz) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // cbz over two instructions.
  Label l1;
  __ cbz(R2, &l1);
  __ mov(R1, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l1);
  __ mov(R2, ShifterOperand(4));

  // cbnz over two instructions (one uses a high register).
  Label l2;
  __ cbnz(R2, &l2);
  __ mov(R8, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l2);
  __ mov(R2, ShifterOperand(4));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CbzCbnz");
  delete assembler;
}
938
// Checks multiply instructions (mul, mla, mls, umull) with both low and high
// register operands.
TEST(Thumb2AssemblerTest, Multiply) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ mul(R0, R1, R0);    // Rd == Rm: eligible for the 16-bit encoding.
  __ mul(R0, R1, R2);
  __ mul(R8, R9, R8);    // High registers.
  __ mul(R8, R9, R10);

  __ mla(R0, R1, R2, R3);
  __ mla(R8, R9, R8, R9);

  __ mls(R0, R1, R2, R3);
  __ mls(R8, R9, R8, R9);

  __ umull(R0, R1, R2, R3);
  __ umull(R8, R9, R10, R11);

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Multiply");
  delete assembler;
}
963
964TEST(Thumb2AssemblerTest, Divide) {
965 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
966
967 __ sdiv(R0, R1, R2);
968 __ sdiv(R8, R9, R10);
969
970 __ udiv(R0, R1, R2);
971 __ udiv(R8, R9, R10);
972
973 size_t cs = __ CodeSize();
974 std::vector<uint8_t> managed_code(cs);
975 MemoryRegion code(&managed_code[0], managed_code.size());
976 __ FinalizeInstructions(code);
977 dump(managed_code, "Divide");
978 delete assembler;
979}
980
981TEST(Thumb2AssemblerTest, VMov) {
982 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
983
984 __ vmovs(S1, 1.0);
985 __ vmovd(D1, 1.0);
986
987 __ vmovs(S1, S2);
988 __ vmovd(D1, D2);
989
990 size_t cs = __ CodeSize();
991 std::vector<uint8_t> managed_code(cs);
992 MemoryRegion code(&managed_code[0], managed_code.size());
993 __ FinalizeInstructions(code);
994 dump(managed_code, "VMov");
995 delete assembler;
996}
997
998
// Checks the basic single- and double-precision VFP arithmetic encodings
// (add, sub, mul, multiply-accumulate/subtract, div, abs, neg, sqrt).
TEST(Thumb2AssemblerTest, BasicFloatingPoint) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // Single precision.
  __ vadds(S0, S1, S2);
  __ vsubs(S0, S1, S2);
  __ vmuls(S0, S1, S2);
  __ vmlas(S0, S1, S2);
  __ vmlss(S0, S1, S2);
  __ vdivs(S0, S1, S2);
  __ vabss(S0, S1);
  __ vnegs(S0, S1);
  __ vsqrts(S0, S1);

  // Double precision.
  __ vaddd(D0, D1, D2);
  __ vsubd(D0, D1, D2);
  __ vmuld(D0, D1, D2);
  __ vmlad(D0, D1, D2);
  __ vmlsd(D0, D1, D2);
  __ vdivd(D0, D1, D2);
  __ vabsd(D0, D1);
  __ vnegd(D0, D1);
  __ vsqrtd(D0, D1);

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "BasicFloatingPoint");
  delete assembler;
}
1029
// Checks VFP conversion encodings: single<->double, and signed/unsigned
// integer <-> single/double conversions.
TEST(Thumb2AssemblerTest, FloatingPointConversions) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // Precision conversions.
  __ vcvtsd(S2, D2);
  __ vcvtds(D2, S2);

  // Signed integer <-> single.
  __ vcvtis(S1, S2);
  __ vcvtsi(S1, S2);

  // Signed integer <-> double.
  __ vcvtid(S1, D2);
  __ vcvtdi(D1, S2);

  // Unsigned integer <-> single.
  __ vcvtus(S1, S2);
  __ vcvtsu(S1, S2);

  // Unsigned integer <-> double.
  __ vcvtud(S1, D2);
  __ vcvtdu(D1, S2);

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "FloatingPointConversions");
  delete assembler;
}
1055
1056TEST(Thumb2AssemblerTest, FloatingPointComparisons) {
1057 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
1058
1059 __ vcmps(S0, S1);
1060 __ vcmpd(D0, D1);
1061
1062 __ vcmpsz(S2);
1063 __ vcmpdz(D2);
1064
1065 size_t cs = __ CodeSize();
1066 std::vector<uint8_t> managed_code(cs);
1067 MemoryRegion code(&managed_code[0], managed_code.size());
1068 __ FinalizeInstructions(code);
1069 dump(managed_code, "FloatingPointComparisons");
1070 delete assembler;
1071}
1072
1073TEST(Thumb2AssemblerTest, Calls) {
1074 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
1075
1076 __ blx(LR);
1077 __ bx(LR);
1078
1079 size_t cs = __ CodeSize();
1080 std::vector<uint8_t> managed_code(cs);
1081 MemoryRegion code(&managed_code[0], managed_code.size());
1082 __ FinalizeInstructions(code);
1083 dump(managed_code, "Calls");
1084 delete assembler;
1085}
1086
1087TEST(Thumb2AssemblerTest, Breakpoint) {
1088 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
1089
1090 __ bkpt(0);
1091
1092 size_t cs = __ CodeSize();
1093 std::vector<uint8_t> managed_code(cs);
1094 MemoryRegion code(&managed_code[0], managed_code.size());
1095 __ FinalizeInstructions(code);
1096 dump(managed_code, "Breakpoint");
1097 delete assembler;
1098}
1099
1100TEST(Thumb2AssemblerTest, StrR1) {
1101 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
1102
1103 __ str(R1, Address(SP, 68));
1104 __ str(R1, Address(SP, 1068));
1105
1106 size_t cs = __ CodeSize();
1107 std::vector<uint8_t> managed_code(cs);
1108 MemoryRegion code(&managed_code[0], managed_code.size());
1109 __ FinalizeInstructions(code);
1110 dump(managed_code, "StrR1");
1111 delete assembler;
1112}
1113
1114TEST(Thumb2AssemblerTest, VPushPop) {
1115 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
1116
1117 __ vpushs(S2, 4);
1118 __ vpushd(D2, 4);
1119
1120 __ vpops(S2, 4);
1121 __ vpopd(D2, 4);
1122
1123 size_t cs = __ CodeSize();
1124 std::vector<uint8_t> managed_code(cs);
1125 MemoryRegion code(&managed_code[0], managed_code.size());
1126 __ FinalizeInstructions(code);
1127 dump(managed_code, "VPushPop");
1128 delete assembler;
1129}
1130
// Checks a forward branch at exactly the maximum range of the 16-bit b
// encoding: the padding loop emits 1<<11 bytes of 16-bit movs, so the
// branch should still use the short form.
TEST(Thumb2AssemblerTest, Max16BitBranch) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ b(&l1);
  // Pad with (1 << 11) bytes of 16-bit instructions.
  for (int i = 0 ; i < (1 << 11) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Max16BitBranch");
  delete assembler;
}
1149
// Checks a forward branch just beyond the 16-bit b range ((1 << 11) + 2
// bytes of padding), which must be relocated to the 32-bit encoding.
TEST(Thumb2AssemblerTest, Branch32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ b(&l1);
  // Pad 2 bytes past the 16-bit branch range.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "Branch32");
  delete assembler;
}
1168
// Checks cbz at exactly the maximum range of the compare-and-branch
// encoding ((1 << 7) bytes of padding): the cbz form should still be used.
TEST(Thumb2AssemblerTest, CompareAndBranchMax) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R4, &l1);
  // Pad with (1 << 7) bytes of 16-bit instructions.
  for (int i = 0 ; i < (1 << 7) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CompareAndBranchMax");
  delete assembler;
}
1187
// Checks cbz just beyond the compare-and-branch range ((1 << 7) + 2 bytes
// of padding), forcing relocation to a cmp + 16-bit conditional branch.
TEST(Thumb2AssemblerTest, CompareAndBranchRelocation16) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R4, &l1);
  // Pad 2 bytes past the cbz branch range.
  for (int i = 0 ; i < (1 << 7) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CompareAndBranchRelocation16");
  delete assembler;
}
1206
// Checks cbz beyond even the 16-bit conditional branch range ((1 << 11) + 2
// bytes of padding), forcing relocation to a cmp + 32-bit branch.
TEST(Thumb2AssemblerTest, CompareAndBranchRelocation32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  __ cbz(R4, &l1);
  // Pad 2 bytes past the 16-bit branch range.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "CompareAndBranchRelocation32");
  delete assembler;
}
1225
// Checks a mix of long-range branches: a forward branch that must be
// relocated to the 32-bit form and a backwards branch over the same span.
TEST(Thumb2AssemblerTest, MixedBranch32) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  Label l1;
  Label l2;
  __ b(&l1);      // Forwards.
  __ Bind(&l2);

  // Space to force relocation.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ b(&l2);      // Backwards.
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "MixedBranch32");
  delete assembler;
}
1249
Dave Allison45fdb932014-06-25 12:37:10 -07001250TEST(Thumb2AssemblerTest, Shifts) {
1251 arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));
1252
1253 // 16 bit
1254 __ Lsl(R0, R1, 5);
1255 __ Lsr(R0, R1, 5);
1256 __ Asr(R0, R1, 5);
1257
1258 __ Lsl(R0, R0, R1);
1259 __ Lsr(R0, R0, R1);
1260 __ Asr(R0, R0, R1);
1261
1262 // 32 bit due to high registers.
1263 __ Lsl(R8, R1, 5);
1264 __ Lsr(R0, R8, 5);
1265 __ Asr(R8, R1, 5);
1266 __ Ror(R0, R8, 5);
1267
1268 // 32 bit due to different Rd and Rn.
1269 __ Lsl(R0, R1, R2);
1270 __ Lsr(R0, R1, R2);
1271 __ Asr(R0, R1, R2);
1272 __ Ror(R0, R1, R2);
1273
1274 // 32 bit due to use of high registers.
1275 __ Lsl(R8, R1, R2);
1276 __ Lsr(R0, R8, R2);
1277 __ Asr(R0, R1, R8);
1278
1279 // S bit (all 32 bit)
1280
1281 // 32 bit due to high registers.
1282 __ Lsl(R8, R1, 5, true);
1283 __ Lsr(R0, R8, 5, true);
1284 __ Asr(R8, R1, 5, true);
1285 __ Ror(R0, R8, 5, true);
1286
1287 // 32 bit due to different Rd and Rn.
1288 __ Lsl(R0, R1, R2, true);
1289 __ Lsr(R0, R1, R2, true);
1290 __ Asr(R0, R1, R2, true);
1291 __ Ror(R0, R1, R2, true);
1292
1293 // 32 bit due to use of high registers.
1294 __ Lsl(R8, R1, R2, true);
1295 __ Lsr(R0, R8, R2, true);
1296 __ Asr(R0, R1, R8, true);
1297
1298 size_t cs = __ CodeSize();
1299 std::vector<uint8_t> managed_code(cs);
1300 MemoryRegion code(&managed_code[0], managed_code.size());
1301 __ FinalizeInstructions(code);
1302 dump(managed_code, "Shifts");
1303 delete assembler;
1304}
1305
// Checks ldr/str with register offsets: the 16-bit forms need low registers
// and no shift; a shifted index register or any high register forces the
// 32-bit encoding.
TEST(Thumb2AssemblerTest, LoadStoreRegOffset) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  // 16 bit.
  __ ldr(R0, Address(R1, R2));
  __ str(R0, Address(R1, R2));

  // 32 bit due to shift.
  __ ldr(R0, Address(R1, R2, LSL, 1));
  __ str(R0, Address(R1, R2, LSL, 1));

  __ ldr(R0, Address(R1, R2, LSL, 3));
  __ str(R0, Address(R1, R2, LSL, 3));

  // 32 bit due to high register use.
  __ ldr(R8, Address(R1, R2));
  __ str(R8, Address(R1, R2));

  __ ldr(R1, Address(R8, R2));
  __ str(R2, Address(R8, R2));

  __ ldr(R0, Address(R1, R8));
  __ str(R0, Address(R1, R8));

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "LoadStoreRegOffset");
  delete assembler;
}
1337
// Checks PC-relative (literal) ldr/str with positive, negative, and
// boundary offsets; str has no 16-bit literal form, and offsets beyond
// the short range force 32-bit encodings.
TEST(Thumb2AssemblerTest, LoadStoreLiteral) {
  arm::Thumb2Assembler* assembler = static_cast<arm::Thumb2Assembler*>(Assembler::Create(kThumb2));

  __ ldr(R0, Address(4));
  __ str(R0, Address(4));

  // Negative offsets need the 32-bit encoding.
  __ ldr(R0, Address(-8));
  __ str(R0, Address(-8));

  // Limits.
  __ ldr(R0, Address(0x3ff));       // 10 bits (16 bit).
  __ ldr(R0, Address(0x7ff));       // 11 bits (32 bit).
  __ str(R0, Address(0x3ff));       // 32 bit (no 16 bit str(literal)).
  __ str(R0, Address(0x7ff));       // 11 bits (32 bit).

  // Finalize the generated code and compare against the expected output.
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);
  dump(managed_code, "LoadStoreLiteral");
  delete assembler;
}
1360
Dave Allison65fcc2c2014-04-28 13:45:27 -07001361#undef __
1362} // namespace arm
1363} // namespace art