/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <ctype.h>
#include <dirent.h>
#include <errno.h>
#include <fstream>
#include <map>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>

#include "gtest/gtest.h"
#include "utils/arm/assembler_thumb2.h"
#include "base/hex_dump.h"
#include "common_runtime_test.h"

namespace art {
namespace arm {

// Include results file (generated manually).
#include "assembler_thumb_test_expected.cc.inc"
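// For reference, the generated .cc.inc file is assumed to take roughly this
// shape (a sketch; the array names and exact encodings come from the
// generator, not from this file):
//
//   const char* const SimpleMovResults[] = {
//     "   0:   0008        movs  r0, r1\n",
//     ...
//     nullptr  // Terminator that DumpAndCheck() checks for.
//   };
//   std::map<std::string, const char* const*> test_results;
//   void setup_results() {
//     test_results["SimpleMov"] = SimpleMovResults;
//     ...
//   }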

#ifndef __ANDROID__
// This controls whether the results are printed to the screen or compared
// against the expected output. To generate new expected output, set this to
// true and copy the printed output into the .cc.inc file in the same form as
// the existing results. When it is false, nothing is printed and the results
// are compared against the expected results in the .cc.inc file.
static constexpr bool kPrintResults = false;
#endif

void SetAndroidData() {
  const char* data = getenv("ANDROID_DATA");
  if (data == nullptr) {
    setenv("ANDROID_DATA", "/tmp", 1);
  }
}

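// Compares two strings while skipping any run of whitespace on both sides, so
// the expected strings do not need to reproduce the disassembler's exact
// column layout. A sketch of the intended behavior (hypothetical inputs):
//
//   CompareIgnoringSpace("   0:  4608   mov r0, r1", "0: 4608 mov r0, r1") == 0
//   CompareIgnoringSpace("mov r0, r1", "mov r0, r2") != 0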
int CompareIgnoringSpace(const char* s1, const char* s2) {
  while (*s1 != '\0') {
    while (isspace(*s1)) ++s1;
    while (isspace(*s2)) ++s2;
    if (*s1 == '\0' || *s1 != *s2) {
      break;
    }
    ++s1;
    ++s2;
  }
  return *s1 - *s2;
}

void InitResults() {
  if (test_results.empty()) {
    setup_results();
  }
}

std::string GetToolsDir() {
#ifndef __ANDROID__
  // This will only work on the host. There is no as, objcopy or objdump on the device.
  static std::string toolsdir;

  if (toolsdir.empty()) {
    setup_results();
    toolsdir = CommonRuntimeTest::GetAndroidTargetToolsDir(kThumb2);
    SetAndroidData();
  }

  return toolsdir;
#else
  return std::string();
#endif
}

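// DumpAndCheck writes the generated code to a scratch assembly file and then
// drives the host cross-tools: "<toolsdir>as" to assemble it, "<toolsdir>objcopy"
// to strip the '$d' mapping symbols, and "<toolsdir>objdump" to disassemble the
// result for comparison. On a host build the tools prefix is assumed to point
// into the prebuilts, along the lines of (hypothetical path)
// "prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9/bin/arm-linux-androideabi-".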
void DumpAndCheck(std::vector<uint8_t>& code, const char* testname, const char* const* results) {
#ifndef __ANDROID__
  static std::string toolsdir = GetToolsDir();

  ScratchFile file;

  const char* filename = file.GetFilename().c_str();

  std::ofstream out(filename);
  if (out) {
    out << ".section \".text\"\n";
    out << ".syntax unified\n";
    out << ".arch armv7-a\n";
    out << ".thumb\n";
    out << ".thumb_func\n";
    out << ".type " << testname << ", #function\n";
    out << ".global " << testname << "\n";
    out << testname << ":\n";
    out << ".fnstart\n";

    for (uint32_t i = 0 ; i < code.size(); ++i) {
      out << ".byte " << (static_cast<int>(code[i]) & 0xff) << "\n";
    }
    out << ".fnend\n";
    out << ".size " << testname << ", .-" << testname << "\n";
  }
  out.close();
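  // For a test that emitted the two bytes 0x08 0x46, the scratch file written
  // above would look like this (with the actual test name substituted):
  //
  //   .section ".text"
  //   .syntax unified
  //   .arch armv7-a
  //   .thumb
  //   .thumb_func
  //   .type testname, #function
  //   .global testname
  //   testname:
  //   .fnstart
  //   .byte 8
  //   .byte 70
  //   .fnend
  //   .size testname, .-testname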

  char cmd[1024];

  // Assemble the .S
  snprintf(cmd, sizeof(cmd), "%sas %s -o %s.o", toolsdir.c_str(), filename, filename);
  int cmd_result = system(cmd);
  ASSERT_EQ(cmd_result, 0) << strerror(errno);

  // Remove the $d symbols to prevent the disassembler from dumping the
  // instructions as .word directives.
  snprintf(cmd, sizeof(cmd), "%sobjcopy -N '$d' %s.o %s.oo", toolsdir.c_str(), filename, filename);
  int cmd_result2 = system(cmd);
  ASSERT_EQ(cmd_result2, 0) << strerror(errno);

  // Disassemble.
  snprintf(cmd, sizeof(cmd), "%sobjdump -d %s.oo | grep '^ *[0-9a-f][0-9a-f]*:'",
           toolsdir.c_str(), filename);
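  // The grep keeps only the instruction lines of the objdump listing, which
  // look roughly like (hypothetical output):
  //
  //      0:   4608        mov   r0, r1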
  if (kPrintResults) {
    // Print the results only, don't check. This is used to generate new output for inserting
    // into the .inc file, so let's add the appropriate prefix/suffix needed in the C++ code.
    strcat(cmd, " | sed '-es/^/  \"/' | sed '-es/$/\\\\n\",/'");
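    // The two seds quote each line for direct pasting into the .cc.inc file,
    // turning the hypothetical objdump line above into:
    //
    //   "   0:   4608        mov   r0, r1\n",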
    int cmd_result3 = system(cmd);
    ASSERT_EQ(cmd_result3, 0) << strerror(errno);
  } else {
    // Check that the results match the expected results in the .inc file.
    FILE* fp = popen(cmd, "r");
    ASSERT_TRUE(fp != nullptr);

    uint32_t lineindex = 0;

    while (!feof(fp)) {
      char testline[256];
      char* s = fgets(testline, sizeof(testline), fp);
      if (s == nullptr) {
        break;
      }
      if (CompareIgnoringSpace(results[lineindex], testline) != 0) {
        LOG(FATAL) << "Output is not as expected at line " << lineindex
                   << ": expected \"" << results[lineindex] << "\", got \"" << testline << "\"";
      }
      ++lineindex;
    }
    // Check that we are at the end.
    ASSERT_TRUE(results[lineindex] == nullptr);
    pclose(fp);  // Matches the popen() above.
  }

  char buf[FILENAME_MAX];
  snprintf(buf, sizeof(buf), "%s.o", filename);
  unlink(buf);

  snprintf(buf, sizeof(buf), "%s.oo", filename);
  unlink(buf);
#endif
}

#define __ assembler->
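// The '__' shorthand (redefined to 'assembler.' for the tests below) makes
// the code read like an assembly listing: '__ FinalizeCode()' expands to
// 'assembler->FinalizeCode()'.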

void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname,
                  const char* const* results) {
  __ FinalizeCode();
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);

  DumpAndCheck(managed_code, testname, results);
}

void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname) {
  InitResults();
  std::map<std::string, const char* const*>::iterator results = test_results.find(testname);
  ASSERT_NE(results, test_results.end());

  EmitAndCheck(assembler, testname, results->second);
}

#undef __

#define __ assembler.

TEST(Thumb2AssemblerTest, SimpleMov) {
  arm::Thumb2Assembler assembler;

  __ movs(R0, ShifterOperand(R1));
  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  __ mov(R0, ShifterOperand(1));
  __ mov(R8, ShifterOperand(9));

  EmitAndCheck(&assembler, "SimpleMov");
}

TEST(Thumb2AssemblerTest, SimpleMov32) {
  arm::Thumb2Assembler assembler;
  __ Force32Bit();

  __ mov(R0, ShifterOperand(R1));
  __ mov(R8, ShifterOperand(R9));

  EmitAndCheck(&assembler, "SimpleMov32");
}

TEST(Thumb2AssemblerTest, SimpleMovAdd) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(R1));
  __ adds(R0, R1, ShifterOperand(R2));
  __ add(R0, R1, ShifterOperand(0));

  EmitAndCheck(&assembler, "SimpleMovAdd");
}

TEST(Thumb2AssemblerTest, DataProcessingRegister) {
  arm::Thumb2Assembler assembler;

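  // Note on the condition-code arguments used below: kCcSet (the "s" forms)
  // requires a flag-setting encoding, kCcKeep requires a flag-preserving one,
  // and the default kCcDontCare lets the assembler pick whichever encoding is
  // shortest.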
  // 32 bit variants using low registers.
  __ mvn(R0, ShifterOperand(R1), AL, kCcKeep);
  __ add(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ sub(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ and_(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ orr(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ orn(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ eor(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ bic(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ adc(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ sbc(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ rsb(R0, R1, ShifterOperand(R2), AL, kCcKeep);
  __ teq(R0, ShifterOperand(R1));

  // 16 bit variants using low registers.
  __ movs(R0, ShifterOperand(R1));
  __ mov(R0, ShifterOperand(R1), AL, kCcKeep);
  __ mvns(R0, ShifterOperand(R1));
  __ add(R0, R0, ShifterOperand(R1), AL, kCcKeep);
  __ adds(R0, R1, ShifterOperand(R2));
  __ subs(R0, R1, ShifterOperand(R2));
  __ adcs(R0, R0, ShifterOperand(R1));
  __ sbcs(R0, R0, ShifterOperand(R1));
  __ ands(R0, R0, ShifterOperand(R1));
  __ orrs(R0, R0, ShifterOperand(R1));
  __ eors(R0, R0, ShifterOperand(R1));
  __ bics(R0, R0, ShifterOperand(R1));
  __ tst(R0, ShifterOperand(R1));
  __ cmp(R0, ShifterOperand(R1));
  __ cmn(R0, ShifterOperand(R1));

  // 16-bit variants using high registers.
  __ mov(R1, ShifterOperand(R8), AL, kCcKeep);
  __ mov(R9, ShifterOperand(R0), AL, kCcKeep);
  __ mov(R8, ShifterOperand(R9), AL, kCcKeep);
  __ add(R1, R1, ShifterOperand(R8), AL, kCcKeep);
  __ add(R9, R9, ShifterOperand(R0), AL, kCcKeep);
  __ add(R8, R8, ShifterOperand(R9), AL, kCcKeep);
  __ cmp(R0, ShifterOperand(R9));
  __ cmp(R8, ShifterOperand(R1));
  __ cmp(R9, ShifterOperand(R8));

  // The 16-bit RSBS Rd, Rn, #0 (also known as NEGS Rd, Rn) is specified with
  // an immediate (0) but encoded without one, so we test it here.
  __ rsbs(R0, R1, ShifterOperand(0));
  __ rsbs(R0, R0, ShifterOperand(0));  // Check Rd == Rn code path.

  // 32 bit variants using high registers that would be 16-bit if using low registers.
  __ movs(R0, ShifterOperand(R8));
  __ mvns(R0, ShifterOperand(R8));
  __ add(R0, R1, ShifterOperand(R8), AL, kCcKeep);
  __ adds(R0, R1, ShifterOperand(R8));
  __ subs(R0, R1, ShifterOperand(R8));
  __ adcs(R0, R0, ShifterOperand(R8));
  __ sbcs(R0, R0, ShifterOperand(R8));
  __ ands(R0, R0, ShifterOperand(R8));
  __ orrs(R0, R0, ShifterOperand(R8));
  __ eors(R0, R0, ShifterOperand(R8));
  __ bics(R0, R0, ShifterOperand(R8));
  __ tst(R0, ShifterOperand(R8));
  __ cmn(R0, ShifterOperand(R8));
  __ rsbs(R0, R8, ShifterOperand(0));  // Check that this is not emitted as 16-bit.
  __ rsbs(R8, R8, ShifterOperand(0));  // Check that this is not emitted as 16-bit (Rd == Rn).

  // 32-bit variants of instructions that would be 16-bit outside IT block.
  __ it(arm::EQ);
  __ mvns(R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ adds(R0, R1, ShifterOperand(R2), arm::EQ);
  __ it(arm::EQ);
  __ subs(R0, R1, ShifterOperand(R2), arm::EQ);
  __ it(arm::EQ);
  __ adcs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ sbcs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ ands(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ orrs(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ eors(R0, R0, ShifterOperand(R1), arm::EQ);
  __ it(arm::EQ);
  __ bics(R0, R0, ShifterOperand(R1), arm::EQ);

  // 16-bit variants of instructions that would be 32-bit outside IT block.
  __ it(arm::EQ);
  __ mvn(R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ add(R0, R1, ShifterOperand(R2), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ sub(R0, R1, ShifterOperand(R2), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ adc(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ sbc(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ and_(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ orr(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ eor(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);
  __ it(arm::EQ);
  __ bic(R0, R0, ShifterOperand(R1), arm::EQ, kCcKeep);

  // 16 bit variants selected for the default kCcDontCare.
  __ mov(R0, ShifterOperand(R1));
  __ mvn(R0, ShifterOperand(R1));
  __ add(R0, R0, ShifterOperand(R1));
  __ add(R0, R1, ShifterOperand(R2));
  __ sub(R0, R1, ShifterOperand(R2));
  __ adc(R0, R0, ShifterOperand(R1));
  __ sbc(R0, R0, ShifterOperand(R1));
  __ and_(R0, R0, ShifterOperand(R1));
  __ orr(R0, R0, ShifterOperand(R1));
  __ eor(R0, R0, ShifterOperand(R1));
  __ bic(R0, R0, ShifterOperand(R1));
  __ mov(R1, ShifterOperand(R8));
  __ mov(R9, ShifterOperand(R0));
  __ mov(R8, ShifterOperand(R9));
  __ add(R1, R1, ShifterOperand(R8));
  __ add(R9, R9, ShifterOperand(R0));
  __ add(R8, R8, ShifterOperand(R9));
  __ rsb(R0, R1, ShifterOperand(0));
  __ rsb(R0, R0, ShifterOperand(0));

  // And an arbitrary 32-bit instruction using IP.
  __ add(R12, R1, ShifterOperand(R0), AL, kCcKeep);

  EmitAndCheck(&assembler, "DataProcessingRegister");
}

TEST(Thumb2AssemblerTest, DataProcessingImmediate) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x55));
  __ mvn(R0, ShifterOperand(0x55));
  __ add(R0, R1, ShifterOperand(0x55));
  __ sub(R0, R1, ShifterOperand(0x55));
  __ and_(R0, R1, ShifterOperand(0x55));
  __ orr(R0, R1, ShifterOperand(0x55));
  __ orn(R0, R1, ShifterOperand(0x55));
  __ eor(R0, R1, ShifterOperand(0x55));
  __ bic(R0, R1, ShifterOperand(0x55));
  __ adc(R0, R1, ShifterOperand(0x55));
  __ sbc(R0, R1, ShifterOperand(0x55));
  __ rsb(R0, R1, ShifterOperand(0x55));

  __ tst(R0, ShifterOperand(0x55));
  __ teq(R0, ShifterOperand(0x55));
  __ cmp(R0, ShifterOperand(0x55));
  __ cmn(R0, ShifterOperand(0x55));

  __ add(R0, R1, ShifterOperand(5));
  __ sub(R0, R1, ShifterOperand(5));

  __ movs(R0, ShifterOperand(0x55));
  __ mvns(R0, ShifterOperand(0x55));

  __ adds(R0, R1, ShifterOperand(5));
  __ subs(R0, R1, ShifterOperand(5));

  EmitAndCheck(&assembler, "DataProcessingImmediate");
}

TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediate) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x550055));
  __ mvn(R0, ShifterOperand(0x550055));
  __ add(R0, R1, ShifterOperand(0x550055));
  __ sub(R0, R1, ShifterOperand(0x550055));
  __ and_(R0, R1, ShifterOperand(0x550055));
  __ orr(R0, R1, ShifterOperand(0x550055));
  __ orn(R0, R1, ShifterOperand(0x550055));
  __ eor(R0, R1, ShifterOperand(0x550055));
  __ bic(R0, R1, ShifterOperand(0x550055));
  __ adc(R0, R1, ShifterOperand(0x550055));
  __ sbc(R0, R1, ShifterOperand(0x550055));
  __ rsb(R0, R1, ShifterOperand(0x550055));

  __ tst(R0, ShifterOperand(0x550055));
  __ teq(R0, ShifterOperand(0x550055));
  __ cmp(R0, ShifterOperand(0x550055));
  __ cmn(R0, ShifterOperand(0x550055));

  EmitAndCheck(&assembler, "DataProcessingModifiedImmediate");
}


TEST(Thumb2AssemblerTest, DataProcessingModifiedImmediates) {
  arm::Thumb2Assembler assembler;

  __ mov(R0, ShifterOperand(0x550055));
  __ mov(R0, ShifterOperand(0x55005500));
  __ mov(R0, ShifterOperand(0x55555555));
  __ mov(R0, ShifterOperand(0xd5000000));  // Rotated to first position.
  __ mov(R0, ShifterOperand(0x6a000000));  // Rotated to second position.
  __ mov(R0, ShifterOperand(0x350));       // Rotated to second-to-last position.
  __ mov(R0, ShifterOperand(0x1a8));       // Rotated to last position.

  EmitAndCheck(&assembler, "DataProcessingModifiedImmediates");
}

TEST(Thumb2AssemblerTest, DataProcessingShiftedRegister) {
  arm::Thumb2Assembler assembler;

  // 16-bit variants.
  __ movs(R3, ShifterOperand(R4, LSL, 4));
  __ movs(R3, ShifterOperand(R4, LSR, 5));
  __ movs(R3, ShifterOperand(R4, ASR, 6));

  // 32-bit ROR because ROR immediate doesn't have the same 16-bit version as other shifts.
  __ movs(R3, ShifterOperand(R4, ROR, 7));

  // 32-bit RRX because RRX has no 16-bit version.
  __ movs(R3, ShifterOperand(R4, RRX));

  // 32 bit variants (not setting condition codes).
  __ mov(R3, ShifterOperand(R4, LSL, 4), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, LSR, 5), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, ASR, 6), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, ROR, 7), AL, kCcKeep);
  __ mov(R3, ShifterOperand(R4, RRX), AL, kCcKeep);

  // 32 bit variants (high registers).
  __ movs(R8, ShifterOperand(R4, LSL, 4));
  __ movs(R8, ShifterOperand(R4, LSR, 5));
  __ movs(R8, ShifterOperand(R4, ASR, 6));
  __ movs(R8, ShifterOperand(R4, ROR, 7));
  __ movs(R8, ShifterOperand(R4, RRX));

  EmitAndCheck(&assembler, "DataProcessingShiftedRegister");
}

TEST(Thumb2AssemblerTest, ShiftImmediate) {
  // Note: This test produces the same results as DataProcessingShiftedRegister
  // but it does so using shift functions instead of mov().
  arm::Thumb2Assembler assembler;

  // 16-bit variants.
  __ Lsl(R3, R4, 4);
  __ Lsr(R3, R4, 5);
  __ Asr(R3, R4, 6);

  // 32-bit ROR because ROR immediate doesn't have the same 16-bit version as other shifts.
  __ Ror(R3, R4, 7);

  // 32-bit RRX because RRX has no 16-bit version.
  __ Rrx(R3, R4);

  // 32 bit variants (not setting condition codes).
  __ Lsl(R3, R4, 4, AL, kCcKeep);
  __ Lsr(R3, R4, 5, AL, kCcKeep);
  __ Asr(R3, R4, 6, AL, kCcKeep);
  __ Ror(R3, R4, 7, AL, kCcKeep);
  __ Rrx(R3, R4, AL, kCcKeep);

  // 32 bit variants (high registers).
  __ Lsls(R8, R4, 4);
  __ Lsrs(R8, R4, 5);
  __ Asrs(R8, R4, 6);
  __ Rors(R8, R4, 7);
  __ Rrxs(R8, R4);

  EmitAndCheck(&assembler, "ShiftImmediate");
}

TEST(Thumb2AssemblerTest, BasicLoad) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, 24));
  __ ldrb(R3, Address(R4, 24));
  __ ldrh(R3, Address(R4, 24));
  __ ldrsb(R3, Address(R4, 24));
  __ ldrsh(R3, Address(R4, 24));

  __ ldr(R3, Address(SP, 24));

  // 32 bit variants.
  __ ldr(R8, Address(R4, 24));
  __ ldrb(R8, Address(R4, 24));
  __ ldrh(R8, Address(R4, 24));
  __ ldrsb(R8, Address(R4, 24));
  __ ldrsh(R8, Address(R4, 24));

  EmitAndCheck(&assembler, "BasicLoad");
}


TEST(Thumb2AssemblerTest, BasicStore) {
  arm::Thumb2Assembler assembler;

  __ str(R3, Address(R4, 24));
  __ strb(R3, Address(R4, 24));
  __ strh(R3, Address(R4, 24));

  __ str(R3, Address(SP, 24));

  // 32 bit variants.
  __ str(R8, Address(R4, 24));
  __ strb(R8, Address(R4, 24));
  __ strh(R8, Address(R4, 24));

  EmitAndCheck(&assembler, "BasicStore");
}

TEST(Thumb2AssemblerTest, ComplexLoad) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, 24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexLoad");
}


TEST(Thumb2AssemblerTest, ComplexStore) {
  arm::Thumb2Assembler assembler;

  __ str(R3, Address(R4, 24, Address::Mode::Offset));
  __ str(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, 24, Address::Mode::Offset));
  __ strb(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, 24, Address::Mode::Offset));
  __ strh(R3, Address(R4, 24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexStore");
}

TEST(Thumb2AssemblerTest, NegativeLoadStore) {
  arm::Thumb2Assembler assembler;

  __ ldr(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldr(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldr(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsb(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ ldrsh(R3, Address(R4, -24, Address::Mode::Offset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ ldrsh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ str(R3, Address(R4, -24, Address::Mode::Offset));
  __ str(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ str(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ str(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strb(R3, Address(R4, -24, Address::Mode::Offset));
  __ strb(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strb(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  __ strh(R3, Address(R4, -24, Address::Mode::Offset));
  __ strh(R3, Address(R4, -24, Address::Mode::PreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::PostIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegOffset));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPreIndex));
  __ strh(R3, Address(R4, -24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "NegativeLoadStore");
}

TEST(Thumb2AssemblerTest, SimpleLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));

  EmitAndCheck(&assembler, "SimpleLoadStoreDual");
}

TEST(Thumb2AssemblerTest, ComplexLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, 24, Address::Mode::Offset));
  __ strd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, 24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, 24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "ComplexLoadStoreDual");
}

TEST(Thumb2AssemblerTest, NegativeLoadStoreDual) {
  arm::Thumb2Assembler assembler;

  __ strd(R2, Address(R0, -24, Address::Mode::Offset));
  __ strd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ strd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  __ ldrd(R2, Address(R0, -24, Address::Mode::Offset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::PostIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegOffset));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPreIndex));
  __ ldrd(R2, Address(R0, -24, Address::Mode::NegPostIndex));

  EmitAndCheck(&assembler, "NegativeLoadStoreDual");
}

TEST(Thumb2AssemblerTest, SimpleBranch) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);
  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  EmitAndCheck(&assembler, "SimpleBranch");
}

TEST(Thumb2AssemblerTest, LongBranch) {
  arm::Thumb2Assembler assembler;
  __ Force32Bit();
  // 32 bit branches.
  Label l1;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(1));
  __ b(&l1);

  Label l2;
  __ b(&l2);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l2);
  __ mov(R0, ShifterOperand(3));

  Label l3;
  __ mov(R0, ShifterOperand(2));
  __ Bind(&l3);
  __ mov(R1, ShifterOperand(1));
  __ b(&l3, EQ);

  Label l4;
  __ b(&l4, EQ);
  __ mov(R1, ShifterOperand(2));
  __ Bind(&l4);
  __ mov(R0, ShifterOperand(3));

  // 2 linked labels.
  Label l5;
  __ b(&l5);
  __ mov(R1, ShifterOperand(4));
  __ b(&l5);
  __ mov(R1, ShifterOperand(5));
  __ Bind(&l5);
  __ mov(R0, ShifterOperand(6));

  EmitAndCheck(&assembler, "LongBranch");
}

TEST(Thumb2AssemblerTest, LoadMultiple) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ ldm(DB_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ ldm(DB_W, R4, (1 << LR | 1 << R11));
  __ ldm(DB, R4, (1 << LR | 1 << R11));

  // A single register in the list is converted to an ldr.
  __ ldm(DB_W, R4, (1 << R5));

  EmitAndCheck(&assembler, "LoadMultiple");
}

TEST(Thumb2AssemblerTest, StoreMultiple) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ stm(IA_W, R4, (1 << R0 | 1 << R3));

  // 32 bit.
  __ stm(IA_W, R4, (1 << LR | 1 << R11));
  __ stm(IA, R4, (1 << LR | 1 << R11));

  // A single register in the list is converted to a str.
  __ stm(IA_W, R4, (1 << R5));
  __ stm(IA, R4, (1 << R5));

  EmitAndCheck(&assembler, "StoreMultiple");
}

TEST(Thumb2AssemblerTest, MovWMovT) {
  arm::Thumb2Assembler assembler;

  // Always 32 bit.
  __ movw(R4, 0);
  __ movw(R4, 0x34);
  __ movw(R9, 0x34);
  __ movw(R3, 0x1234);
  __ movw(R9, 0xffff);

  // Always 32 bit.
  __ movt(R0, 0);
  __ movt(R0, 0x1234);
  __ movt(R1, 0xffff);

  EmitAndCheck(&assembler, "MovWMovT");
}
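
// A sketch of how the movw/movt pair is used in practice (assumed usage, not
// part of the test): movw writes the low half-word and zeroes the upper one,
// movt writes the upper half-word, so
//   __ movw(R0, 0x5678);
//   __ movt(R0, 0x1234);
// leaves R0 == 0x12345678.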

TEST(Thumb2AssemblerTest, SpecialAddSub) {
  arm::Thumb2Assembler assembler;

  __ add(R2, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(SP, SP, ShifterOperand(0x50));   // 16 bit.
  __ add(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ add(R2, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ add(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ add(SP, SP, ShifterOperand(0xffc));  // 32 bit due to imm size; encoding T4.

  __ sub(SP, SP, ShifterOperand(0x50));   // 16 bit.
  __ sub(R0, SP, ShifterOperand(0x50));   // 32 bit.
  __ sub(R8, SP, ShifterOperand(0x50));   // 32 bit.

  __ sub(SP, SP, ShifterOperand(0xf00));  // 32 bit due to imm size.
  __ sub(SP, SP, ShifterOperand(0xffc));  // 32 bit due to imm size; encoding T4.

  EmitAndCheck(&assembler, "SpecialAddSub");
}

TEST(Thumb2AssemblerTest, LoadFromOffset) {
  arm::Thumb2Assembler assembler;

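  // LoadFromOffset() is a macro-instruction: an offset that does not fit the
  // single-instruction encodings is assumed to be materialized first (using
  // the IP scratch register) before the actual load, which is why offsets
  // like 0x101000 are accepted below. The same applies to StoreToOffset().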
  __ LoadFromOffset(kLoadWord, R2, R4, 12);
  __ LoadFromOffset(kLoadWord, R2, R4, 0xfff);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x1000);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x1000a4);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x101000);
  __ LoadFromOffset(kLoadWord, R4, R4, 0x101000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 12);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0xfff);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000a4);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x101000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R4, R4, 0x101000);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 12);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x3fc);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400a4);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x40400);
  __ LoadFromOffset(kLoadWordPair, R4, R4, 0x40400);

  __ LoadFromOffset(kLoadWord, R0, R12, 12);  // 32-bit because of R12.
  __ LoadFromOffset(kLoadWord, R2, R4, 0xa4 - 0x100000);

  __ LoadFromOffset(kLoadSignedByte, R2, R4, 12);
  __ LoadFromOffset(kLoadUnsignedByte, R2, R4, 12);
  __ LoadFromOffset(kLoadSignedHalfword, R2, R4, 12);

  EmitAndCheck(&assembler, "LoadFromOffset");
}

TEST(Thumb2AssemblerTest, StoreToOffset) {
  arm::Thumb2Assembler assembler;

  __ StoreToOffset(kStoreWord, R2, R4, 12);
  __ StoreToOffset(kStoreWord, R2, R4, 0xfff);
  __ StoreToOffset(kStoreWord, R2, R4, 0x1000);
  __ StoreToOffset(kStoreWord, R2, R4, 0x1000a4);
  __ StoreToOffset(kStoreWord, R2, R4, 0x101000);
  __ StoreToOffset(kStoreWord, R4, R4, 0x101000);
  __ StoreToOffset(kStoreHalfword, R2, R4, 12);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0xfff);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000a4);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x101000);
  __ StoreToOffset(kStoreHalfword, R4, R4, 0x101000);
  __ StoreToOffset(kStoreWordPair, R2, R4, 12);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x3fc);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x400);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x400a4);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x40400);
  __ StoreToOffset(kStoreWordPair, R4, R4, 0x40400);

  __ StoreToOffset(kStoreWord, R0, R12, 12);  // 32-bit because of R12.
  __ StoreToOffset(kStoreWord, R2, R4, 0xa4 - 0x100000);

  __ StoreToOffset(kStoreByte, R2, R4, 12);

  EmitAndCheck(&assembler, "StoreToOffset");
}

TEST(Thumb2AssemblerTest, IfThen) {
  arm::Thumb2Assembler assembler;

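  // it() opens an IT block: the next instruction is conditional on the tested
  // condition, and each extra kItThen/kItElse argument extends the block by
  // one more instruction using that condition (Then) or its inverse (Else),
  // up to four instructions in total.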
  __ it(EQ);
  __ mov(R1, ShifterOperand(1), EQ);

  __ it(EQ, kItThen);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);

  __ it(EQ, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);

  __ it(EQ, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), NE);

  __ it(EQ, kItElse, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), NE);
  __ mov(R3, ShifterOperand(3), NE);

  __ it(EQ, kItThen, kItThen, kItElse);
  __ mov(R1, ShifterOperand(1), EQ);
  __ mov(R2, ShifterOperand(2), EQ);
  __ mov(R3, ShifterOperand(3), EQ);
  __ mov(R4, ShifterOperand(4), NE);

  EmitAndCheck(&assembler, "IfThen");
}

TEST(Thumb2AssemblerTest, CbzCbnz) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R2, &l1);
  __ mov(R1, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l1);
  __ mov(R2, ShifterOperand(4));

  Label l2;
  __ cbnz(R2, &l2);
  __ mov(R8, ShifterOperand(3));
  __ mov(R2, ShifterOperand(3));
  __ Bind(&l2);
  __ mov(R2, ShifterOperand(4));

  EmitAndCheck(&assembler, "CbzCbnz");
}

TEST(Thumb2AssemblerTest, Multiply) {
  arm::Thumb2Assembler assembler;

  __ mul(R0, R1, R0);
  __ mul(R0, R1, R2);
  __ mul(R8, R9, R8);
  __ mul(R8, R9, R10);

  __ mla(R0, R1, R2, R3);
  __ mla(R8, R9, R8, R9);

  __ mls(R0, R1, R2, R3);
  __ mls(R8, R9, R8, R9);

  __ umull(R0, R1, R2, R3);
  __ umull(R8, R9, R10, R11);

  EmitAndCheck(&assembler, "Multiply");
}

TEST(Thumb2AssemblerTest, Divide) {
  arm::Thumb2Assembler assembler;

  __ sdiv(R0, R1, R2);
  __ sdiv(R8, R9, R10);

  __ udiv(R0, R1, R2);
  __ udiv(R8, R9, R10);

  EmitAndCheck(&assembler, "Divide");
}

TEST(Thumb2AssemblerTest, VMov) {
  arm::Thumb2Assembler assembler;

  __ vmovs(S1, 1.0);
  __ vmovd(D1, 1.0);

  __ vmovs(S1, S2);
  __ vmovd(D1, D2);

  EmitAndCheck(&assembler, "VMov");
}


TEST(Thumb2AssemblerTest, BasicFloatingPoint) {
  arm::Thumb2Assembler assembler;

  __ vadds(S0, S1, S2);
  __ vsubs(S0, S1, S2);
  __ vmuls(S0, S1, S2);
  __ vmlas(S0, S1, S2);
  __ vmlss(S0, S1, S2);
  __ vdivs(S0, S1, S2);
  __ vabss(S0, S1);
  __ vnegs(S0, S1);
  __ vsqrts(S0, S1);

  __ vaddd(D0, D1, D2);
  __ vsubd(D0, D1, D2);
  __ vmuld(D0, D1, D2);
  __ vmlad(D0, D1, D2);
  __ vmlsd(D0, D1, D2);
  __ vdivd(D0, D1, D2);
  __ vabsd(D0, D1);
  __ vnegd(D0, D1);
  __ vsqrtd(D0, D1);

  EmitAndCheck(&assembler, "BasicFloatingPoint");
}

TEST(Thumb2AssemblerTest, FloatingPointConversions) {
  arm::Thumb2Assembler assembler;

  __ vcvtsd(S2, D2);
  __ vcvtds(D2, S2);

  __ vcvtis(S1, S2);
  __ vcvtsi(S1, S2);

  __ vcvtid(S1, D2);
  __ vcvtdi(D1, S2);

  __ vcvtus(S1, S2);
  __ vcvtsu(S1, S2);

  __ vcvtud(S1, D2);
  __ vcvtdu(D1, S2);

  EmitAndCheck(&assembler, "FloatingPointConversions");
}

TEST(Thumb2AssemblerTest, FloatingPointComparisons) {
  arm::Thumb2Assembler assembler;

  __ vcmps(S0, S1);
  __ vcmpd(D0, D1);

  __ vcmpsz(S2);
  __ vcmpdz(D2);

  EmitAndCheck(&assembler, "FloatingPointComparisons");
}

TEST(Thumb2AssemblerTest, Calls) {
  arm::Thumb2Assembler assembler;

  __ blx(LR);
  __ bx(LR);

  EmitAndCheck(&assembler, "Calls");
}

TEST(Thumb2AssemblerTest, Breakpoint) {
  arm::Thumb2Assembler assembler;

  __ bkpt(0);

  EmitAndCheck(&assembler, "Breakpoint");
}

TEST(Thumb2AssemblerTest, StrR1) {
  arm::Thumb2Assembler assembler;

  __ str(R1, Address(SP, 68));
  __ str(R1, Address(SP, 1068));

  EmitAndCheck(&assembler, "StrR1");
}

TEST(Thumb2AssemblerTest, VPushPop) {
  arm::Thumb2Assembler assembler;

  __ vpushs(S2, 4);
  __ vpushd(D2, 4);

  __ vpops(S2, 4);
  __ vpopd(D2, 4);

  EmitAndCheck(&assembler, "VPushPop");
}

TEST(Thumb2AssemblerTest, Max16BitBranch) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ b(&l1);
  for (int i = 0 ; i < (1 << 11) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
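  // Each 16-bit mov above occupies two bytes, so the loop emits exactly
  // (1 << 11) bytes, the farthest a 16-bit unconditional branch can reach;
  // Branch32 below adds one more instruction to force the 32-bit encoding.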
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "Max16BitBranch");
}

TEST(Thumb2AssemblerTest, Branch32) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ b(&l1);
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "Branch32");
}

TEST(Thumb2AssemblerTest, CompareAndBranchMax) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R4, &l1);
  for (int i = 0 ; i < (1 << 7) ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "CompareAndBranchMax");
}

TEST(Thumb2AssemblerTest, CompareAndBranchRelocation16) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R4, &l1);
  for (int i = 0 ; i < (1 << 7) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "CompareAndBranchRelocation16");
}

TEST(Thumb2AssemblerTest, CompareAndBranchRelocation32) {
  arm::Thumb2Assembler assembler;

  Label l1;
  __ cbz(R4, &l1);
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "CompareAndBranchRelocation32");
}

TEST(Thumb2AssemblerTest, MixedBranch32) {
  arm::Thumb2Assembler assembler;

  Label l1;
  Label l2;
  __ b(&l1);  // Forwards.
  __ Bind(&l2);

  // Space to force relocation.
  for (int i = 0 ; i < (1 << 11) + 2 ; i += 2) {
    __ mov(R3, ShifterOperand(i & 0xff));
  }
  __ b(&l2);  // Backwards.
  __ Bind(&l1);
  __ mov(R1, ShifterOperand(R2));

  EmitAndCheck(&assembler, "MixedBranch32");
}

TEST(Thumb2AssemblerTest, Shifts) {
  arm::Thumb2Assembler assembler;

  // 16 bit selected for the default kCcDontCare.
  __ Lsl(R0, R1, 5);
  __ Lsr(R0, R1, 5);
  __ Asr(R0, R1, 5);

  __ Lsl(R0, R0, R1);
  __ Lsr(R0, R0, R1);
  __ Asr(R0, R0, R1);
  __ Ror(R0, R0, R1);

  // 16 bit with kCcSet.
  __ Lsls(R0, R1, 5);
  __ Lsrs(R0, R1, 5);
  __ Asrs(R0, R1, 5);

  __ Lsls(R0, R0, R1);
  __ Lsrs(R0, R0, R1);
  __ Asrs(R0, R0, R1);
  __ Rors(R0, R0, R1);

  // 32-bit with kCcKeep.
  __ Lsl(R0, R1, 5, AL, kCcKeep);
  __ Lsr(R0, R1, 5, AL, kCcKeep);
  __ Asr(R0, R1, 5, AL, kCcKeep);

  __ Lsl(R0, R0, R1, AL, kCcKeep);
  __ Lsr(R0, R0, R1, AL, kCcKeep);
  __ Asr(R0, R0, R1, AL, kCcKeep);
  __ Ror(R0, R0, R1, AL, kCcKeep);

  // 32-bit because ROR immediate doesn't have a 16-bit version like the other shifts.
  __ Ror(R0, R1, 5);
  __ Rors(R0, R1, 5);
  __ Ror(R0, R1, 5, AL, kCcKeep);

  // 32 bit due to high registers.
  __ Lsl(R8, R1, 5);
  __ Lsr(R0, R8, 5);
  __ Asr(R8, R1, 5);
  __ Ror(R0, R8, 5);

  // 32 bit due to different Rd and Rn.
  __ Lsl(R0, R1, R2);
  __ Lsr(R0, R1, R2);
  __ Asr(R0, R1, R2);
  __ Ror(R0, R1, R2);

  // 32 bit due to use of high registers.
  __ Lsl(R8, R1, R2);
  __ Lsr(R0, R8, R2);
  __ Asr(R0, R1, R8);

  // S bit (all 32 bit).

  // 32 bit due to high registers.
  __ Lsls(R8, R1, 5);
  __ Lsrs(R0, R8, 5);
  __ Asrs(R8, R1, 5);
  __ Rors(R0, R8, 5);

  // 32 bit due to different Rd and Rn.
  __ Lsls(R0, R1, R2);
  __ Lsrs(R0, R1, R2);
  __ Asrs(R0, R1, R2);
  __ Rors(R0, R1, R2);

  // 32 bit due to use of high registers.
  __ Lsls(R8, R1, R2);
  __ Lsrs(R0, R8, R2);
  __ Asrs(R0, R1, R8);

  EmitAndCheck(&assembler, "Shifts");
}

TEST(Thumb2AssemblerTest, LoadStoreRegOffset) {
  arm::Thumb2Assembler assembler;

  // 16 bit.
  __ ldr(R0, Address(R1, R2));
  __ str(R0, Address(R1, R2));

  // 32 bit due to shift.
  __ ldr(R0, Address(R1, R2, LSL, 1));
  __ str(R0, Address(R1, R2, LSL, 1));

  __ ldr(R0, Address(R1, R2, LSL, 3));
  __ str(R0, Address(R1, R2, LSL, 3));

  // 32 bit due to high register use.
  __ ldr(R8, Address(R1, R2));
  __ str(R8, Address(R1, R2));

  __ ldr(R1, Address(R8, R2));
  __ str(R2, Address(R8, R2));

  __ ldr(R0, Address(R1, R8));
  __ str(R0, Address(R1, R8));

  EmitAndCheck(&assembler, "LoadStoreRegOffset");
}

TEST(Thumb2AssemblerTest, LoadStoreLiteral) {
  arm::Thumb2Assembler assembler;

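  // An Address constructed from an offset alone (no base register) is assumed
  // to select PC-relative (literal) addressing.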
  __ ldr(R0, Address(4));
  __ str(R0, Address(4));

  __ ldr(R0, Address(-8));
  __ str(R0, Address(-8));

  // Limits.
  __ ldr(R0, Address(0x3ff));  // 10 bits (16 bit).
  __ ldr(R0, Address(0x7ff));  // 11 bits (32 bit).
  __ str(R0, Address(0x3ff));  // 32 bit (no 16 bit str(literal)).
  __ str(R0, Address(0x7ff));  // 11 bits (32 bit).

  EmitAndCheck(&assembler, "LoadStoreLiteral");
}

TEST(Thumb2AssemblerTest, LoadStoreLimits) {
  arm::Thumb2Assembler assembler;

  __ ldr(R0, Address(R4, 124));    // 16 bit.
  __ ldr(R0, Address(R4, 128));    // 32 bit.

  __ ldrb(R0, Address(R4, 31));    // 16 bit.
  __ ldrb(R0, Address(R4, 32));    // 32 bit.

  __ ldrh(R0, Address(R4, 62));    // 16 bit.
  __ ldrh(R0, Address(R4, 64));    // 32 bit.

  __ ldrsb(R0, Address(R4, 31));   // 32 bit.
  __ ldrsb(R0, Address(R4, 32));   // 32 bit.

  __ ldrsh(R0, Address(R4, 62));   // 32 bit.
  __ ldrsh(R0, Address(R4, 64));   // 32 bit.

  __ str(R0, Address(R4, 124));    // 16 bit.
  __ str(R0, Address(R4, 128));    // 32 bit.

  __ strb(R0, Address(R4, 31));    // 16 bit.
  __ strb(R0, Address(R4, 32));    // 32 bit.

  __ strh(R0, Address(R4, 62));    // 16 bit.
  __ strh(R0, Address(R4, 64));    // 32 bit.

  EmitAndCheck(&assembler, "LoadStoreLimits");
}

TEST(Thumb2AssemblerTest, CompareAndBranch) {
  arm::Thumb2Assembler assembler;

  Label label;
  __ CompareAndBranchIfZero(arm::R0, &label);
  __ CompareAndBranchIfZero(arm::R11, &label);
  __ CompareAndBranchIfNonZero(arm::R0, &label);
  __ CompareAndBranchIfNonZero(arm::R11, &label);
  __ Bind(&label);

  EmitAndCheck(&assembler, "CompareAndBranch");
}

TEST(Thumb2AssemblerTest, AddConstant) {
  arm::Thumb2Assembler assembler;

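  // AddConstant() picks the cheapest sequence for each constant: a 16- or
  // 32-bit ADD/SUB when the immediate fits an encoding, otherwise a
  // materialization sequence as named in the comments below (e.g. MOVW+ADD
  // first loads a 16-bit immediate, MVN+SUB uses the bitwise complement,
  // and MOVW+MOVT+ADD builds a full 32-bit constant).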
1386 // Low registers, Rd != Rn.
1387 __ AddConstant(R0, R1, 0); // MOV.
1388 __ AddConstant(R0, R1, 1); // 16-bit ADDS, encoding T1.
1389 __ AddConstant(R0, R1, 7); // 16-bit ADDS, encoding T1.
1390 __ AddConstant(R0, R1, 8); // 32-bit ADD, encoding T3.
1391 __ AddConstant(R0, R1, 255); // 32-bit ADD, encoding T3.
1392 __ AddConstant(R0, R1, 256); // 32-bit ADD, encoding T3.
1393 __ AddConstant(R0, R1, 257); // 32-bit ADD, encoding T4.
1394 __ AddConstant(R0, R1, 0xfff); // 32-bit ADD, encoding T4.
1395 __ AddConstant(R0, R1, 0x1000); // 32-bit ADD, encoding T3.
1396 __ AddConstant(R0, R1, 0x1001); // MVN+SUB.
1397 __ AddConstant(R0, R1, 0x1002); // MOVW+ADD.
1398 __ AddConstant(R0, R1, 0xffff); // MOVW+ADD.
1399 __ AddConstant(R0, R1, 0x10000); // 32-bit ADD, encoding T3.
1400 __ AddConstant(R0, R1, 0x10001); // 32-bit ADD, encoding T3.
1401 __ AddConstant(R0, R1, 0x10002); // MVN+SUB.
1402 __ AddConstant(R0, R1, 0x10003); // MOVW+MOVT+ADD.
1403 __ AddConstant(R0, R1, -1); // 16-bit SUBS.
1404 __ AddConstant(R0, R1, -7); // 16-bit SUBS.
1405 __ AddConstant(R0, R1, -8); // 32-bit SUB, encoding T3.
1406 __ AddConstant(R0, R1, -255); // 32-bit SUB, encoding T3.
1407 __ AddConstant(R0, R1, -256); // 32-bit SUB, encoding T3.
1408 __ AddConstant(R0, R1, -257); // 32-bit SUB, encoding T4.
1409 __ AddConstant(R0, R1, -0xfff); // 32-bit SUB, encoding T4.
1410 __ AddConstant(R0, R1, -0x1000); // 32-bit SUB, encoding T3.
1411 __ AddConstant(R0, R1, -0x1001); // MVN+ADD.
1412 __ AddConstant(R0, R1, -0x1002); // MOVW+SUB.
1413 __ AddConstant(R0, R1, -0xffff); // MOVW+SUB.
1414 __ AddConstant(R0, R1, -0x10000); // 32-bit SUB, encoding T3.
1415 __ AddConstant(R0, R1, -0x10001); // 32-bit SUB, encoding T3.
1416 __ AddConstant(R0, R1, -0x10002); // MVN+ADD.
1417 __ AddConstant(R0, R1, -0x10003); // MOVW+MOVT+ADD.
1418
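  // Why T3 vs. T4 above: encoding T3 takes a "modified immediate" (an 8-bit
  // pattern, optionally rotated or replicated, e.g. 255, 0x1000 = 1 << 12 or
  // 0x10001 = 0x00010001), while encoding T4 takes any plain 12-bit value
  // (e.g. 257 or 0xfff). 0x1001 fits neither, so it is materialized in IP.
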
  // Low registers, Rd == Rn.
  __ AddConstant(R0, R0, 0);          // Nothing.
  __ AddConstant(R1, R1, 1);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R0, R0, 7);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R1, R1, 8);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R0, R0, 255);        // 16-bit ADDS, encoding T2.
  __ AddConstant(R1, R1, 256);        // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R0, 257);        // 32-bit ADD, encoding T4.
  __ AddConstant(R1, R1, 0xfff);      // 32-bit ADD, encoding T4.
  __ AddConstant(R0, R0, 0x1000);     // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, 0x1001);     // MVN+SUB.
  __ AddConstant(R0, R0, 0x1002);     // MOVW+ADD.
  __ AddConstant(R1, R1, 0xffff);     // MOVW+ADD.
  __ AddConstant(R0, R0, 0x10000);    // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, 0x10001);    // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R0, 0x10002);    // MVN+SUB.
  __ AddConstant(R1, R1, 0x10003);    // MOVW+MOVT+ADD.
  __ AddConstant(R0, R0, -1);         // 16-bit SUBS, encoding T2.
  __ AddConstant(R1, R1, -7);         // 16-bit SUBS, encoding T2.
  __ AddConstant(R0, R0, -8);         // 16-bit SUBS, encoding T2.
  __ AddConstant(R1, R1, -255);       // 16-bit SUBS, encoding T2.
  __ AddConstant(R0, R0, -256);       // 32-bit SUB, encoding T3.
  __ AddConstant(R1, R1, -257);       // 32-bit SUB, encoding T4.
  __ AddConstant(R0, R0, -0xfff);     // 32-bit SUB, encoding T4.
  __ AddConstant(R1, R1, -0x1000);    // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R0, -0x1001);    // MVN+ADD.
  __ AddConstant(R1, R1, -0x1002);    // MOVW+SUB.
  __ AddConstant(R0, R0, -0xffff);    // MOVW+SUB.
  __ AddConstant(R1, R1, -0x10000);   // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R0, -0x10001);   // 32-bit SUB, encoding T3.
  __ AddConstant(R1, R1, -0x10002);   // MVN+ADD.
  __ AddConstant(R0, R0, -0x10003);   // MOVW+MOVT+ADD.

  // High registers.
  __ AddConstant(R8, R8, 0);          // Nothing.
  __ AddConstant(R8, R1, 1);          // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R8, 7);          // 32-bit ADD, encoding T3.
  __ AddConstant(R8, R8, 8);          // 32-bit ADD, encoding T3.
  __ AddConstant(R8, R1, 255);        // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R8, 256);        // 32-bit ADD, encoding T3.
  __ AddConstant(R8, R8, 257);        // 32-bit ADD, encoding T4.
  __ AddConstant(R8, R1, 0xfff);      // 32-bit ADD, encoding T4.
  __ AddConstant(R0, R8, 0x1000);     // 32-bit ADD, encoding T3.
  __ AddConstant(R8, R8, 0x1001);     // MVN+SUB.
  __ AddConstant(R0, R1, 0x1002);     // MOVW+ADD.
  __ AddConstant(R0, R8, 0xffff);     // MOVW+ADD.
  __ AddConstant(R8, R8, 0x10000);    // 32-bit ADD, encoding T3.
  __ AddConstant(R8, R1, 0x10001);    // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R8, 0x10002);    // MVN+SUB.
  __ AddConstant(R0, R8, 0x10003);    // MOVW+MOVT+ADD.
  __ AddConstant(R8, R8, -1);         // 32-bit ADD, encoding T3.
  __ AddConstant(R8, R1, -7);         // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R8, -8);         // 32-bit SUB, encoding T3.
  __ AddConstant(R8, R8, -255);       // 32-bit SUB, encoding T3.
  __ AddConstant(R8, R1, -256);       // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R8, -257);       // 32-bit SUB, encoding T4.
  __ AddConstant(R8, R8, -0xfff);     // 32-bit SUB, encoding T4.
  __ AddConstant(R8, R1, -0x1000);    // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R8, -0x1001);    // MVN+ADD.
  __ AddConstant(R0, R1, -0x1002);    // MOVW+SUB.
  __ AddConstant(R8, R1, -0xffff);    // MOVW+SUB.
  __ AddConstant(R0, R8, -0x10000);   // 32-bit SUB, encoding T3.
  __ AddConstant(R8, R8, -0x10001);   // 32-bit SUB, encoding T3.
  __ AddConstant(R8, R1, -0x10002);   // MVN+ADD.
  __ AddConstant(R0, R8, -0x10003);   // MOVW+MOVT+ADD.

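  // kCcKeep vs. kCcSet below: outside an IT block the 16-bit ADD/SUB
  // immediate encodings always set flags, so kCcKeep rules them out even for
  // tiny constants, while kCcSet keeps the short ADDS/SUBS forms usable.
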
  // Low registers, Rd != Rn, kCcKeep.
  __ AddConstant(R0, R1, 0, AL, kCcKeep);         // MOV.
  __ AddConstant(R0, R1, 1, AL, kCcKeep);         // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 7, AL, kCcKeep);         // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 8, AL, kCcKeep);         // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 255, AL, kCcKeep);       // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 256, AL, kCcKeep);       // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 257, AL, kCcKeep);       // 32-bit ADD, encoding T4.
  __ AddConstant(R0, R1, 0xfff, AL, kCcKeep);     // 32-bit ADD, encoding T4.
  __ AddConstant(R0, R1, 0x1000, AL, kCcKeep);    // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 0x1001, AL, kCcKeep);    // MVN+SUB.
  __ AddConstant(R0, R1, 0x1002, AL, kCcKeep);    // MOVW+ADD.
  __ AddConstant(R0, R1, 0xffff, AL, kCcKeep);    // MOVW+ADD.
  __ AddConstant(R0, R1, 0x10000, AL, kCcKeep);   // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 0x10001, AL, kCcKeep);   // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, 0x10002, AL, kCcKeep);   // MVN+SUB.
  __ AddConstant(R0, R1, 0x10003, AL, kCcKeep);   // MOVW+MOVT+ADD.
  __ AddConstant(R0, R1, -1, AL, kCcKeep);        // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R1, -7, AL, kCcKeep);        // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -8, AL, kCcKeep);        // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -255, AL, kCcKeep);      // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -256, AL, kCcKeep);      // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -257, AL, kCcKeep);      // 32-bit SUB, encoding T4.
  __ AddConstant(R0, R1, -0xfff, AL, kCcKeep);    // 32-bit SUB, encoding T4.
  __ AddConstant(R0, R1, -0x1000, AL, kCcKeep);   // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -0x1001, AL, kCcKeep);   // MVN+ADD.
  __ AddConstant(R0, R1, -0x1002, AL, kCcKeep);   // MOVW+SUB.
  __ AddConstant(R0, R1, -0xffff, AL, kCcKeep);   // MOVW+SUB.
  __ AddConstant(R0, R1, -0x10000, AL, kCcKeep);  // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -0x10001, AL, kCcKeep);  // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R1, -0x10002, AL, kCcKeep);  // MVN+ADD.
  __ AddConstant(R0, R1, -0x10003, AL, kCcKeep);  // MOVW+MOVT+ADD.

  // Low registers, Rd == Rn, kCcKeep.
  __ AddConstant(R0, R0, 0, AL, kCcKeep);         // Nothing.
  __ AddConstant(R1, R1, 1, AL, kCcKeep);         // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R0, 7, AL, kCcKeep);         // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, 8, AL, kCcKeep);         // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R0, 255, AL, kCcKeep);       // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, 256, AL, kCcKeep);       // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R0, 257, AL, kCcKeep);       // 32-bit ADD, encoding T4.
  __ AddConstant(R1, R1, 0xfff, AL, kCcKeep);     // 32-bit ADD, encoding T4.
  __ AddConstant(R0, R0, 0x1000, AL, kCcKeep);    // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, 0x1001, AL, kCcKeep);    // MVN+SUB.
  __ AddConstant(R0, R0, 0x1002, AL, kCcKeep);    // MOVW+ADD.
  __ AddConstant(R1, R1, 0xffff, AL, kCcKeep);    // MOVW+ADD.
  __ AddConstant(R0, R0, 0x10000, AL, kCcKeep);   // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, 0x10001, AL, kCcKeep);   // 32-bit ADD, encoding T3.
  __ AddConstant(R0, R0, 0x10002, AL, kCcKeep);   // MVN+SUB.
  __ AddConstant(R1, R1, 0x10003, AL, kCcKeep);   // MOVW+MOVT+ADD.
  __ AddConstant(R0, R0, -1, AL, kCcKeep);        // 32-bit ADD, encoding T3.
  __ AddConstant(R1, R1, -7, AL, kCcKeep);        // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R0, -8, AL, kCcKeep);        // 32-bit SUB, encoding T3.
  __ AddConstant(R1, R1, -255, AL, kCcKeep);      // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R0, -256, AL, kCcKeep);      // 32-bit SUB, encoding T3.
  __ AddConstant(R1, R1, -257, AL, kCcKeep);      // 32-bit SUB, encoding T4.
  __ AddConstant(R0, R0, -0xfff, AL, kCcKeep);    // 32-bit SUB, encoding T4.
  __ AddConstant(R1, R1, -0x1000, AL, kCcKeep);   // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R0, -0x1001, AL, kCcKeep);   // MVN+ADD.
  __ AddConstant(R1, R1, -0x1002, AL, kCcKeep);   // MOVW+SUB.
  __ AddConstant(R0, R0, -0xffff, AL, kCcKeep);   // MOVW+SUB.
  __ AddConstant(R1, R1, -0x10000, AL, kCcKeep);  // 32-bit SUB, encoding T3.
  __ AddConstant(R0, R0, -0x10001, AL, kCcKeep);  // 32-bit SUB, encoding T3.
  __ AddConstant(R1, R1, -0x10002, AL, kCcKeep);  // MVN+ADD.
  __ AddConstant(R0, R0, -0x10003, AL, kCcKeep);  // MOVW+MOVT+ADD.

  // Low registers, Rd != Rn, kCcSet.
  __ AddConstant(R0, R1, 0, AL, kCcSet);          // 16-bit ADDS.
  __ AddConstant(R0, R1, 1, AL, kCcSet);          // 16-bit ADDS.
  __ AddConstant(R0, R1, 7, AL, kCcSet);          // 16-bit ADDS.
  __ AddConstant(R0, R1, 8, AL, kCcSet);          // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R1, 255, AL, kCcSet);        // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R1, 256, AL, kCcSet);        // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R1, 257, AL, kCcSet);        // MVN+SUBS.
  __ AddConstant(R0, R1, 0xfff, AL, kCcSet);      // MOVW+ADDS.
  __ AddConstant(R0, R1, 0x1000, AL, kCcSet);     // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R1, 0x1001, AL, kCcSet);     // MVN+SUBS.
  __ AddConstant(R0, R1, 0x1002, AL, kCcSet);     // MOVW+ADDS.
  __ AddConstant(R0, R1, 0xffff, AL, kCcSet);     // MOVW+ADDS.
  __ AddConstant(R0, R1, 0x10000, AL, kCcSet);    // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R1, 0x10001, AL, kCcSet);    // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R1, 0x10002, AL, kCcSet);    // MVN+SUBS.
  __ AddConstant(R0, R1, 0x10003, AL, kCcSet);    // MOVW+MOVT+ADDS.
  __ AddConstant(R0, R1, -1, AL, kCcSet);         // 16-bit SUBS.
  __ AddConstant(R0, R1, -7, AL, kCcSet);         // 16-bit SUBS.
  __ AddConstant(R0, R1, -8, AL, kCcSet);         // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R1, -255, AL, kCcSet);       // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R1, -256, AL, kCcSet);       // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R1, -257, AL, kCcSet);       // MVN+ADDS.
  __ AddConstant(R0, R1, -0xfff, AL, kCcSet);     // MOVW+SUBS.
  __ AddConstant(R0, R1, -0x1000, AL, kCcSet);    // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R1, -0x1001, AL, kCcSet);    // MVN+ADDS.
  __ AddConstant(R0, R1, -0x1002, AL, kCcSet);    // MOVW+SUBS.
  __ AddConstant(R0, R1, -0xffff, AL, kCcSet);    // MOVW+SUBS.
  __ AddConstant(R0, R1, -0x10000, AL, kCcSet);   // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R1, -0x10001, AL, kCcSet);   // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R1, -0x10002, AL, kCcSet);   // MVN+ADDS.
  __ AddConstant(R0, R1, -0x10003, AL, kCcSet);   // MOVW+MOVT+ADDS.

  // Low registers, Rd == Rn, kCcSet.
  __ AddConstant(R0, R0, 0, AL, kCcSet);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R1, R1, 1, AL, kCcSet);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R0, R0, 7, AL, kCcSet);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R1, R1, 8, AL, kCcSet);          // 16-bit ADDS, encoding T2.
  __ AddConstant(R0, R0, 255, AL, kCcSet);        // 16-bit ADDS, encoding T2.
  __ AddConstant(R1, R1, 256, AL, kCcSet);        // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R0, 257, AL, kCcSet);        // MVN+SUBS.
  __ AddConstant(R1, R1, 0xfff, AL, kCcSet);      // MOVW+ADDS.
  __ AddConstant(R0, R0, 0x1000, AL, kCcSet);     // 32-bit ADDS, encoding T3.
  __ AddConstant(R1, R1, 0x1001, AL, kCcSet);     // MVN+SUBS.
  __ AddConstant(R0, R0, 0x1002, AL, kCcSet);     // MOVW+ADDS.
  __ AddConstant(R1, R1, 0xffff, AL, kCcSet);     // MOVW+ADDS.
  __ AddConstant(R0, R0, 0x10000, AL, kCcSet);    // 32-bit ADDS, encoding T3.
  __ AddConstant(R1, R1, 0x10001, AL, kCcSet);    // 32-bit ADDS, encoding T3.
  __ AddConstant(R0, R0, 0x10002, AL, kCcSet);    // MVN+SUBS.
  __ AddConstant(R1, R1, 0x10003, AL, kCcSet);    // MOVW+MOVT+ADDS.
  __ AddConstant(R0, R0, -1, AL, kCcSet);         // 16-bit SUBS, encoding T2.
  __ AddConstant(R1, R1, -7, AL, kCcSet);         // 16-bit SUBS, encoding T2.
  __ AddConstant(R0, R0, -8, AL, kCcSet);         // 16-bit SUBS, encoding T2.
  __ AddConstant(R1, R1, -255, AL, kCcSet);       // 16-bit SUBS, encoding T2.
  __ AddConstant(R0, R0, -256, AL, kCcSet);       // 32-bit SUBS, encoding T3.
  __ AddConstant(R1, R1, -257, AL, kCcSet);       // MVN+ADDS.
  __ AddConstant(R0, R0, -0xfff, AL, kCcSet);     // MOVW+SUBS.
  __ AddConstant(R1, R1, -0x1000, AL, kCcSet);    // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R0, -0x1001, AL, kCcSet);    // MVN+ADDS.
  __ AddConstant(R1, R1, -0x1002, AL, kCcSet);    // MOVW+SUBS.
  __ AddConstant(R0, R0, -0xffff, AL, kCcSet);    // MOVW+SUBS.
  __ AddConstant(R1, R1, -0x10000, AL, kCcSet);   // 32-bit SUBS, encoding T3.
  __ AddConstant(R0, R0, -0x10001, AL, kCcSet);   // 32-bit SUBS, encoding T3.
  __ AddConstant(R1, R1, -0x10002, AL, kCcSet);   // MVN+ADDS.
  __ AddConstant(R0, R0, -0x10003, AL, kCcSet);   // MOVW+MOVT+ADDS.

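  // Inside an IT block the 16-bit ADD/SUB encodings do not set flags, so the
  // trade-off flips: kCcKeep can use the short forms under IT, while kCcSet
  // needs a 32-bit encoding with an explicit S bit.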
  __ it(EQ);
  __ AddConstant(R0, R1, 1, EQ, kCcSet);          // 32-bit ADDS, encoding T3.
  __ it(NE);
  __ AddConstant(R0, R1, 1, NE, kCcKeep);         // 16-bit ADDS, encoding T1.
  __ it(GE);
  __ AddConstant(R0, R0, 1, GE, kCcSet);          // 32-bit ADDS, encoding T3.
  __ it(LE);
  __ AddConstant(R0, R0, 1, LE, kCcKeep);         // 16-bit ADDS, encoding T2.

  EmitAndCheck(&assembler, "AddConstant");
}

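// A hedged sketch (not used by the tests) of the selection ladder the
// AddConstant comments document, ignoring the 16-bit shortcuts: try ADD with
// the constant itself, then SUB with its negation, then materialize the
// constant in IP via MVN or MOVW, and only as a last resort use MOVW+MOVT.
// The two predicates stand in for the assembler's real encodability checks
// (ADD/SUB modified-immediate-or-12-bit vs. MVN modified-immediate-only);
// they are assumptions, not ART's actual API.
enum AddConstantPlan {
  kPlanAdd,           // ADD, encoding T3/T4.
  kPlanSubNeg,        // SUB of the negated constant.
  kPlanMvnAddSub,     // MVN IP, then ADD/SUB with IP.
  kPlanMovwAddSub,    // MOVW IP, then ADD/SUB with IP.
  kPlanMovwMovtAdd,   // MOVW+MOVT IP, then ADD with IP.
};
static inline AddConstantPlan PlanAddConstant(uint32_t value,
                                              bool (*add_sub_imm)(uint32_t),
                                              bool (*mvn_imm)(uint32_t)) {
  uint32_t neg = -value;
  if (add_sub_imm(value)) return kPlanAdd;
  if (add_sub_imm(neg)) return kPlanSubNeg;
  if (mvn_imm(~value) || mvn_imm(~neg)) return kPlanMvnAddSub;
  if (value <= 0xffffu || neg <= 0xffffu) return kPlanMovwAddSub;
  return kPlanMovwMovtAdd;
}
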
TEST(Thumb2AssemblerTest, CmpConstant) {
  arm::Thumb2Assembler assembler;

  __ CmpConstant(R0, 0);          // 16-bit CMP.
  __ CmpConstant(R1, 1);          // 16-bit CMP.
  __ CmpConstant(R0, 7);          // 16-bit CMP.
  __ CmpConstant(R1, 8);          // 16-bit CMP.
  __ CmpConstant(R0, 255);        // 16-bit CMP.
  __ CmpConstant(R1, 256);        // 32-bit CMP.
  __ CmpConstant(R0, 257);        // MVN+CMN.
  __ CmpConstant(R1, 0xfff);      // MOVW+CMP.
  __ CmpConstant(R0, 0x1000);     // 32-bit CMP.
  __ CmpConstant(R1, 0x1001);     // MVN+CMN.
  __ CmpConstant(R0, 0x1002);     // MOVW+CMP.
  __ CmpConstant(R1, 0xffff);     // MOVW+CMP.
  __ CmpConstant(R0, 0x10000);    // 32-bit CMP.
  __ CmpConstant(R1, 0x10001);    // 32-bit CMP.
  __ CmpConstant(R0, 0x10002);    // MVN+CMN.
  __ CmpConstant(R1, 0x10003);    // MOVW+MOVT+CMP.
  __ CmpConstant(R0, -1);         // 32-bit CMP.
  __ CmpConstant(R1, -7);         // CMN.
  __ CmpConstant(R0, -8);         // CMN.
  __ CmpConstant(R1, -255);       // CMN.
  __ CmpConstant(R0, -256);       // CMN.
  __ CmpConstant(R1, -257);       // MVN+CMP.
  __ CmpConstant(R0, -0xfff);     // MOVW+CMN.
  __ CmpConstant(R1, -0x1000);    // CMN.
  __ CmpConstant(R0, -0x1001);    // MVN+CMP.
  __ CmpConstant(R1, -0x1002);    // MOVW+CMN.
  __ CmpConstant(R0, -0xffff);    // MOVW+CMN.
  __ CmpConstant(R1, -0x10000);   // CMN.
  __ CmpConstant(R0, -0x10001);   // CMN.
  __ CmpConstant(R1, -0x10002);   // MVN+CMP.
  __ CmpConstant(R0, -0x10003);   // MOVW+MOVT+CMP.

  __ CmpConstant(R8, 0);          // 32-bit CMP.
  __ CmpConstant(R9, 1);          // 32-bit CMP.
  __ CmpConstant(R8, 7);          // 32-bit CMP.
  __ CmpConstant(R9, 8);          // 32-bit CMP.
  __ CmpConstant(R8, 255);        // 32-bit CMP.
  __ CmpConstant(R9, 256);        // 32-bit CMP.
  __ CmpConstant(R8, 257);        // MVN+CMN.
  __ CmpConstant(R9, 0xfff);      // MOVW+CMP.
  __ CmpConstant(R8, 0x1000);     // 32-bit CMP.
  __ CmpConstant(R9, 0x1001);     // MVN+CMN.
  __ CmpConstant(R8, 0x1002);     // MOVW+CMP.
  __ CmpConstant(R9, 0xffff);     // MOVW+CMP.
  __ CmpConstant(R8, 0x10000);    // 32-bit CMP.
  __ CmpConstant(R9, 0x10001);    // 32-bit CMP.
  __ CmpConstant(R8, 0x10002);    // MVN+CMN.
  __ CmpConstant(R9, 0x10003);    // MOVW+MOVT+CMP.
  __ CmpConstant(R8, -1);         // 32-bit CMP.
  __ CmpConstant(R9, -7);         // CMN.
  __ CmpConstant(R8, -8);         // CMN.
  __ CmpConstant(R9, -255);       // CMN.
  __ CmpConstant(R8, -256);       // CMN.
  __ CmpConstant(R9, -257);       // MVN+CMP.
  __ CmpConstant(R8, -0xfff);     // MOVW+CMN.
  __ CmpConstant(R9, -0x1000);    // CMN.
  __ CmpConstant(R8, -0x1001);    // MVN+CMP.
  __ CmpConstant(R9, -0x1002);    // MOVW+CMN.
  __ CmpConstant(R8, -0xffff);    // MOVW+CMN.
  __ CmpConstant(R9, -0x10000);   // CMN.
  __ CmpConstant(R8, -0x10001);   // CMN.
  __ CmpConstant(R9, -0x10002);   // MVN+CMP.
  __ CmpConstant(R8, -0x10003);   // MOVW+MOVT+CMP.

  EmitAndCheck(&assembler, "CmpConstant");
}

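// A hedged sketch of the CMP/CMN symmetry the comments above rely on:
// CMP Rn, #imm computes Rn - imm while CMN Rn, #imm computes Rn + imm, so a
// constant that CMP cannot encode may still be handled by CMN with the
// negated value before falling back to building it in IP. The predicate is
// again an assumed stand-in for the assembler's encoding check.
static inline bool CmpConstantNeedsIp(uint32_t value, bool (*cmp_cmn_imm)(uint32_t)) {
  // IP is needed only when neither CMP #value nor CMN #-value encodes.
  return !cmp_cmn_imm(value) && !cmp_cmn_imm(-value);
}
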
#undef __
}  // namespace arm
}  // namespace art