/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
Vladimir Markocf93a5c2015-06-16 11:33:24 +000028void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
29 CHECK(!label->IsBound());
30
31 while (label->IsLinked()) {
32 FixupId fixup_id = label->Position(); // The id for linked Fixup.
33 Fixup* fixup = GetFixup(fixup_id); // Get the Fixup at this id.
34 fixup->Resolve(bound_pc); // Fixup can be resolved now.
35 // Add this fixup as a dependency of all later fixups.
36 for (FixupId id = fixup_id + 1u, end = fixups_.size(); id != end; ++id) {
37 GetFixup(id)->AddDependent(fixup_id);
38 }
39 uint32_t fixup_location = fixup->GetLocation();
40 uint16_t next = buffer_.Load<uint16_t>(fixup_location); // Get next in chain.
41 buffer_.Store<int16_t>(fixup_location, 0);
42 label->position_ = next; // Move to next.
43 }
44 label->BindTo(bound_pc);
45}
46
47void Thumb2Assembler::BindLiterals() {
48 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
49 uint32_t code_size = buffer_.Size();
50 for (Literal& lit : literals_) {
51 Label* label = lit.GetLabel();
52 BindLabel(label, code_size);
53 code_size += lit.GetSize();
54 }
55}
56
57void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
58 std::deque<FixupId>* fixups_to_recalculate) {
59 uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
60 if (adjustment != 0u) {
61 *current_code_size += adjustment;
62 for (FixupId dependent_id : fixup->Dependents()) {
63 Fixup* dependent = GetFixup(dependent_id);
64 dependent->IncreaseAdjustment(adjustment);
65 if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
66 buffer_.Store<int16_t>(dependent->GetLocation(), 1);
67 fixups_to_recalculate->push_back(dependent_id);
68 }
69 }
70 }
71}
72
73uint32_t Thumb2Assembler::AdjustFixups() {
74 uint32_t current_code_size = buffer_.Size();
75 std::deque<FixupId> fixups_to_recalculate;
76 if (kIsDebugBuild) {
77 // We will use the placeholders in the buffer_ to mark whether the fixup has
78 // been added to the fixups_to_recalculate. Make sure we start with zeros.
79 for (Fixup& fixup : fixups_) {
80 CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
81 }
82 }
83 for (Fixup& fixup : fixups_) {
84 AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
85 }
86 while (!fixups_to_recalculate.empty()) {
87 // Pop the fixup.
88 FixupId fixup_id = fixups_to_recalculate.front();
89 fixups_to_recalculate.pop_front();
90 Fixup* fixup = GetFixup(fixup_id);
91 DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
92 buffer_.Store<int16_t>(fixup->GetLocation(), 0);
93 // See if it needs adjustment.
94 AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
95 }
96 if (kIsDebugBuild) {
97 // Check that no fixup is marked as being in fixups_to_recalculate anymore.
98 for (Fixup& fixup : fixups_) {
99 CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
100 }
101 }
102
103 // Adjust literal pool labels for padding.
104 DCHECK_EQ(current_code_size & 1u, 0u);
105 uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
106 if (literals_adjustment != 0u) {
107 for (Literal& literal : literals_) {
108 Label* label = literal.GetLabel();
109 DCHECK(label->IsBound());
110 int old_position = label->Position();
111 label->Reinitialize();
112 label->BindTo(old_position + literals_adjustment);
113 }
114 }
115
116 return current_code_size;
117}
118
119void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
120 // Move non-fixup code to its final place and emit fixups.
121 // Process fixups in reverse order so that we don't repeatedly move the same data.
122 size_t src_end = buffer_.Size();
123 size_t dest_end = adjusted_code_size;
124 buffer_.Resize(dest_end);
125 DCHECK_GE(dest_end, src_end);
126 for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
127 Fixup* fixup = &*i;
128 if (fixup->GetOriginalSize() == fixup->GetSize()) {
129 // The size of this Fixup didn't change. To avoid moving the data
130 // in small chunks, emit the code to its original position.
131 fixup->Emit(&buffer_, adjusted_code_size);
132 fixup->Finalize(dest_end - src_end);
133 } else {
134 // Move the data between the end of the fixup and src_end to its final location.
135 size_t old_fixup_location = fixup->GetLocation();
136 size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
137 size_t data_size = src_end - src_begin;
138 size_t dest_begin = dest_end - data_size;
139 buffer_.Move(dest_begin, src_begin, data_size);
140 src_end = old_fixup_location;
141 dest_end = dest_begin - fixup->GetSizeInBytes();
142 // Finalize the Fixup and emit the data to the new location.
143 fixup->Finalize(dest_end - src_end);
144 fixup->Emit(&buffer_, adjusted_code_size);
145 }
146 }
147 CHECK_EQ(src_end, dest_end);
148}
149
150void Thumb2Assembler::EmitLiterals() {
151 if (!literals_.empty()) {
152 // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
153 // We don't support byte and half-word literals.
154 uint32_t code_size = buffer_.Size();
155 DCHECK_EQ(code_size & 1u, 0u);
156 if ((code_size & 2u) != 0u) {
157 Emit16(0);
158 }
159 for (Literal& literal : literals_) {
160 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
161 DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
162 DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
163 for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
164 buffer_.Emit<uint8_t>(literal.GetData()[i]);
165 }
166 }
167 }
168}
169
170inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
171 DCHECK_EQ(offset & 1, 0);
172 int16_t encoding = B15 | B14;
173 if (cond != AL) {
174 DCHECK(IsInt<9>(offset));
175 encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
176 } else {
177 DCHECK(IsInt<12>(offset));
178 encoding |= B13 | ((offset >> 1) & 0x7ff);
179 }
180 return encoding;
181}
182
183inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
184 DCHECK_EQ(offset & 1, 0);
185 int32_t s = (offset >> 31) & 1; // Sign bit.
186 int32_t encoding = B31 | B30 | B29 | B28 | B15 |
187 (s << 26) | // Sign bit goes to bit 26.
188 ((offset >> 1) & 0x7ff); // imm11 goes to bits 0-10.
189 if (cond != AL) {
190 DCHECK(IsInt<21>(offset));
191 // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
192 encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
193 ((offset & (1 << 19)) >> (19 - 13)) | // Extract J1 from bit 19 to bit 13.
194 ((offset & (1 << 18)) >> (18 - 11)); // Extract J2 from bit 18 to bit 11.
195 } else {
196 DCHECK(IsInt<25>(offset));
197 int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1; // Calculate J1 from I1 extracted from bit 23.
198 int32_t j2 = ((offset >> 22)^ s ^ 1) & 1; // Calculate J2 from I2 extracted from bit 22.
199 // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
200 encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
201 (j1 << 13) | (j2 << 11);
202 }
203 return encoding;
204}
205
206inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
207 DCHECK(!IsHighRegister(rn));
208 DCHECK_EQ(offset & 1, 0);
209 DCHECK(IsUint<7>(offset));
210 DCHECK(cond == EQ || cond == NE);
211 return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
212 ((offset & 0x3e) << (3 - 1)) | // Move imm5 from bits 1-5 to bits 3-7.
213 ((offset & 0x40) << (9 - 6)); // Move i from bit 6 to bit 11
214}
215
216inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
217 DCHECK(!IsHighRegister(rn));
218 DCHECK(IsUint<8>(value));
219 return B13 | B11 | (rn << 8) | value;
220}
221
222inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
223 // The high bit of rn is moved across 4-bit rm.
224 return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
225 (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
226}
227
228inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
229 DCHECK(IsUint<16>(value));
230 return B31 | B30 | B29 | B28 | B25 | B22 |
231 (static_cast<int32_t>(rd) << 8) |
232 ((value & 0xf000) << (16 - 12)) | // Move imm4 from bits 12-15 to bits 16-19.
233 ((value & 0x0800) << (26 - 11)) | // Move i from bit 11 to bit 26.
234 ((value & 0x0700) << (12 - 8)) | // Move imm3 from bits 8-10 to bits 12-14.
235 (value & 0xff); // Keep imm8 in bits 0-7.
236}
237
238inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
239 DCHECK_EQ(value & 0xffff, 0);
240 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
241 return movw_encoding | B25 | B23;
242}
243
244inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
245 uint32_t mod_imm = ModifiedImmediate(value);
246 DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
247 return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
248 (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
249}
250
251inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
252 DCHECK(!IsHighRegister(rt));
253 DCHECK_EQ(offset & 3, 0);
254 DCHECK(IsUint<10>(offset));
255 return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
256}
257
258inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
259 // NOTE: We don't support negative offset, i.e. U=0 (B23).
260 return LdrRtRnImm12Encoding(rt, PC, offset);
261}
262
263inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
264 DCHECK_EQ(offset & 3, 0);
265 CHECK(IsUint<10>(offset));
266 return B31 | B30 | B29 | B27 |
267 B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
268 (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
269 (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
270}
271
272inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
273 DCHECK_EQ(offset & 3, 0);
274 CHECK(IsUint<10>(offset));
275 return B31 | B30 | B29 | B27 | B26 | B24 |
276 B23 /* U = 1 */ | B20 | B11 | B9 |
277 (static_cast<int32_t>(rn) << 16) |
278 ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) | // Move D from bit 0 to bit 22.
279 ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) | // Move Vd from bits 1-4 to bits 12-15.
280 (offset >> 2);
281}
282
283inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
284 DCHECK_EQ(offset & 3, 0);
285 CHECK(IsUint<10>(offset));
286 return B31 | B30 | B29 | B27 | B26 | B24 |
287 B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
288 (rn << 16) |
289 ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) | // Move D from bit 4 to bit 22.
290 ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) | // Move Vd from bits 0-3 to bits 12-15.
291 (offset >> 2);
292}
293
294inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
295 DCHECK(!IsHighRegister(rt));
296 DCHECK(!IsHighRegister(rn));
297 DCHECK_EQ(offset & 3, 0);
298 DCHECK(IsUint<7>(offset));
299 return B14 | B13 | B11 |
300 (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
301 (offset << (6 - 2)); // Move imm5 from bits 2-6 to bits 6-10.
302}
303
304int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
305 switch (type_) {
306 case kLoadLiteralWide:
307 return LdrdEncoding32(rn_, rt2_, rbase, offset);
308 case kLoadFPLiteralSingle:
309 return VldrsEncoding32(sd_, rbase, offset);
310 case kLoadFPLiteralDouble:
311 return VldrdEncoding32(dd_, rbase, offset);
312 default:
313 LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
314 UNREACHABLE();
315 }
316}
317
318inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
319 DCHECK(IsUint<12>(offset));
320 return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
321}
322
323void Thumb2Assembler::FinalizeCode() {
324 ArmAssembler::FinalizeCode();
325 BindLiterals();
326 uint32_t adjusted_code_size = AdjustFixups();
327 EmitFixups(adjusted_code_size);
328 EmitLiterals();
329}
330
Nicolas Geoffray3d1e7882015-02-03 13:59:52 +0000331bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
332 Register rn ATTRIBUTE_UNUSED,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000333 Opcode opcode,
334 uint32_t immediate,
335 ShifterOperand* shifter_op) {
336 shifter_op->type_ = ShifterOperand::kImmediate;
337 shifter_op->immed_ = immediate;
338 shifter_op->is_shift_ = false;
339 shifter_op->is_rotate_ = false;
340 switch (opcode) {
341 case ADD:
342 case SUB:
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000343 if (immediate < (1 << 12)) { // Less than (or equal to) 12 bits can always be done.
344 return true;
345 }
346 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
347
348 case MOV:
349 // TODO: Support less than or equal to 12bits.
350 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
351 case MVN:
352 default:
353 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
354 }
355}
356
Dave Allison65fcc2c2014-04-28 13:45:27 -0700357void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
358 Condition cond) {
359 EmitDataProcessing(cond, AND, 0, rn, rd, so);
360}
361
362
363void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
364 Condition cond) {
365 EmitDataProcessing(cond, EOR, 0, rn, rd, so);
366}
367
368
369void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
370 Condition cond) {
371 EmitDataProcessing(cond, SUB, 0, rn, rd, so);
372}
373
374
375void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
376 Condition cond) {
377 EmitDataProcessing(cond, RSB, 0, rn, rd, so);
378}
379
380
381void Thumb2Assembler::rsbs(Register rd, Register rn, const ShifterOperand& so,
382 Condition cond) {
383 EmitDataProcessing(cond, RSB, 1, rn, rd, so);
384}
385
386
387void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
388 Condition cond) {
389 EmitDataProcessing(cond, ADD, 0, rn, rd, so);
390}
391
392
393void Thumb2Assembler::adds(Register rd, Register rn, const ShifterOperand& so,
394 Condition cond) {
395 EmitDataProcessing(cond, ADD, 1, rn, rd, so);
396}
397
398
399void Thumb2Assembler::subs(Register rd, Register rn, const ShifterOperand& so,
400 Condition cond) {
401 EmitDataProcessing(cond, SUB, 1, rn, rd, so);
402}
403
404
405void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
406 Condition cond) {
407 EmitDataProcessing(cond, ADC, 0, rn, rd, so);
408}
409
410
411void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
412 Condition cond) {
413 EmitDataProcessing(cond, SBC, 0, rn, rd, so);
414}
415
416
417void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
418 Condition cond) {
419 EmitDataProcessing(cond, RSC, 0, rn, rd, so);
420}
421
422
423void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
424 CHECK_NE(rn, PC); // Reserve tst pc instruction for exception handler marker.
425 EmitDataProcessing(cond, TST, 1, rn, R0, so);
426}
427
428
429void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
430 CHECK_NE(rn, PC); // Reserve teq pc instruction for exception handler marker.
431 EmitDataProcessing(cond, TEQ, 1, rn, R0, so);
432}
433
434
435void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
436 EmitDataProcessing(cond, CMP, 1, rn, R0, so);
437}
438
439
440void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
441 EmitDataProcessing(cond, CMN, 1, rn, R0, so);
442}
443
444
445void Thumb2Assembler::orr(Register rd, Register rn,
446 const ShifterOperand& so, Condition cond) {
447 EmitDataProcessing(cond, ORR, 0, rn, rd, so);
448}
449
450
451void Thumb2Assembler::orrs(Register rd, Register rn,
452 const ShifterOperand& so, Condition cond) {
453 EmitDataProcessing(cond, ORR, 1, rn, rd, so);
454}
455
456
457void Thumb2Assembler::mov(Register rd, const ShifterOperand& so, Condition cond) {
458 EmitDataProcessing(cond, MOV, 0, R0, rd, so);
459}
460
461
462void Thumb2Assembler::movs(Register rd, const ShifterOperand& so, Condition cond) {
463 EmitDataProcessing(cond, MOV, 1, R0, rd, so);
464}
465
466
467void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
468 Condition cond) {
469 EmitDataProcessing(cond, BIC, 0, rn, rd, so);
470}
471
472
473void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so, Condition cond) {
474 EmitDataProcessing(cond, MVN, 0, R0, rd, so);
475}
476
477
478void Thumb2Assembler::mvns(Register rd, const ShifterOperand& so, Condition cond) {
479 EmitDataProcessing(cond, MVN, 1, R0, rd, so);
480}
481
482
483void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700484 CheckCondition(cond);
485
Dave Allison65fcc2c2014-04-28 13:45:27 -0700486 if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
487 // 16 bit.
488 int16_t encoding = B14 | B9 | B8 | B6 |
489 rn << 3 | rd;
490 Emit16(encoding);
491 } else {
492 // 32 bit.
Andreas Gampec8ccf682014-09-29 20:07:43 -0700493 uint32_t op1 = 0U /* 0b000 */;
494 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700495 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
496 op1 << 20 |
497 B15 | B14 | B13 | B12 |
498 op2 << 4 |
499 static_cast<uint32_t>(rd) << 8 |
500 static_cast<uint32_t>(rn) << 16 |
501 static_cast<uint32_t>(rm);
502
503 Emit32(encoding);
504 }
505}
506
507
508void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
509 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700510 CheckCondition(cond);
511
Andreas Gampec8ccf682014-09-29 20:07:43 -0700512 uint32_t op1 = 0U /* 0b000 */;
513 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700514 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
515 op1 << 20 |
516 op2 << 4 |
517 static_cast<uint32_t>(rd) << 8 |
518 static_cast<uint32_t>(ra) << 12 |
519 static_cast<uint32_t>(rn) << 16 |
520 static_cast<uint32_t>(rm);
521
522 Emit32(encoding);
523}
524
525
526void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
527 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700528 CheckCondition(cond);
529
Andreas Gampec8ccf682014-09-29 20:07:43 -0700530 uint32_t op1 = 0U /* 0b000 */;
531 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700532 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
533 op1 << 20 |
534 op2 << 4 |
535 static_cast<uint32_t>(rd) << 8 |
536 static_cast<uint32_t>(ra) << 12 |
537 static_cast<uint32_t>(rn) << 16 |
538 static_cast<uint32_t>(rm);
539
540 Emit32(encoding);
541}
542
543
Zheng Xuc6667102015-05-15 16:08:45 +0800544void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
545 Register rm, Condition cond) {
546 CheckCondition(cond);
547
548 uint32_t op1 = 0U /* 0b000; */;
549 uint32_t op2 = 0U /* 0b0000 */;
550 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
551 op1 << 20 |
552 op2 << 4 |
553 static_cast<uint32_t>(rd_lo) << 12 |
554 static_cast<uint32_t>(rd_hi) << 8 |
555 static_cast<uint32_t>(rn) << 16 |
556 static_cast<uint32_t>(rm);
557
558 Emit32(encoding);
559}
560
561
Dave Allison65fcc2c2014-04-28 13:45:27 -0700562void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
563 Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700564 CheckCondition(cond);
565
Andreas Gampec8ccf682014-09-29 20:07:43 -0700566 uint32_t op1 = 2U /* 0b010; */;
567 uint32_t op2 = 0U /* 0b0000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700568 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
569 op1 << 20 |
570 op2 << 4 |
571 static_cast<uint32_t>(rd_lo) << 12 |
572 static_cast<uint32_t>(rd_hi) << 8 |
573 static_cast<uint32_t>(rn) << 16 |
574 static_cast<uint32_t>(rm);
575
576 Emit32(encoding);
577}
578
579
580void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700581 CheckCondition(cond);
582
Andreas Gampec8ccf682014-09-29 20:07:43 -0700583 uint32_t op1 = 1U /* 0b001 */;
584 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700585 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
586 op1 << 20 |
587 op2 << 4 |
588 0xf << 12 |
589 static_cast<uint32_t>(rd) << 8 |
590 static_cast<uint32_t>(rn) << 16 |
591 static_cast<uint32_t>(rm);
592
593 Emit32(encoding);
594}
595
596
597void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700598 CheckCondition(cond);
599
Andreas Gampec8ccf682014-09-29 20:07:43 -0700600 uint32_t op1 = 1U /* 0b001 */;
601 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700602 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
603 op1 << 20 |
604 op2 << 4 |
605 0xf << 12 |
606 static_cast<uint32_t>(rd) << 8 |
607 static_cast<uint32_t>(rn) << 16 |
608 static_cast<uint32_t>(rm);
609
610 Emit32(encoding);
611}
612
613
Roland Levillain51d3fc42014-11-13 14:11:42 +0000614void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
615 CheckCondition(cond);
616 CHECK_LE(lsb, 31U);
617 CHECK(1U <= width && width <= 32U) << width;
618 uint32_t widthminus1 = width - 1;
619 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
620 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
621
622 uint32_t op = 20U /* 0b10100 */;
623 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
624 op << 20 |
625 static_cast<uint32_t>(rn) << 16 |
626 imm3 << 12 |
627 static_cast<uint32_t>(rd) << 8 |
628 imm2 << 6 |
629 widthminus1;
630
631 Emit32(encoding);
632}
633
634
Roland Levillain981e4542014-11-14 11:47:14 +0000635void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
636 CheckCondition(cond);
637 CHECK_LE(lsb, 31U);
638 CHECK(1U <= width && width <= 32U) << width;
639 uint32_t widthminus1 = width - 1;
640 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
641 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
642
643 uint32_t op = 28U /* 0b11100 */;
644 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
645 op << 20 |
646 static_cast<uint32_t>(rn) << 16 |
647 imm3 << 12 |
648 static_cast<uint32_t>(rd) << 8 |
649 imm2 << 6 |
650 widthminus1;
651
652 Emit32(encoding);
653}
654
655
Dave Allison65fcc2c2014-04-28 13:45:27 -0700656void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
657 EmitLoadStore(cond, true, false, false, false, rd, ad);
658}
659
660
661void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
662 EmitLoadStore(cond, false, false, false, false, rd, ad);
663}
664
665
666void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
667 EmitLoadStore(cond, true, true, false, false, rd, ad);
668}
669
670
671void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
672 EmitLoadStore(cond, false, true, false, false, rd, ad);
673}
674
675
676void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
677 EmitLoadStore(cond, true, false, true, false, rd, ad);
678}
679
680
681void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
682 EmitLoadStore(cond, false, false, true, false, rd, ad);
683}
684
685
686void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
687 EmitLoadStore(cond, true, true, false, true, rd, ad);
688}
689
690
691void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
692 EmitLoadStore(cond, true, false, true, true, rd, ad);
693}
694
695
696void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
Roland Levillain4af147e2015-04-07 13:54:49 +0100697 ldrd(rd, Register(rd + 1), ad, cond);
698}
699
700
701void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700702 CheckCondition(cond);
Roland Levillain4af147e2015-04-07 13:54:49 +0100703 // Encoding T1.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700704 // This is different from other loads. The encoding is like ARM.
705 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
706 static_cast<int32_t>(rd) << 12 |
Roland Levillain4af147e2015-04-07 13:54:49 +0100707 static_cast<int32_t>(rd2) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700708 ad.encodingThumbLdrdStrd();
709 Emit32(encoding);
710}
711
712
713void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
Roland Levillain4af147e2015-04-07 13:54:49 +0100714 strd(rd, Register(rd + 1), ad, cond);
715}
716
717
718void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700719 CheckCondition(cond);
Roland Levillain4af147e2015-04-07 13:54:49 +0100720 // Encoding T1.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700721 // This is different from other loads. The encoding is like ARM.
722 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
723 static_cast<int32_t>(rd) << 12 |
Roland Levillain4af147e2015-04-07 13:54:49 +0100724 static_cast<int32_t>(rd2) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700725 ad.encodingThumbLdrdStrd();
726 Emit32(encoding);
727}
728
729
730void Thumb2Assembler::ldm(BlockAddressMode am,
731 Register base,
732 RegList regs,
733 Condition cond) {
Vladimir Markoe8469c12014-11-26 18:09:30 +0000734 CHECK_NE(regs, 0u); // Do not use ldm if there's nothing to load.
735 if (IsPowerOfTwo(regs)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -0700736 // Thumb doesn't support one reg in the list.
737 // Find the register number.
Vladimir Markoe8469c12014-11-26 18:09:30 +0000738 int reg = CTZ(static_cast<uint32_t>(regs));
Dave Allison65fcc2c2014-04-28 13:45:27 -0700739 CHECK_LT(reg, 16);
Dave Allison45fdb932014-06-25 12:37:10 -0700740 CHECK(am == DB_W); // Only writeback is supported.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700741 ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
742 } else {
743 EmitMultiMemOp(cond, am, true, base, regs);
744 }
745}
746
747
748void Thumb2Assembler::stm(BlockAddressMode am,
749 Register base,
750 RegList regs,
751 Condition cond) {
Vladimir Markoe8469c12014-11-26 18:09:30 +0000752 CHECK_NE(regs, 0u); // Do not use stm if there's nothing to store.
753 if (IsPowerOfTwo(regs)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -0700754 // Thumb doesn't support one reg in the list.
755 // Find the register number.
Vladimir Markoe8469c12014-11-26 18:09:30 +0000756 int reg = CTZ(static_cast<uint32_t>(regs));
Dave Allison65fcc2c2014-04-28 13:45:27 -0700757 CHECK_LT(reg, 16);
Dave Allison45fdb932014-06-25 12:37:10 -0700758 CHECK(am == IA || am == IA_W);
759 Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700760 str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
761 } else {
762 EmitMultiMemOp(cond, am, false, base, regs);
763 }
764}
765
766
767bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
768 uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
769 if (((imm32 & ((1 << 19) - 1)) == 0) &&
770 ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
771 (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
772 uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
773 ((imm32 >> 19) & ((1 << 6) -1));
774 EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
775 sd, S0, S0);
776 return true;
777 }
778 return false;
779}
780
781
782bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
783 uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
784 if (((imm64 & ((1LL << 48) - 1)) == 0) &&
785 ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
786 (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
787 uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
788 ((imm64 >> 48) & ((1 << 6) -1));
789 EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
790 dd, D0, D0);
791 return true;
792 }
793 return false;
794}
795
796
797void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
798 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
799}
800
801
802void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
803 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
804}
805
806
807void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
808 Condition cond) {
809 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
810}
811
812
813void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
814 Condition cond) {
815 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
816}
817
818
819void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
820 Condition cond) {
821 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
822}
823
824
825void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
826 Condition cond) {
827 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
828}
829
830
831void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
832 Condition cond) {
833 EmitVFPsss(cond, B21, sd, sn, sm);
834}
835
836
837void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
838 Condition cond) {
839 EmitVFPddd(cond, B21, dd, dn, dm);
840}
841
842
843void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
844 Condition cond) {
845 EmitVFPsss(cond, 0, sd, sn, sm);
846}
847
848
849void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
850 Condition cond) {
851 EmitVFPddd(cond, 0, dd, dn, dm);
852}
853
854
855void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
856 Condition cond) {
857 EmitVFPsss(cond, B6, sd, sn, sm);
858}
859
860
861void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
862 Condition cond) {
863 EmitVFPddd(cond, B6, dd, dn, dm);
864}
865
866
867void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
868 Condition cond) {
869 EmitVFPsss(cond, B23, sd, sn, sm);
870}
871
872
873void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
874 Condition cond) {
875 EmitVFPddd(cond, B23, dd, dn, dm);
876}
877
878
879void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
880 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
881}
882
883
884void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
885 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
886}
887
888
889void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
890 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
891}
892
893
894void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
895 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
896}
897
898
899void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
900 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
901}
902
903void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
904 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
905}
906
907
908void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
909 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
910}
911
912
913void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
914 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
915}
916
917
918void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
919 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
920}
921
922
923void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
924 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
925}
926
927
928void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
929 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
930}
931
932
933void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
934 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
935}
936
937
938void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
939 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
940}
941
942
943void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
944 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
945}
946
947
948void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
949 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
950}
951
952
953void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
954 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
955}
956
957
958void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
959 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
960}
961
962
963void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
964 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
965}
966
967
968void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
969 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
970}
971
972
973void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
974 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
975}
976
977void Thumb2Assembler::b(Label* label, Condition cond) {
978 EmitBranch(cond, label, false, false);
979}
980
981
982void Thumb2Assembler::bl(Label* label, Condition cond) {
983 CheckCondition(cond);
984 EmitBranch(cond, label, true, false);
985}
986
987
988void Thumb2Assembler::blx(Label* label) {
989 EmitBranch(AL, label, true, true);
990}
991
992
993void Thumb2Assembler::MarkExceptionHandler(Label* label) {
994 EmitDataProcessing(AL, TST, 1, PC, R0, ShifterOperand(0));
995 Label l;
996 b(&l);
997 EmitBranch(AL, label, false, false);
998 Bind(&l);
999}
1000
1001
1002void Thumb2Assembler::Emit32(int32_t value) {
1003 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1004 buffer_.Emit<int16_t>(value >> 16);
1005 buffer_.Emit<int16_t>(value & 0xffff);
1006}
1007
1008
1009void Thumb2Assembler::Emit16(int16_t value) {
1010 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1011 buffer_.Emit<int16_t>(value);
1012}
1013
1014
// Decides whether a data-processing instruction needs the 32-bit (Thumb2 wide)
// encoding, returning true for 32-bit and false when a 16-bit (Thumb1) encoding
// is possible. The checks mirror the limitations of the 16-bit encodings:
// restricted registers (R0-R7), small immediates, and limited shift forms.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            bool set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  // force_32bit_ makes every instruction wide (e.g. for testing).
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate()) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only MOV and non-flag-setting ADD (with rn == rd) have 16-bit forms that
  // accept high registers (R8-R15).
  bool can_contain_high_register = (opcode == MOV)
      || ((opcode == ADD) && (rn == rd) && !set_cc);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
      return true;  // TEQ has no 16-bit encoding at all.
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register data-processing ops only have a 16-bit form when
      // the destination doubles as the first operand (rd == rn).
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (rn_is_valid && rn != rd) {
      // The only thumb1 instruction with a register and an immediate are ADD and SUB.  The
      // immediate must be 3 bits.
      if (opcode != ADD && opcode != SUB) {
        return true;
      } else {
        // Check that the immediate is 3 bits for ADD and SUB.
        if (so.GetImmediate() >= 8) {
          return true;
        }
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (so.GetImmediate() > 255) {
          return true;
        }
      }
    }
  }

  // Check for register shift operand.
  if (so.IsRegister() && so.IsShift()) {
    if (opcode != MOV) {
      return true;
    }
    // Check for MOV with an ROR.
    if (so.GetShift() == ROR) {
      if (so.GetImmediate() != 0) {
        return true;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1125
1126
// Emits a 32-bit (wide) Thumb2 data-processing instruction. Maps the generic
// opcode to the Thumb2 op field, then selects between the plain 12-bit
// immediate form (ADD/SUB only), the modified-immediate form, and the
// (possibly shifted) register form.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              bool set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  // 255 is a sentinel meaning "no valid thumb2 opcode found".
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the FATAL below.
    // TST/TEQ/CMP/CMN are the flag-setting forms of AND/EOR/SUB/ADD with Rd = PC.
    case TST: thumb_opcode = 0U /* 0b0000 */; set_cc = true; rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; set_cc = true; rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; set_cc = true; rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; set_cc = true; rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;  // MOV is ORR with Rn = PC.
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;  // MVN is ORN with Rn = PC.
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    // ADD/SUB with a plain 12-bit immediate (ADDW/SUBW-style encoding when !set_cc).
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
      if (!set_cc) {
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8 as the encoding requires.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1210
1211
1212void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
1213 Opcode opcode,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001214 bool set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001215 Register rn,
1216 Register rd,
1217 const ShifterOperand& so) {
1218 if (opcode == ADD || opcode == SUB) {
1219 Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
1220 return;
1221 }
Andreas Gampec8ccf682014-09-29 20:07:43 -07001222 uint8_t thumb_opcode = 255U /* 0b11111111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001223 // Thumb1.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001224 uint8_t dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001225 uint8_t opcode_shift = 6;
1226 uint8_t rd_shift = 0;
1227 uint8_t rn_shift = 3;
1228 uint8_t immediate_shift = 0;
1229 bool use_immediate = false;
1230 uint8_t immediate = 0;
1231
1232 if (opcode == MOV && so.IsRegister() && so.IsShift()) {
1233 // Convert shifted mov operand2 into 16 bit opcodes.
1234 dp_opcode = 0;
1235 opcode_shift = 11;
1236
1237 use_immediate = true;
1238 immediate = so.GetImmediate();
1239 immediate_shift = 6;
1240
1241 rn = so.GetRegister();
1242
1243 switch (so.GetShift()) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001244 case LSL: thumb_opcode = 0U /* 0b00 */; break;
1245 case LSR: thumb_opcode = 1U /* 0b01 */; break;
1246 case ASR: thumb_opcode = 2U /* 0b10 */; break;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001247 case ROR:
1248 // ROR doesn't allow immediates.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001249 thumb_opcode = 7U /* 0b111 */;
1250 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001251 opcode_shift = 6;
1252 use_immediate = false;
1253 break;
1254 case RRX: break;
1255 default:
1256 break;
1257 }
1258 } else {
1259 if (so.IsImmediate()) {
1260 use_immediate = true;
1261 immediate = so.GetImmediate();
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001262 } else {
Guillaume "Vermeille" Sanchezab4a2f52015-03-11 14:00:30 +00001263 CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
1264 << "No register-shifted register instruction available in thumb";
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001265 // Adjust rn and rd: only two registers will be emitted.
1266 switch (opcode) {
1267 case AND:
1268 case ORR:
1269 case EOR:
1270 case RSB:
1271 case ADC:
1272 case SBC:
1273 case BIC: {
1274 if (rn == rd) {
1275 rn = so.GetRegister();
1276 } else {
1277 CHECK_EQ(rd, so.GetRegister());
1278 }
1279 break;
1280 }
1281 case CMP:
1282 case CMN: {
1283 CHECK_EQ(rd, 0);
1284 rd = rn;
1285 rn = so.GetRegister();
1286 break;
1287 }
Andreas Gampe7b7e5242015-02-02 19:17:11 -08001288 case TST:
1289 case TEQ:
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001290 case MVN: {
1291 CHECK_EQ(rn, 0);
1292 rn = so.GetRegister();
1293 break;
1294 }
1295 default:
1296 break;
1297 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001298 }
1299
1300 switch (opcode) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001301 case AND: thumb_opcode = 0U /* 0b0000 */; break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001302 case ORR: thumb_opcode = 12U /* 0b1100 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001303 case EOR: thumb_opcode = 1U /* 0b0001 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001304 case RSB: thumb_opcode = 9U /* 0b1001 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001305 case ADC: thumb_opcode = 5U /* 0b0101 */; break;
1306 case SBC: thumb_opcode = 6U /* 0b0110 */; break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001307 case BIC: thumb_opcode = 14U /* 0b1110 */; break;
1308 case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
1309 case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
1310 case CMP: {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001311 if (use_immediate) {
1312 // T2 encoding.
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001313 dp_opcode = 0;
1314 opcode_shift = 11;
1315 thumb_opcode = 5U /* 0b101 */;
1316 rd_shift = 8;
1317 rn_shift = 8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001318 } else {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001319 thumb_opcode = 10U /* 0b1010 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001320 }
1321
1322 break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001323 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001324 case CMN: {
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001325 CHECK(!use_immediate);
Andreas Gampec8ccf682014-09-29 20:07:43 -07001326 thumb_opcode = 11U /* 0b1011 */;
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001327 break;
1328 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001329 case MOV:
1330 dp_opcode = 0;
1331 if (use_immediate) {
1332 // T2 encoding.
1333 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001334 thumb_opcode = 4U /* 0b100 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001335 rd_shift = 8;
1336 rn_shift = 8;
1337 } else {
1338 rn = so.GetRegister();
1339 if (IsHighRegister(rn) || IsHighRegister(rd)) {
1340 // Special mov for high registers.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001341 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001342 opcode_shift = 7;
1343 // Put the top bit of rd into the bottom bit of the opcode.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001344 thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
1345 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001346 } else {
1347 thumb_opcode = 0;
1348 }
1349 }
1350 break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001351
1352 case TEQ:
1353 case RSC:
Dave Allison65fcc2c2014-04-28 13:45:27 -07001354 default:
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001355 LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001356 break;
1357 }
1358 }
1359
Andreas Gampec8ccf682014-09-29 20:07:43 -07001360 if (thumb_opcode == 255U /* 0b11111111 */) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001361 LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001362 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001363 }
1364
1365 int16_t encoding = dp_opcode << 14 |
1366 (thumb_opcode << opcode_shift) |
1367 rd << rd_shift |
1368 rn << rn_shift |
1369 (use_immediate ? (immediate << immediate_shift) : 0);
1370
1371 Emit16(encoding);
1372}
1373
1374
1375// ADD and SUB are complex enough to warrant their own emitter.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001376void Thumb2Assembler::Emit16BitAddSub(Condition cond ATTRIBUTE_UNUSED,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001377 Opcode opcode,
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001378 bool set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001379 Register rn,
1380 Register rd,
1381 const ShifterOperand& so) {
1382 uint8_t dp_opcode = 0;
1383 uint8_t opcode_shift = 6;
1384 uint8_t rd_shift = 0;
1385 uint8_t rn_shift = 3;
1386 uint8_t immediate_shift = 0;
1387 bool use_immediate = false;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001388 uint32_t immediate = 0; // Should be at most 9 bits but keep the full immediate for CHECKs.
Dave Allison65fcc2c2014-04-28 13:45:27 -07001389 uint8_t thumb_opcode;;
1390
1391 if (so.IsImmediate()) {
1392 use_immediate = true;
1393 immediate = so.GetImmediate();
1394 }
1395
1396 switch (opcode) {
1397 case ADD:
1398 if (so.IsRegister()) {
1399 Register rm = so.GetRegister();
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001400 if (rn == rd && !set_cc) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001401 // Can use T2 encoding (allows 4 bit registers)
Andreas Gampec8ccf682014-09-29 20:07:43 -07001402 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001403 opcode_shift = 10;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001404 thumb_opcode = 1U /* 0b0001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001405 // Make Rn also contain the top bit of rd.
1406 rn = static_cast<Register>(static_cast<uint32_t>(rm) |
Andreas Gampec8ccf682014-09-29 20:07:43 -07001407 (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
1408 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001409 } else {
1410 // T1.
1411 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001412 thumb_opcode = 12U /* 0b01100 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001413 immediate = static_cast<uint32_t>(so.GetRegister());
1414 use_immediate = true;
1415 immediate_shift = 6;
1416 }
1417 } else {
1418 // Immediate.
1419 if (rd == SP && rn == SP) {
1420 // ADD sp, sp, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001421 dp_opcode = 2U /* 0b10 */;
1422 thumb_opcode = 3U /* 0b11 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001423 opcode_shift = 12;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001424 CHECK_LT(immediate, (1u << 9));
1425 CHECK_EQ((immediate & 3u /* 0b11 */), 0u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001426
1427 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1428 rn = R0;
1429 rd = R0;
1430 rd_shift = 0;
1431 rn_shift = 0;
1432 immediate >>= 2;
1433 } else if (rd != SP && rn == SP) {
1434 // ADD rd, SP, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001435 dp_opcode = 2U /* 0b10 */;
1436 thumb_opcode = 5U /* 0b101 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001437 opcode_shift = 11;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001438 CHECK_LT(immediate, (1u << 10));
1439 CHECK_EQ((immediate & 3u /* 0b11 */), 0u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001440
1441 // Remove rn from instruction.
1442 rn = R0;
1443 rn_shift = 0;
1444 rd_shift = 8;
1445 immediate >>= 2;
1446 } else if (rn != rd) {
1447 // Must use T1.
1448 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001449 thumb_opcode = 14U /* 0b01110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001450 immediate_shift = 6;
1451 } else {
1452 // T2 encoding.
1453 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001454 thumb_opcode = 6U /* 0b110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001455 rd_shift = 8;
1456 rn_shift = 8;
1457 }
1458 }
1459 break;
1460
1461 case SUB:
1462 if (so.IsRegister()) {
1463 // T1.
1464 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001465 thumb_opcode = 13U /* 0b01101 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001466 immediate = static_cast<uint32_t>(so.GetRegister());
1467 use_immediate = true;
1468 immediate_shift = 6;
1469 } else {
1470 if (rd == SP && rn == SP) {
1471 // SUB sp, sp, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001472 dp_opcode = 2U /* 0b10 */;
1473 thumb_opcode = 0x61 /* 0b1100001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001474 opcode_shift = 7;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001475 CHECK_LT(immediate, (1u << 9));
1476 CHECK_EQ((immediate & 3u /* 0b11 */), 0u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001477
1478 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1479 rn = R0;
1480 rd = R0;
1481 rd_shift = 0;
1482 rn_shift = 0;
1483 immediate >>= 2;
1484 } else if (rn != rd) {
1485 // Must use T1.
1486 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001487 thumb_opcode = 15U /* 0b01111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001488 immediate_shift = 6;
1489 } else {
1490 // T2 encoding.
1491 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001492 thumb_opcode = 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001493 rd_shift = 8;
1494 rn_shift = 8;
1495 }
1496 }
1497 break;
1498 default:
1499 LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001500 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001501 }
1502
1503 int16_t encoding = dp_opcode << 14 |
1504 (thumb_opcode << opcode_shift) |
1505 rd << rd_shift |
1506 rn << rn_shift |
1507 (use_immediate ? (immediate << immediate_shift) : 0);
1508
1509 Emit16(encoding);
1510}
1511
1512
1513void Thumb2Assembler::EmitDataProcessing(Condition cond,
1514 Opcode opcode,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001515 bool set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001516 Register rn,
1517 Register rd,
1518 const ShifterOperand& so) {
1519 CHECK_NE(rd, kNoRegister);
1520 CheckCondition(cond);
1521
1522 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1523 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1524 } else {
1525 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1526 }
1527}
1528
// Emits a shift of rm by an immediate `amount` into rd. Uses the 32-bit
// encoding when high registers are involved or the shift kind (ROR/RRX) has
// no 16-bit immediate form; otherwise emits the narrow 16-bit encoding.
void Thumb2Assembler::EmitShift(Register rd, Register rm, Shift shift, uint8_t amount, bool setcc) {
  CHECK_LT(amount, (1 << 5));  // Shift amounts are at most 5 bits.
  if (IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;  // RRX is ROR with amount 0.
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (setcc ? B20 : 0);
    // The 5-bit amount is split into imm3:imm2 fields.
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1567
1568void Thumb2Assembler::EmitShift(Register rd, Register rn, Shift shift, Register rm, bool setcc) {
1569 CHECK_NE(shift, RRX);
1570 bool must_be_32bit = false;
1571 if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn) {
1572 must_be_32bit = true;
1573 }
1574
1575 if (must_be_32bit) {
1576 uint16_t opcode = 0;
1577 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001578 case LSL: opcode = 0U /* 0b00 */; break;
1579 case LSR: opcode = 1U /* 0b01 */; break;
1580 case ASR: opcode = 2U /* 0b10 */; break;
1581 case ROR: opcode = 3U /* 0b11 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001582 default:
1583 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001584 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001585 }
1586 // 32 bit.
1587 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
1588 0xf << 12 | (setcc ? B20 : 0);
1589 encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
1590 static_cast<int16_t>(rd) << 8 | opcode << 21;
1591 Emit32(encoding);
1592 } else {
1593 uint16_t opcode = 0;
1594 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001595 case LSL: opcode = 2U /* 0b0010 */; break;
1596 case LSR: opcode = 3U /* 0b0011 */; break;
1597 case ASR: opcode = 4U /* 0b0100 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001598 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001599 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1600 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001601 }
1602 int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
1603 static_cast<int16_t>(rd);
1604 Emit16(encoding);
1605 }
1606}
1607
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001608inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1609 switch (size) {
1610 case kBranch16Bit:
1611 return 2u;
1612 case kBranch32Bit:
1613 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001614
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001615 case kCbxz16Bit:
1616 return 2u;
1617 case kCbxz32Bit:
1618 return 4u;
1619 case kCbxz48Bit:
1620 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001621
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001622 case kLiteral1KiB:
1623 return 2u;
1624 case kLiteral4KiB:
1625 return 4u;
1626 case kLiteral64KiB:
1627 return 8u;
1628 case kLiteral1MiB:
1629 return 10u;
1630 case kLiteralFar:
1631 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001632
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001633 case kLongOrFPLiteral1KiB:
1634 return 4u;
1635 case kLongOrFPLiteral256KiB:
1636 return 10u;
1637 case kLongOrFPLiteralFar:
1638 return 14u;
1639 }
1640 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1641 UNREACHABLE();
1642}
1643
// Size in bytes of this fixup's encoding as originally emitted (before any enlargement).
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1647
// Size in bytes of this fixup's current (possibly enlarged) encoding.
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1651
// Bytes of padding (0 or 2) needed before the literal pool so that it starts
// at a 4-byte-aligned address.
inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
  // The code size must be a multiple of 2.
  DCHECK_EQ(current_code_size & 1u, 0u);
  // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
  return current_code_size & 2;
}
1658
// Computes the branch/load offset this fixup must encode: the raw distance
// from location_ to target_, widened by adjustment_ (bytes inserted between
// them by other fixups growing), minus the Thumb PC bias and any extra
// instructions this fixup variant emits before the PC-relative one.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  // adjustment_ always moves the target further away, whichever direction it lies.
  if (target_ > location_) {
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_EQ(diff & 1, 0);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1719
1720inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1721 DCHECK_NE(target_, kUnresolved);
1722 Size old_size = size_;
1723 size_ = new_size;
1724 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1725 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1726 if (target_ > location_) {
1727 adjustment_ += adjustment;
1728 }
1729 return adjustment;
1730}
1731
// Check whether the current encoding can still reach the target given the current code
// size and, if not, upgrade through successively larger encodings (each case falls
// through to re-check the next size). Returns the number of bytes the code grew by,
// 0 if the current encoding still suffices.
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // 16-bit B<cond> takes a signed 9-bit offset, unconditional B a signed 12-bit one.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ takes an unsigned 7-bit (forward-only) offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      // CMP + 16-bit B<cond>; the branch takes a signed 9-bit offset.
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
1810
// Write the final machine code for this (resolved) fixup into |buffer| at location_.
// The emitted sequence depends on the encoding size chosen by AdjustSizeIfNeeded();
// 32-bit instructions are stored as two 16-bit halfwords, high halfword first.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;  // Set bit 14 to turn the B encoding into BL.
      } else if (type_ == kUnconditionalLinkX) {
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;  // Set bit 14 and clear bit 12 (checked set above) for BLX.
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CBZ/CBNZ out of range; emit CMP rn, #0 followed by a 16-bit B<eq/ne>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // As kCbxz32Bit but with a 32-bit conditional branch.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT rn, #offset; ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // MOV IP, #(offset & ~0x3ff); ADD IP, PC; wide/FP load with the remaining offset.
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW+MOVT IP, #offset; ADD IP, PC; wide/FP load with zero offset.
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
1953
Dave Allison65fcc2c2014-04-28 13:45:27 -07001954uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00001955 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001956 uint32_t location = buffer_.Size();
1957
1958 // This is always unresolved as it must be a forward branch.
1959 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001960 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001961}
1962
1963
// NOTE: this only supports immediate offsets, not [rx,ry].
// TODO: support [rx,ry] instructions.
// Emit a single load or store (word/halfword/byte, optionally sign-extending) of |rd|
// at address |ad|. Chooses the 16-bit Thumb encoding when the registers, offset and
// addressing mode allow it, otherwise falls back to the 32-bit Thumb2 encoding.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // High base registers other than SP/PC (which have dedicated 16-bit forms) need 32 bits.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  // Sign-extending loads, negative offsets and pre/post-indexed modes have no 16-bit form.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_EQ((offset & 3 /* 0b11 */), 0);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_EQ((offset & 1 /* 0b1 */), 0);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_EQ((offset & 3 /* 0b11 */), 0);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal access; the offset is encoded as a magnitude plus an up/down bit.
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2127
2128
// Emit a multi-register load/store (LDM/STM family). Uses 16-bit PUSH/POP when the
// operands match their constraints, a 16-bit LDM/STM when only low registers with
// writeback are involved, and the 32-bit encoding otherwise. Only IA and DB modes are
// supported in the 32-bit form.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // SP with IA_W on load / DB_W on store is POP/PUSH; the only high register allowed
  // in the list is PC (for POP) or LR (for PUSH).
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // Any high register in the list forces the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                       (op << 23) |
                       (load ? B20 : 0) |
                       base << 16 |
                       regs |
                       (w_bit << 21);
    Emit32(encoding);
  } else {
    int16_t encoding = B15 | B14 |
                       (load ? B11 : 0) |
                       base << 8 |
                       regs;
    Emit16(encoding);
  }
}
2197
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002198void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
2199 bool use32bit = IsForced32Bit() || !CanRelocateBranches();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002200 uint32_t pc = buffer_.Size();
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002201 Fixup::Type branch_type;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002202 if (cond == AL) {
2203 if (link) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002204 use32bit = true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002205 if (x) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002206 branch_type = Fixup::kUnconditionalLinkX; // BLX.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002207 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002208 branch_type = Fixup::kUnconditionalLink; // BX.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002209 }
2210 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002211 branch_type = Fixup::kUnconditional; // B.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002212 }
2213 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002214 branch_type = Fixup::kConditional; // B<cond>.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002215 }
2216
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002217 Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
2218 FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));
2219
Dave Allison65fcc2c2014-04-28 13:45:27 -07002220 if (label->IsBound()) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002221 // The branch is to a bound label which means that it's a backwards branch.
2222 // Record this branch as a dependency of all Fixups between the label and the branch.
2223 GetFixup(branch_id)->Resolve(label->Position());
2224 for (FixupId fixup_id = branch_id; fixup_id != 0u; ) {
2225 --fixup_id;
2226 Fixup* fixup = GetFixup(fixup_id);
2227 DCHECK_GE(label->Position(), 0);
2228 if (fixup->GetLocation() < static_cast<uint32_t>(label->Position())) {
2229 break;
2230 }
2231 fixup->AddDependent(branch_id);
Vladimir Markofbeb4ae2015-06-16 11:32:01 +00002232 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002233 Emit16(0);
Vladimir Markofbeb4ae2015-06-16 11:32:01 +00002234 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002235 // Branch target is an unbound label. Add it to a singly-linked list maintained within
2236 // the code with the label serving as the head.
2237 Emit16(static_cast<uint16_t>(label->position_));
2238 label->LinkTo(branch_id);
Vladimir Markof38caa62015-05-29 15:50:18 +01002239 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002240
2241 if (use32bit) {
2242 Emit16(0);
2243 }
2244 DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
Dave Allison65fcc2c2014-04-28 13:45:27 -07002245}
2246
2247
2248void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
2249 CHECK_NE(rd, kNoRegister);
2250 CHECK_NE(rm, kNoRegister);
2251 CheckCondition(cond);
2252 CHECK_NE(rd, PC);
2253 CHECK_NE(rm, PC);
2254 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2255 B25 | B23 | B21 | B20 |
2256 static_cast<uint32_t>(rm) << 16 |
2257 0xf << 12 |
2258 static_cast<uint32_t>(rd) << 8 |
2259 B7 |
2260 static_cast<uint32_t>(rm);
2261 Emit32(encoding);
2262}
2263
2264
2265void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
2266 CheckCondition(cond);
2267 bool must_be_32bit = force_32bit_;
2268 if (IsHighRegister(rd)|| imm16 >= 256u) {
2269 must_be_32bit = true;
2270 }
2271
2272 if (must_be_32bit) {
2273 // Use encoding T3.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002274 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2275 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2276 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002277 uint32_t imm8 = imm16 & 0xff;
2278 int32_t encoding = B31 | B30 | B29 | B28 |
2279 B25 | B22 |
2280 static_cast<uint32_t>(rd) << 8 |
2281 i << 26 |
2282 imm4 << 16 |
2283 imm3 << 12 |
2284 imm8;
2285 Emit32(encoding);
2286 } else {
2287 int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
2288 imm16;
2289 Emit16(encoding);
2290 }
2291}
2292
2293
2294void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
2295 CheckCondition(cond);
2296 // Always 32 bits.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002297 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2298 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2299 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002300 uint32_t imm8 = imm16 & 0xff;
2301 int32_t encoding = B31 | B30 | B29 | B28 |
2302 B25 | B23 | B22 |
2303 static_cast<uint32_t>(rd) << 8 |
2304 i << 26 |
2305 imm4 << 16 |
2306 imm3 << 12 |
2307 imm8;
2308 Emit32(encoding);
2309}
2310
2311
2312void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2313 CHECK_NE(rn, kNoRegister);
2314 CHECK_NE(rt, kNoRegister);
2315 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002316 CHECK_LT(imm, (1u << 10));
2317
2318 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2319 static_cast<uint32_t>(rn) << 16 |
2320 static_cast<uint32_t>(rt) << 12 |
2321 0xf << 8 |
2322 imm >> 2;
2323 Emit32(encoding);
2324}
2325
2326
2327void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
2328 ldrex(rt, rn, 0, cond);
2329}
2330
2331
2332void Thumb2Assembler::strex(Register rd,
2333 Register rt,
2334 Register rn,
2335 uint16_t imm,
2336 Condition cond) {
2337 CHECK_NE(rn, kNoRegister);
2338 CHECK_NE(rd, kNoRegister);
2339 CHECK_NE(rt, kNoRegister);
2340 CheckCondition(cond);
2341 CHECK_LT(imm, (1u << 10));
2342
2343 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2344 static_cast<uint32_t>(rn) << 16 |
2345 static_cast<uint32_t>(rt) << 12 |
2346 static_cast<uint32_t>(rd) << 8 |
2347 imm >> 2;
2348 Emit32(encoding);
2349}
2350
2351
Calin Juravle52c48962014-12-16 17:02:57 +00002352void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
2353 CHECK_NE(rn, kNoRegister);
2354 CHECK_NE(rt, kNoRegister);
2355 CHECK_NE(rt2, kNoRegister);
2356 CHECK_NE(rt, rt2);
2357 CheckCondition(cond);
2358
2359 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
2360 static_cast<uint32_t>(rn) << 16 |
2361 static_cast<uint32_t>(rt) << 12 |
2362 static_cast<uint32_t>(rt2) << 8 |
2363 B6 | B5 | B4 | B3 | B2 | B1 | B0;
2364 Emit32(encoding);
2365}
2366
2367
Dave Allison65fcc2c2014-04-28 13:45:27 -07002368void Thumb2Assembler::strex(Register rd,
2369 Register rt,
2370 Register rn,
2371 Condition cond) {
2372 strex(rd, rt, rn, 0, cond);
2373}
2374
2375
Calin Juravle52c48962014-12-16 17:02:57 +00002376void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
2377 CHECK_NE(rd, kNoRegister);
2378 CHECK_NE(rn, kNoRegister);
2379 CHECK_NE(rt, kNoRegister);
2380 CHECK_NE(rt2, kNoRegister);
2381 CHECK_NE(rt, rt2);
2382 CHECK_NE(rd, rt);
2383 CHECK_NE(rd, rt2);
2384 CheckCondition(cond);
2385
2386 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
2387 static_cast<uint32_t>(rn) << 16 |
2388 static_cast<uint32_t>(rt) << 12 |
2389 static_cast<uint32_t>(rt2) << 8 |
2390 B6 | B5 | B4 |
2391 static_cast<uint32_t>(rd);
2392 Emit32(encoding);
2393}
2394
2395
Dave Allison65fcc2c2014-04-28 13:45:27 -07002396void Thumb2Assembler::clrex(Condition cond) {
2397 CheckCondition(cond);
2398 int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
2399 B21 | B20 |
2400 0xf << 16 |
2401 B15 |
2402 0xf << 8 |
2403 B5 |
2404 0xf;
2405 Emit32(encoding);
2406}
2407
2408
2409void Thumb2Assembler::nop(Condition cond) {
2410 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002411 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002412 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002413 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002414}
2415
2416
2417void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
2418 CHECK_NE(sn, kNoSRegister);
2419 CHECK_NE(rt, kNoRegister);
2420 CHECK_NE(rt, SP);
2421 CHECK_NE(rt, PC);
2422 CheckCondition(cond);
2423 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2424 B27 | B26 | B25 |
2425 ((static_cast<int32_t>(sn) >> 1)*B16) |
2426 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2427 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2428 Emit32(encoding);
2429}
2430
2431
2432void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
2433 CHECK_NE(sn, kNoSRegister);
2434 CHECK_NE(rt, kNoRegister);
2435 CHECK_NE(rt, SP);
2436 CHECK_NE(rt, PC);
2437 CheckCondition(cond);
2438 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2439 B27 | B26 | B25 | B20 |
2440 ((static_cast<int32_t>(sn) >> 1)*B16) |
2441 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2442 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2443 Emit32(encoding);
2444}
2445
2446
2447void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
2448 Condition cond) {
2449 CHECK_NE(sm, kNoSRegister);
2450 CHECK_NE(sm, S31);
2451 CHECK_NE(rt, kNoRegister);
2452 CHECK_NE(rt, SP);
2453 CHECK_NE(rt, PC);
2454 CHECK_NE(rt2, kNoRegister);
2455 CHECK_NE(rt2, SP);
2456 CHECK_NE(rt2, PC);
2457 CheckCondition(cond);
2458 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2459 B27 | B26 | B22 |
2460 (static_cast<int32_t>(rt2)*B16) |
2461 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2462 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
2463 (static_cast<int32_t>(sm) >> 1);
2464 Emit32(encoding);
2465}
2466
2467
2468void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
2469 Condition cond) {
2470 CHECK_NE(sm, kNoSRegister);
2471 CHECK_NE(sm, S31);
2472 CHECK_NE(rt, kNoRegister);
2473 CHECK_NE(rt, SP);
2474 CHECK_NE(rt, PC);
2475 CHECK_NE(rt2, kNoRegister);
2476 CHECK_NE(rt2, SP);
2477 CHECK_NE(rt2, PC);
2478 CHECK_NE(rt, rt2);
2479 CheckCondition(cond);
2480 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2481 B27 | B26 | B22 | B20 |
2482 (static_cast<int32_t>(rt2)*B16) |
2483 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2484 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
2485 (static_cast<int32_t>(sm) >> 1);
2486 Emit32(encoding);
2487}
2488
2489
2490void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
2491 Condition cond) {
2492 CHECK_NE(dm, kNoDRegister);
2493 CHECK_NE(rt, kNoRegister);
2494 CHECK_NE(rt, SP);
2495 CHECK_NE(rt, PC);
2496 CHECK_NE(rt2, kNoRegister);
2497 CHECK_NE(rt2, SP);
2498 CHECK_NE(rt2, PC);
2499 CheckCondition(cond);
2500 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2501 B27 | B26 | B22 |
2502 (static_cast<int32_t>(rt2)*B16) |
2503 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
2504 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
2505 (static_cast<int32_t>(dm) & 0xf);
2506 Emit32(encoding);
2507}
2508
2509
2510void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
2511 Condition cond) {
2512 CHECK_NE(dm, kNoDRegister);
2513 CHECK_NE(rt, kNoRegister);
2514 CHECK_NE(rt, SP);
2515 CHECK_NE(rt, PC);
2516 CHECK_NE(rt2, kNoRegister);
2517 CHECK_NE(rt2, SP);
2518 CHECK_NE(rt2, PC);
2519 CHECK_NE(rt, rt2);
2520 CheckCondition(cond);
2521 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2522 B27 | B26 | B22 | B20 |
2523 (static_cast<int32_t>(rt2)*B16) |
2524 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
2525 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
2526 (static_cast<int32_t>(dm) & 0xf);
2527 Emit32(encoding);
2528}
2529
2530
2531void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2532 const Address& addr = static_cast<const Address&>(ad);
2533 CHECK_NE(sd, kNoSRegister);
2534 CheckCondition(cond);
2535 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2536 B27 | B26 | B24 | B20 |
2537 ((static_cast<int32_t>(sd) & 1)*B22) |
2538 ((static_cast<int32_t>(sd) >> 1)*B12) |
2539 B11 | B9 | addr.vencoding();
2540 Emit32(encoding);
2541}
2542
2543
2544void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2545 const Address& addr = static_cast<const Address&>(ad);
2546 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2547 CHECK_NE(sd, kNoSRegister);
2548 CheckCondition(cond);
2549 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2550 B27 | B26 | B24 |
2551 ((static_cast<int32_t>(sd) & 1)*B22) |
2552 ((static_cast<int32_t>(sd) >> 1)*B12) |
2553 B11 | B9 | addr.vencoding();
2554 Emit32(encoding);
2555}
2556
2557
2558void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2559 const Address& addr = static_cast<const Address&>(ad);
2560 CHECK_NE(dd, kNoDRegister);
2561 CheckCondition(cond);
2562 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2563 B27 | B26 | B24 | B20 |
2564 ((static_cast<int32_t>(dd) >> 4)*B22) |
2565 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2566 B11 | B9 | B8 | addr.vencoding();
2567 Emit32(encoding);
2568}
2569
2570
2571void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2572 const Address& addr = static_cast<const Address&>(ad);
2573 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2574 CHECK_NE(dd, kNoDRegister);
2575 CheckCondition(cond);
2576 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2577 B27 | B26 | B24 |
2578 ((static_cast<int32_t>(dd) >> 4)*B22) |
2579 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2580 B11 | B9 | B8 | addr.vencoding();
2581 Emit32(encoding);
2582}
2583
2584
// Push `nregs` consecutive S registers starting at `reg` (VPUSH, single).
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2588
2589
// Push `nregs` consecutive D registers starting at `reg` (VPUSH, double).
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2593
2594
// Pop `nregs` consecutive S registers starting at `reg` (VPOP, single).
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2598
2599
// Pop `nregs` consecutive D registers starting at `reg` (VPOP, double).
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2603
2604
// Shared encoder for vpush/vpop of `nregs` consecutive S or D registers
// starting at `reg`. The condition is only validated here; the emitted 32-bit
// encoding carries the fixed 0b1110 prefix (Thumb2 conditionality comes from
// an enclosing IT block, not from the instruction itself).
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // Push and pop differ only in the addressing-mode bits: B24 for push,
  // B23 | B20 for pop. D registers count two halfwords each (nregs << 1).
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     14U /* 0b1110 */ << 28 |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit32(encoding);
}
2629
2630
// Emit a three-operand single-precision VFP data instruction (sd = sn op sm);
// `opcode` supplies the operation-specific bits. Each S register is split
// into a 4-bit V field (reg >> 1) and a low bit routed to D/N/M (B22/B7/B5).
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2647
2648
// Emit a three-operand double-precision VFP data instruction (dd = dn op dm);
// `opcode` supplies the operation-specific bits. Each D register is split
// into a 4-bit V field (reg & 0xf) and a high bit routed to D/N/M (B22/B7/B5).
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2665
2666
// Emit a two-operand VFP instruction with S destination and D source
// (e.g. double-to-single conversions); `opcode` supplies the operation bits.
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2680
2681
// Emit a two-operand VFP instruction with D destination and S source
// (e.g. single-to-double conversions); `opcode` supplies the operation bits.
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2695
2696
// Copy the FPSCR condition flags into the APSR so that integer conditional
// instructions can branch on floating-point compare results.
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  // Rt = PC (0b1111) in the VMRS encoding selects APSR_nzcv as the target.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit32(encoding);
}
2706
2707
2708void Thumb2Assembler::svc(uint32_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08002709 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002710 int16_t encoding = B15 | B14 | B12 |
2711 B11 | B10 | B9 | B8 |
2712 imm8;
2713 Emit16(encoding);
2714}
2715
2716
2717void Thumb2Assembler::bkpt(uint16_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08002718 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002719 int16_t encoding = B15 | B13 | B12 |
2720 B11 | B10 | B9 |
2721 imm8;
2722 Emit16(encoding);
2723}
2724
2725// Convert the given IT state to a mask bit given bit 0 of the first
2726// condition and a shift position.
2727static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
2728 switch (s) {
2729 case kItOmitted: return 1 << shift;
2730 case kItThen: return firstcond0 << shift;
2731 case kItElse: return !firstcond0 << shift;
2732 }
2733 return 0;
2734}
2735
2736
2737// Set the IT condition in the given position for the given state. This is used
2738// to check that conditional instructions match the preceding IT statement.
2739void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
2740 switch (s) {
2741 case kItOmitted: it_conditions_[index] = AL; break;
2742 case kItThen: it_conditions_[index] = cond; break;
2743 case kItElse:
2744 it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
2745 break;
2746 }
2747}
2748
2749
// Emit an IT (If-Then) instruction. i1..i3 describe the second to fourth
// slots of the block (kItOmitted terminates it); the per-slot conditions are
// recorded so that the following conditional instructions can be validated.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);       // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // The first slot always uses firstcond; each further slot both contributes
  // a mask bit and records its effective condition.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        // Full four-instruction block: the terminating bit lands in position 0.
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
                      B11 | B10 | B9 | B8 |
                      firstcond << 4 |
                      mask;
  Emit16(encoding);
}
2784
2785
2786void Thumb2Assembler::cbz(Register rn, Label* label) {
2787 CheckCondition(AL);
2788 if (label->IsBound()) {
2789 LOG(FATAL) << "cbz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00002790 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002791 } else if (IsHighRegister(rn)) {
2792 LOG(FATAL) << "cbz can only be used with low registers";
2793 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002794 } else {
2795 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
2796 label->LinkTo(branchid);
2797 }
2798}
2799
2800
2801void Thumb2Assembler::cbnz(Register rn, Label* label) {
2802 CheckCondition(AL);
2803 if (label->IsBound()) {
2804 LOG(FATAL) << "cbnz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00002805 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002806 } else if (IsHighRegister(rn)) {
2807 LOG(FATAL) << "cbnz can only be used with low registers";
2808 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002809 } else {
2810 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
2811 label->LinkTo(branchid);
2812 }
2813}
2814
2815
2816void Thumb2Assembler::blx(Register rm, Condition cond) {
2817 CHECK_NE(rm, kNoRegister);
2818 CheckCondition(cond);
2819 int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
2820 Emit16(encoding);
2821}
2822
2823
2824void Thumb2Assembler::bx(Register rm, Condition cond) {
2825 CHECK_NE(rm, kNoRegister);
2826 CheckCondition(cond);
2827 int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
2828 Emit16(encoding);
2829}
2830
2831
// Push a single register: str rd, [sp, #-4]! (pre-indexed decrement).
void Thumb2Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}
2835
2836
// Pop a single register: ldr rd, [sp], #4 (post-indexed increment).
void Thumb2Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}
2840
2841
// Push a register list: stmdb sp!, {regs}.
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
2845
2846
// Pop a register list: ldmia sp!, {regs}.
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
2850
2851
2852void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
2853 if (cond != AL || rd != rm) {
2854 mov(rd, ShifterOperand(rm), cond);
2855 }
2856}
2857
2858
Dave Allison65fcc2c2014-04-28 13:45:27 -07002859void Thumb2Assembler::Bind(Label* label) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002860 BindLabel(label, buffer_.Size());
Dave Allison65fcc2c2014-04-28 13:45:27 -07002861}
2862
2863
// rd = rm << shift_imm (logical shift left by immediate, range [0, 31]).
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          bool setcc, Condition cond) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, setcc);
}
2870
2871
// rd = rm >> shift_imm (logical shift right by immediate, range [1, 32]).
void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                          bool setcc, Condition cond) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, shift_imm, setcc);
}
2879
2880
// rd = rm >> shift_imm (arithmetic shift right by immediate, range [1, 32]).
void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                          bool setcc, Condition cond) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, shift_imm, setcc);
}
2888
2889
// rd = rm rotated right by shift_imm (range [1, 31]; 32 would equal 0).
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          bool setcc, Condition cond) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, setcc);
}
2896
2897
Dave Allison45fdb932014-06-25 12:37:10 -07002898void Thumb2Assembler::Rrx(Register rd, Register rm, bool setcc, Condition cond) {
2899 CheckCondition(cond);
2900 EmitShift(rd, rm, RRX, rm, setcc);
2901}
2902
2903
// rd = rm << rn (logical shift left by register).
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, setcc);
}
2909
2910
// rd = rm >> rn (logical shift right by register).
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, setcc);
}
2916
2917
// rd = rm >> rn (arithmetic shift right by register).
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, setcc);
}
2923
2924
// rd = rm rotated right by rn (rotate by register).
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, setcc);
}
2930
2931
2932int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
2933 // The offset is off by 4 due to the way the ARM CPUs read PC.
2934 offset -= 4;
2935 offset >>= 1;
2936
2937 uint32_t value = 0;
2938 // There are two different encodings depending on the value of bit 12. In one case
2939 // intermediate values are calculated using the sign bit.
2940 if ((inst & B12) == B12) {
2941 // 25 bits of offset.
2942 uint32_t signbit = (offset >> 31) & 0x1;
2943 uint32_t i1 = (offset >> 22) & 0x1;
2944 uint32_t i2 = (offset >> 21) & 0x1;
2945 uint32_t imm10 = (offset >> 11) & 0x03ff;
2946 uint32_t imm11 = offset & 0x07ff;
2947 uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
2948 uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
2949 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
2950 imm11;
2951 // Remove the offset from the current encoding.
2952 inst &= ~(0x3ff << 16 | 0x7ff);
2953 } else {
2954 uint32_t signbit = (offset >> 31) & 0x1;
2955 uint32_t imm6 = (offset >> 11) & 0x03f;
2956 uint32_t imm11 = offset & 0x07ff;
2957 uint32_t j1 = (offset >> 19) & 1;
2958 uint32_t j2 = (offset >> 17) & 1;
2959 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
2960 imm11;
2961 // Remove the offset from the current encoding.
2962 inst &= ~(0x3f << 16 | 0x7ff);
2963 }
2964 // Mask out offset bits in current instruction.
2965 inst &= ~(B26 | B13 | B11);
2966 inst |= value;
2967 return inst;
2968}
2969
2970
// Recover the byte offset encoded in the 32-bit branch instruction `instr`.
// Inverse of EncodeBranchOffset: bit 12 selects the T4 (unconditional,
// I1/I2 derived from J1/J2 and S) or T3 (conditional, J1/J2 direct) layout.
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // T4: I1 = NOT(J1 EOR S), I2 = NOT(J2 EOR S).
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    // T3: offset bits are S:J2:J1:imm6:imm11:'0'.
    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  // Undo the PC-read bias applied by EncodeBranchOffset.
  imm32 += 4;
  return imm32;
}
2997
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002998uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
2999 // We can reconstruct the adjustment by going through all the fixups from the beginning
3000 // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
3001 // with increasing old_position, we can use the data from last AdjustedPosition() to
3002 // continue where we left off and the whole loop should be O(m+n) where m is the number
3003 // of positions to adjust and n is the number of fixups.
3004 if (old_position < last_old_position_) {
3005 last_position_adjustment_ = 0u;
3006 last_old_position_ = 0u;
3007 last_fixup_id_ = 0u;
3008 }
3009 while (last_fixup_id_ != fixups_.size()) {
3010 Fixup* fixup = GetFixup(last_fixup_id_);
3011 if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
3012 break;
3013 }
3014 if (fixup->GetSize() != fixup->GetOriginalSize()) {
3015 last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
3016 }
3017 ++last_fixup_id_;
3018 }
3019 last_old_position_ = old_position;
3020 return old_position + last_position_adjustment_;
3021}
3022
3023Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
3024 DCHECK(size == 4u || size == 8u) << size;
3025 literals_.emplace_back(size, data);
3026 return &literals_.back();
3027}
3028
// Load a 4-byte literal into rt via a PC-relative load fixed up when the
// literal pool is placed. High registers (and forced-32-bit mode) need the
// 32-bit encoding, which also widens the reachable range (4KiB vs 1KiB).
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // The placeholder halfword temporarily stores the label's link chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3043
// Load an 8-byte literal into the register pair rt:rt2 (ldrd) via a
// PC-relative load fixed up when the literal pool is placed.
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // The placeholder halfword temporarily stores the label's link chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3055
// Load a 4-byte literal into the single-precision register sd (vldr) via a
// PC-relative load fixed up when the literal pool is placed.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // The placeholder halfword temporarily stores the label's link chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3066
// Load an 8-byte literal into the double-precision register dd (vldr) via a
// PC-relative load fixed up when the literal pool is placed.
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // The placeholder halfword temporarily stores the label's link chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003077
// rd += value (in-place form of the three-operand AddConstant below).
void Thumb2Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}
3081
3082
// rd = rn + value, choosing the shortest materialization: a single add/sub
// with an encodable immediate, an mvn into IP plus add/sub, or finally a
// movw/movt pair into IP. Uses IP as scratch, so rn must not be IP then.
void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                  Condition cond) {
  if (value == 0) {
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
    add(rd, rn, shifter_op, cond);
  } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond);
  } else {
    CHECK(rn != IP);
    if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
      mvn(IP, shifter_op, cond);
      add(rd, rn, ShifterOperand(IP), cond);
    } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond);
      sub(rd, rn, ShifterOperand(IP), cond);
    } else {
      // Last resort: materialize the full 32-bit constant in IP.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond);
    }
  }
}
3117
3118
3119void Thumb2Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
3120 Condition cond) {
3121 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003122 if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003123 adds(rd, rn, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003124 } else if (ShifterOperandCanHold(rd, rn, ADD, -value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003125 subs(rd, rn, shifter_op, cond);
3126 } else {
3127 CHECK(rn != IP);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003128 if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003129 mvn(IP, shifter_op, cond);
3130 adds(rd, rn, ShifterOperand(IP), cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003131 } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003132 mvn(IP, shifter_op, cond);
3133 subs(rd, rn, ShifterOperand(IP), cond);
3134 } else {
3135 movw(IP, Low16Bits(value), cond);
3136 uint16_t value_high = High16Bits(value);
3137 if (value_high != 0) {
3138 movt(IP, value_high, cond);
3139 }
3140 adds(rd, rn, ShifterOperand(IP), cond);
3141 }
3142 }
3143}
3144
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003145
Dave Allison65fcc2c2014-04-28 13:45:27 -07003146void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3147 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003148 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003149 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003150 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003151 mvn(rd, shifter_op, cond);
3152 } else {
3153 movw(rd, Low16Bits(value), cond);
3154 uint16_t value_high = High16Bits(value);
3155 if (value_high != 0) {
3156 movt(rd, value_high, cond);
3157 }
3158 }
3159}
3160
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003161
Dave Allison65fcc2c2014-04-28 13:45:27 -07003162// Implementation note: this method must emit at most one instruction when
3163// Address::CanHoldLoadOffsetThumb.
3164void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3165 Register reg,
3166 Register base,
3167 int32_t offset,
3168 Condition cond) {
3169 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003170 CHECK_NE(base, IP);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003171 LoadImmediate(IP, offset, cond);
3172 add(IP, IP, ShifterOperand(base), cond);
3173 base = IP;
3174 offset = 0;
3175 }
3176 CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
3177 switch (type) {
3178 case kLoadSignedByte:
3179 ldrsb(reg, Address(base, offset), cond);
3180 break;
3181 case kLoadUnsignedByte:
3182 ldrb(reg, Address(base, offset), cond);
3183 break;
3184 case kLoadSignedHalfword:
3185 ldrsh(reg, Address(base, offset), cond);
3186 break;
3187 case kLoadUnsignedHalfword:
3188 ldrh(reg, Address(base, offset), cond);
3189 break;
3190 case kLoadWord:
3191 ldr(reg, Address(base, offset), cond);
3192 break;
3193 case kLoadWordPair:
3194 ldrd(reg, Address(base, offset), cond);
3195 break;
3196 default:
3197 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003198 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003199 }
3200}
3201
3202
// Load single-precision `reg` from [base + offset]; unencodable offsets are
// materialized in IP first (so base must not be IP then).
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
void Thumb2Assembler::LoadSFromOffset(SRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
3219
3220
// Load double-precision `reg` from [base + offset]; unencodable offsets are
// materialized in IP first (so base must not be IP then).
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
void Thumb2Assembler::LoadDFromOffset(DRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
3237
3238
// Store `reg` to [base + offset], dispatching on the operand type. If the
// offset is not directly encodable, a scratch register is used to form the
// address: IP when it does not clash with `reg` (or `reg`+1 for pairs),
// otherwise R5/R6 saved and restored around the store.
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if (reg != IP &&
        (type != kStoreWordPair || reg + 1 != IP)) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store)) and to build the Address
      // object used by the store instruction(s) below). Instead,
      // save R5 on the stack (or R6 if R5 is not available), use it
      // as secondary temporary register, and restore it after the
      // store instruction has been emitted.
      tmp_reg = base != R5 ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push just moved SP; compensate in the offset.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    add(tmp_reg, tmp_reg, ShifterOperand(base), cond);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the callee-saved scratch register if one was borrowed above.
  if (tmp_reg != kNoRegister && tmp_reg != IP) {
    DCHECK(tmp_reg == R5 || tmp_reg == R6);
    Pop(tmp_reg);
  }
}
3293
3294
// Store single-precision `reg` to [base + offset]; unencodable offsets are
// materialized in IP first (so base must not be IP then).
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
void Thumb2Assembler::StoreSToOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
3311
3312
// Store double-precision `reg` to [base + offset]; unencodable offsets are
// materialized in IP first (so base must not be IP then).
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
void Thumb2Assembler::StoreDToOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
3329
3330
// Emit a full-system memory barrier. The scratch register is unused here
// (it is only checked), but the interface requires it to be R12.
void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
3335
3336
// DMB: data memory barrier; `flavor` supplies the option field (e.g. SY)
// in the low bits of the fixed 32-bit encoding.
void Thumb2Assembler::dmb(DmbOptions flavor) {
  int32_t encoding = 0xf3bf8f50;  // dmb in T1 encoding.
  Emit32(encoding | flavor);
}
3341
3342
3343void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003344 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003345 cbz(r, label);
3346 } else {
3347 cmp(r, ShifterOperand(0));
3348 b(label, EQ);
3349 }
3350}
3351
3352
Dave Allison65fcc2c2014-04-28 13:45:27 -07003353void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003354 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003355 cbnz(r, label);
3356 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003357 cmp(r, ShifterOperand(0));
3358 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003359 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003360}
3361} // namespace arm
3362} // namespace art