blob: cc87856e82910f0e444741955c0579ed98e3c81d [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
// Build the compact reverse-dependency table used by fixup adjustment.
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // Count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward reference: every Fixup located between this one and the target
      // can change this one's reach, so this one is their dependent.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward reference: every Fixup located from the target up to this one.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;  // Nothing to record; leave fixup_dependents_ unallocated.
  }
  // Create and fill in the fixup_dependents_.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        // Filling backwards from the recorded range end keeps each range contiguous.
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
// Resolve every Fixup linked through `label`, then bind the label to `bound_pc`.
// While linked, the label's position holds the id of the first Fixup in the
// chain and the 16-bit placeholder stored at each Fixup's location holds the
// id of the next one.
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
94
95void Thumb2Assembler::BindLiterals() {
96 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
97 uint32_t code_size = buffer_.Size();
98 for (Literal& lit : literals_) {
99 Label* label = lit.GetLabel();
100 BindLabel(label, code_size);
101 code_size += lit.GetSize();
102 }
103}
104
// If `fixup` no longer fits its current size for `current_code_size`, grow it
// and propagate the size change to all Fixups that depend on it, queueing them
// for recalculation.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      // The 16-bit placeholder in the buffer doubles as an "already queued"
      // flag, so each dependent is enqueued at most once.
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
120
// Iterate Fixup size adjustment to a fixed point and return the final code
// size (excluding the literal pool and its padding).
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass: give every Fixup a chance to grow for the current code size.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" flag.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && !literals_.empty()) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
180
// Move the already-emitted code to its final position and emit each Fixup at
// its final size.
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // After processing all fixups the source and destination cursors must meet.
  CHECK_EQ(src_end, dest_end);
}
211
// Append the literal pool after the code, 4-byte aligned.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Padding halfword to reach 4-byte alignment.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      // The label must have been adjusted to the literal's final position.
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
231
232inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
Roland Levillain14d90572015-07-16 10:52:26 +0100233 DCHECK_ALIGNED(offset, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000234 int16_t encoding = B15 | B14;
235 if (cond != AL) {
236 DCHECK(IsInt<9>(offset));
237 encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
238 } else {
239 DCHECK(IsInt<12>(offset));
240 encoding |= B13 | ((offset >> 1) & 0x7ff);
241 }
242 return encoding;
243}
244
// Assemble a 32-bit B (branch): conditional or unconditional encoding.
// `offset` is a halfword-aligned byte offset.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;  // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                  // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);     // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22) ^ s ^ 1) & 1;  // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
267
// Assemble a 16-bit CBZ (cond == EQ) or CBNZ (cond == NE) for a low register.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 9.
}
277
278inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
279 DCHECK(!IsHighRegister(rn));
280 DCHECK(IsUint<8>(value));
281 return B13 | B11 | (rn << 8) | value;
282}
283
284inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
285 // The high bit of rn is moved across 4-bit rm.
286 return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
287 (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
288}
289
290inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
291 DCHECK(IsUint<16>(value));
292 return B31 | B30 | B29 | B28 | B25 | B22 |
293 (static_cast<int32_t>(rd) << 8) |
294 ((value & 0xf000) << (16 - 12)) | // Move imm4 from bits 12-15 to bits 16-19.
295 ((value & 0x0800) << (26 - 11)) | // Move i from bit 11 to bit 26.
296 ((value & 0x0700) << (12 - 8)) | // Move imm3 from bits 8-10 to bits 12-14.
297 (value & 0xff); // Keep imm8 in bits 0-7.
298}
299
300inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
301 DCHECK_EQ(value & 0xffff, 0);
302 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
303 return movw_encoding | B25 | B23;
304}
305
306inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
307 uint32_t mod_imm = ModifiedImmediate(value);
308 DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
309 return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
310 (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
311}
312
313inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
314 DCHECK(!IsHighRegister(rt));
Roland Levillain14d90572015-07-16 10:52:26 +0100315 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000316 DCHECK(IsUint<10>(offset));
317 return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
318}
319
320inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
321 // NOTE: We don't support negative offset, i.e. U=0 (B23).
322 return LdrRtRnImm12Encoding(rt, PC, offset);
323}
324
// Assemble a 32-bit LDRD rt, rt2, [rn, #offset] with offset addressing
// (P = 1, U = 1, W = 0). `offset` is a word-aligned byte offset.
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);  // imm8 is the word offset.
}
333
// Assemble VLDR.32 sd, [rn, #offset] (positive word-aligned offsets only).
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |  // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |  // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);  // imm8 is the word offset.
}
344
// Assemble VLDR.64 dd, [rn, #offset] (positive word-aligned offsets only).
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |  // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |  // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);  // imm8 is the word offset.
}
355
// Assemble a 16-bit LDR rt, [rn, #offset] for low registers;
// `offset` is a word-aligned byte offset encoded as imm5.
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
365
// Dispatch a wide or floating-point literal load to the encoder matching this
// Fixup's type. `rbase` is the base register, `offset` the byte offset.
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      // Only literal-load fixup types may reach this helper.
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
379
380inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
381 DCHECK(IsUint<12>(offset));
382 return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
383}
384
// Complete code generation: bind literal labels, settle all Fixup sizes, then
// emit the fixed-up code and the literal pool. The order of these steps is
// significant.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  BindLiterals();
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
}
392
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +0100393bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
394 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
395}
396
Nicolas Geoffray3d1e7882015-02-03 13:59:52 +0000397bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
398 Register rn ATTRIBUTE_UNUSED,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000399 Opcode opcode,
400 uint32_t immediate,
401 ShifterOperand* shifter_op) {
402 shifter_op->type_ = ShifterOperand::kImmediate;
403 shifter_op->immed_ = immediate;
404 shifter_op->is_shift_ = false;
405 shifter_op->is_rotate_ = false;
406 switch (opcode) {
407 case ADD:
408 case SUB:
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000409 if (immediate < (1 << 12)) { // Less than (or equal to) 12 bits can always be done.
410 return true;
411 }
412 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
413
414 case MOV:
415 // TODO: Support less than or equal to 12bits.
416 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
Vladimir Markod2b4ca22015-09-14 15:13:26 +0100417
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000418 case MVN:
419 default:
420 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
421 }
422}
423
Dave Allison65fcc2c2014-04-28 13:45:27 -0700424void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100425 Condition cond, SetCc set_cc) {
426 EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700427}
428
429
430void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100431 Condition cond, SetCc set_cc) {
432 EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700433}
434
435
436void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100437 Condition cond, SetCc set_cc) {
438 EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700439}
440
441
442void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100443 Condition cond, SetCc set_cc) {
444 EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700445}
446
447
448void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100449 Condition cond, SetCc set_cc) {
450 EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700451}
452
453
454void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100455 Condition cond, SetCc set_cc) {
456 EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700457}
458
459
460void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100461 Condition cond, SetCc set_cc) {
462 EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700463}
464
465
466void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100467 Condition cond, SetCc set_cc) {
468 EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700469}
470
471
472void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
473 CHECK_NE(rn, PC); // Reserve tst pc instruction for exception handler marker.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100474 EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700475}
476
477
478void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
479 CHECK_NE(rn, PC); // Reserve teq pc instruction for exception handler marker.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100480 EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700481}
482
483
484void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100485 EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700486}
487
488
489void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100490 EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700491}
492
493
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100494void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
495 Condition cond, SetCc set_cc) {
496 EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700497}
498
499
Vladimir Markod2b4ca22015-09-14 15:13:26 +0100500void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
501 Condition cond, SetCc set_cc) {
502 EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
503}
504
505
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100506void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
507 Condition cond, SetCc set_cc) {
508 EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700509}
510
511
512void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100513 Condition cond, SetCc set_cc) {
514 EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700515}
516
517
Vladimir Marko73cf0fb2015-07-30 15:07:22 +0100518void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
519 Condition cond, SetCc set_cc) {
520 EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
Dave Allison65fcc2c2014-04-28 13:45:27 -0700521}
522
523
524void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700525 CheckCondition(cond);
526
Dave Allison65fcc2c2014-04-28 13:45:27 -0700527 if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
528 // 16 bit.
529 int16_t encoding = B14 | B9 | B8 | B6 |
530 rn << 3 | rd;
531 Emit16(encoding);
532 } else {
533 // 32 bit.
Andreas Gampec8ccf682014-09-29 20:07:43 -0700534 uint32_t op1 = 0U /* 0b000 */;
535 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700536 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
537 op1 << 20 |
538 B15 | B14 | B13 | B12 |
539 op2 << 4 |
540 static_cast<uint32_t>(rd) << 8 |
541 static_cast<uint32_t>(rn) << 16 |
542 static_cast<uint32_t>(rm);
543
544 Emit32(encoding);
545 }
546}
547
548
549void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
550 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700551 CheckCondition(cond);
552
Andreas Gampec8ccf682014-09-29 20:07:43 -0700553 uint32_t op1 = 0U /* 0b000 */;
554 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700555 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
556 op1 << 20 |
557 op2 << 4 |
558 static_cast<uint32_t>(rd) << 8 |
559 static_cast<uint32_t>(ra) << 12 |
560 static_cast<uint32_t>(rn) << 16 |
561 static_cast<uint32_t>(rm);
562
563 Emit32(encoding);
564}
565
566
567void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
568 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700569 CheckCondition(cond);
570
Andreas Gampec8ccf682014-09-29 20:07:43 -0700571 uint32_t op1 = 0U /* 0b000 */;
572 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700573 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
574 op1 << 20 |
575 op2 << 4 |
576 static_cast<uint32_t>(rd) << 8 |
577 static_cast<uint32_t>(ra) << 12 |
578 static_cast<uint32_t>(rn) << 16 |
579 static_cast<uint32_t>(rm);
580
581 Emit32(encoding);
582}
583
584
Zheng Xuc6667102015-05-15 16:08:45 +0800585void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
586 Register rm, Condition cond) {
587 CheckCondition(cond);
588
589 uint32_t op1 = 0U /* 0b000; */;
590 uint32_t op2 = 0U /* 0b0000 */;
591 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
592 op1 << 20 |
593 op2 << 4 |
594 static_cast<uint32_t>(rd_lo) << 12 |
595 static_cast<uint32_t>(rd_hi) << 8 |
596 static_cast<uint32_t>(rn) << 16 |
597 static_cast<uint32_t>(rm);
598
599 Emit32(encoding);
600}
601
602
Dave Allison65fcc2c2014-04-28 13:45:27 -0700603void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
604 Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700605 CheckCondition(cond);
606
Andreas Gampec8ccf682014-09-29 20:07:43 -0700607 uint32_t op1 = 2U /* 0b010; */;
608 uint32_t op2 = 0U /* 0b0000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700609 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
610 op1 << 20 |
611 op2 << 4 |
612 static_cast<uint32_t>(rd_lo) << 12 |
613 static_cast<uint32_t>(rd_hi) << 8 |
614 static_cast<uint32_t>(rn) << 16 |
615 static_cast<uint32_t>(rm);
616
617 Emit32(encoding);
618}
619
620
621void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700622 CheckCondition(cond);
623
Andreas Gampec8ccf682014-09-29 20:07:43 -0700624 uint32_t op1 = 1U /* 0b001 */;
625 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700626 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
627 op1 << 20 |
628 op2 << 4 |
629 0xf << 12 |
630 static_cast<uint32_t>(rd) << 8 |
631 static_cast<uint32_t>(rn) << 16 |
632 static_cast<uint32_t>(rm);
633
634 Emit32(encoding);
635}
636
637
638void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700639 CheckCondition(cond);
640
Andreas Gampec8ccf682014-09-29 20:07:43 -0700641 uint32_t op1 = 1U /* 0b001 */;
642 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700643 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
644 op1 << 20 |
645 op2 << 4 |
646 0xf << 12 |
647 static_cast<uint32_t>(rd) << 8 |
648 static_cast<uint32_t>(rn) << 16 |
649 static_cast<uint32_t>(rm);
650
651 Emit32(encoding);
652}
653
654
Roland Levillain51d3fc42014-11-13 14:11:42 +0000655void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
656 CheckCondition(cond);
657 CHECK_LE(lsb, 31U);
658 CHECK(1U <= width && width <= 32U) << width;
659 uint32_t widthminus1 = width - 1;
660 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
661 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
662
663 uint32_t op = 20U /* 0b10100 */;
664 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
665 op << 20 |
666 static_cast<uint32_t>(rn) << 16 |
667 imm3 << 12 |
668 static_cast<uint32_t>(rd) << 8 |
669 imm2 << 6 |
670 widthminus1;
671
672 Emit32(encoding);
673}
674
675
Roland Levillain981e4542014-11-14 11:47:14 +0000676void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
677 CheckCondition(cond);
678 CHECK_LE(lsb, 31U);
679 CHECK(1U <= width && width <= 32U) << width;
680 uint32_t widthminus1 = width - 1;
681 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
682 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
683
684 uint32_t op = 28U /* 0b11100 */;
685 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
686 op << 20 |
687 static_cast<uint32_t>(rn) << 16 |
688 imm3 << 12 |
689 static_cast<uint32_t>(rd) << 8 |
690 imm2 << 6 |
691 widthminus1;
692
693 Emit32(encoding);
694}
695
696
Dave Allison65fcc2c2014-04-28 13:45:27 -0700697void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
698 EmitLoadStore(cond, true, false, false, false, rd, ad);
699}
700
701
702void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
703 EmitLoadStore(cond, false, false, false, false, rd, ad);
704}
705
706
707void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
708 EmitLoadStore(cond, true, true, false, false, rd, ad);
709}
710
711
712void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
713 EmitLoadStore(cond, false, true, false, false, rd, ad);
714}
715
716
717void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
718 EmitLoadStore(cond, true, false, true, false, rd, ad);
719}
720
721
722void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
723 EmitLoadStore(cond, false, false, true, false, rd, ad);
724}
725
726
727void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
728 EmitLoadStore(cond, true, true, false, true, rd, ad);
729}
730
731
732void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
733 EmitLoadStore(cond, true, false, true, true, rd, ad);
734}
735
736
737void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
Roland Levillain4af147e2015-04-07 13:54:49 +0100738 ldrd(rd, Register(rd + 1), ad, cond);
739}
740
741
// LDRD: load two words from `ad` into rd and rd2.
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
752
753
754void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
Roland Levillain4af147e2015-04-07 13:54:49 +0100755 strd(rd, Register(rd + 1), ad, cond);
756}
757
758
// STRD: store two words from rd and rd2 to `ad`.
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other stores.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
769
770
// Load multiple registers from memory at |base| according to |am|.
// Thumb has no encoding for an LDM with a single register in the list, so
// that case is lowered to an equivalent single LDR.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);      // Only writeback is supported.
    // NOTE(review): the LDR emitted below (post-index, +kRegisterSize) has
    // load-then-increment semantics, i.e. it matches IA_W (pop), not the DB_W
    // mode enforced by the CHECK above — confirm which behavior callers rely on.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
787
788
// Store multiple registers to memory at |base| according to |am|.
// Thumb has no encoding for an STM with a single register in the list, so
// that case is lowered to an equivalent single STR.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): the writeback pairing below looks inverted relative to the
    // mode names — IA (no writeback) maps to PreIndex (which does write back),
    // while IA_W maps to Offset (no writeback) — and the negative offset gives
    // decrement (push-like) addressing. Confirm against the callers' expectations.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
806
807
// Try to emit VMOV.F32 |sd|, #|s_imm| using the VFP 8-bit "modified immediate"
// encoding. Returns true and emits the instruction if the value is
// representable; returns false (emitting nothing) otherwise, leaving the
// caller to materialize the constant some other way.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  // Representable iff the low 19 fraction bits are zero and the six bits
  // [30:25] are either 100000 or 011111 (exponent within the range the
  // immediate form can express; cf. ARM ARM VFPExpandImm).
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack imm8 as sign(bit 31) : bit 29 : bits [24:19].
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    // imm8 is split across the instruction: high nibble at B16, low nibble at bit 0.
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
821
822
// Try to emit VMOV.F64 |dd|, #|d_imm| using the VFP 8-bit "modified immediate"
// encoding. Returns true and emits the instruction if the value is
// representable; returns false (emitting nothing) otherwise.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  // Representable iff the low 48 fraction bits are zero and the nine bits
  // [62:54] are either 100000000 or 011111111 (exponent within the range the
  // immediate form can express; cf. ARM ARM VFPExpandImm).
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack imm8 as sign(bit 63) : bit 61 : bits [53:48].
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    // imm8 is split across the instruction: high nibble at B16, low nibble at
    // bit 0; B8 selects the double-precision form.
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
836
837
838void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
839 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
840}
841
842
843void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
844 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
845}
846
847
848void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
849 Condition cond) {
850 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
851}
852
853
854void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
855 Condition cond) {
856 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
857}
858
859
860void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
861 Condition cond) {
862 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
863}
864
865
866void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
867 Condition cond) {
868 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
869}
870
871
872void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
873 Condition cond) {
874 EmitVFPsss(cond, B21, sd, sn, sm);
875}
876
877
878void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
879 Condition cond) {
880 EmitVFPddd(cond, B21, dd, dn, dm);
881}
882
883
884void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
885 Condition cond) {
886 EmitVFPsss(cond, 0, sd, sn, sm);
887}
888
889
890void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
891 Condition cond) {
892 EmitVFPddd(cond, 0, dd, dn, dm);
893}
894
895
896void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
897 Condition cond) {
898 EmitVFPsss(cond, B6, sd, sn, sm);
899}
900
901
902void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
903 Condition cond) {
904 EmitVFPddd(cond, B6, dd, dn, dm);
905}
906
907
908void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
909 Condition cond) {
910 EmitVFPsss(cond, B23, sd, sn, sm);
911}
912
913
914void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
915 Condition cond) {
916 EmitVFPddd(cond, B23, dd, dn, dm);
917}
918
919
920void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
921 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
922}
923
924
925void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
926 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
927}
928
929
930void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
931 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
932}
933
934
935void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
936 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
937}
938
939
940void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
941 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
942}
943
944void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
945 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
946}
947
948
949void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
950 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
951}
952
953
954void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
955 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
956}
957
958
959void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
960 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
961}
962
963
964void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
965 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
966}
967
968
969void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
970 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
971}
972
973
974void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
975 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
976}
977
978
979void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
980 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
981}
982
983
984void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
985 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
986}
987
988
989void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
990 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
991}
992
993
994void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
995 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
996}
997
998
999void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
1000 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
1001}
1002
1003
1004void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
1005 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
1006}
1007
1008
1009void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
1010 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
1011}
1012
1013
1014void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
1015 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
1016}
1017
1018void Thumb2Assembler::b(Label* label, Condition cond) {
agicsakie2142d252015-06-30 17:10:03 -07001019 DCHECK_EQ(next_condition_, AL);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001020 EmitBranch(cond, label, false, false);
1021}
1022
1023
1024void Thumb2Assembler::bl(Label* label, Condition cond) {
1025 CheckCondition(cond);
1026 EmitBranch(cond, label, true, false);
1027}
1028
1029
1030void Thumb2Assembler::blx(Label* label) {
1031 EmitBranch(AL, label, true, true);
1032}
1033
1034
1035void Thumb2Assembler::MarkExceptionHandler(Label* label) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001036 EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001037 Label l;
1038 b(&l);
1039 EmitBranch(AL, label, false, false);
1040 Bind(&l);
1041}
1042
1043
1044void Thumb2Assembler::Emit32(int32_t value) {
1045 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1046 buffer_.Emit<int16_t>(value >> 16);
1047 buffer_.Emit<int16_t>(value & 0xffff);
1048}
1049
1050
// Append a single 16-bit instruction halfword to the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1055
1056
// Returns true if the data-processing instruction described by the arguments
// requires the 32-bit Thumb2 encoding, false if it can be emitted with a
// 16-bit (Thumb1-style) encoding. Emits nothing itself.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  // force_32bit_ pins every instruction to the wide encoding.
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only CMP and the non-flag-setting forms of MOV and ADD (with rn == rd)
  // have 16-bit encodings that accept high registers (R8 and above).
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
    case ORN:
      // TEQ and ORN have no 16-bit encoding at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      // Remaining two-operand 16-bit encodings require rd == rn.
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      // Only RSB with a zero immediate (i.e. NEG) has a 16-bit form.
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if (!IsUint<3>(so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (!IsUint<8>(so.GetImmediate())) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1211
1212
// Emit a data-processing instruction using the 32-bit Thumb2 encoding.
// |cond| is unused here: in Thumb2, conditionality comes from a preceding IT
// block rather than from bits inside the instruction.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  // Map the generic opcode onto the Thumb2 data-processing "op" field.
  // Compare/test instructions reuse another opcode's encoding with Rd = PC;
  // MOV/MVN have no Rn and encode Rn = PC.
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the fatal check below.
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    case ORN: thumb_opcode = 3U /* 0b0011 */; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
      if (set_cc != kCcSet) {
        // Use the plain 12-bit immediate forms (ADDW/SUBW opcodes).
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit value into the i:imm3:imm8 fields.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      // NOTE(review): with set_cc == kCcSet this places the raw 12-bit value
      // into the S-bit (modified-immediate) encoding; that is only equivalent
      // for values that are themselves valid modified immediates (< 256) —
      // confirm callers never flag-set with larger values.
      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1297
1298
// Emit a data-processing instruction using a 16-bit encoding. ADD and SUB are
// delegated to Emit16BitAddSub; callers are expected to have established via
// Is32BitDataProcessing that a 16-bit encoding exists for these operands.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  // Thumb1.
  // Default field layout for the 16-bit data-processing format; individual
  // cases below override these shifts for other 16-bit formats.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();  // The shift amount.
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the fields using the (possibly adjusted) shift amounts.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1487
1488
1489// ADD and SUB are complex enough to warrant their own emitter.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001490void Thumb2Assembler::Emit16BitAddSub(Condition cond,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001491 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001492 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001493 Register rn,
1494 Register rd,
1495 const ShifterOperand& so) {
1496 uint8_t dp_opcode = 0;
1497 uint8_t opcode_shift = 6;
1498 uint8_t rd_shift = 0;
1499 uint8_t rn_shift = 3;
1500 uint8_t immediate_shift = 0;
1501 bool use_immediate = false;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001502 uint32_t immediate = 0; // Should be at most 9 bits but keep the full immediate for CHECKs.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001503 uint8_t thumb_opcode;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001504
1505 if (so.IsImmediate()) {
1506 use_immediate = true;
1507 immediate = so.GetImmediate();
1508 }
1509
1510 switch (opcode) {
1511 case ADD:
1512 if (so.IsRegister()) {
1513 Register rm = so.GetRegister();
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001514 if (rn == rd && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001515 // Can use T2 encoding (allows 4 bit registers)
Andreas Gampec8ccf682014-09-29 20:07:43 -07001516 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001517 opcode_shift = 10;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001518 thumb_opcode = 1U /* 0b0001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001519 // Make Rn also contain the top bit of rd.
1520 rn = static_cast<Register>(static_cast<uint32_t>(rm) |
Andreas Gampec8ccf682014-09-29 20:07:43 -07001521 (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
1522 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001523 } else {
1524 // T1.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001525 DCHECK(!IsHighRegister(rd));
1526 DCHECK(!IsHighRegister(rn));
1527 DCHECK(!IsHighRegister(rm));
1528 // Sets condition codes if and only if outside IT block,
1529 // check that it complies with set_cc.
1530 DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001531 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001532 thumb_opcode = 12U /* 0b01100 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001533 immediate = static_cast<uint32_t>(so.GetRegister());
1534 use_immediate = true;
1535 immediate_shift = 6;
1536 }
1537 } else {
1538 // Immediate.
1539 if (rd == SP && rn == SP) {
1540 // ADD sp, sp, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001541 dp_opcode = 2U /* 0b10 */;
1542 thumb_opcode = 3U /* 0b11 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001543 opcode_shift = 12;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001544 CHECK_LT(immediate, (1u << 9));
Roland Levillain14d90572015-07-16 10:52:26 +01001545 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001546
1547 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1548 rn = R0;
1549 rd = R0;
1550 rd_shift = 0;
1551 rn_shift = 0;
1552 immediate >>= 2;
1553 } else if (rd != SP && rn == SP) {
1554 // ADD rd, SP, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001555 dp_opcode = 2U /* 0b10 */;
1556 thumb_opcode = 5U /* 0b101 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001557 opcode_shift = 11;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001558 CHECK_LT(immediate, (1u << 10));
Roland Levillain14d90572015-07-16 10:52:26 +01001559 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001560
1561 // Remove rn from instruction.
1562 rn = R0;
1563 rn_shift = 0;
1564 rd_shift = 8;
1565 immediate >>= 2;
1566 } else if (rn != rd) {
1567 // Must use T1.
1568 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001569 thumb_opcode = 14U /* 0b01110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001570 immediate_shift = 6;
1571 } else {
1572 // T2 encoding.
1573 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001574 thumb_opcode = 6U /* 0b110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001575 rd_shift = 8;
1576 rn_shift = 8;
1577 }
1578 }
1579 break;
1580
1581 case SUB:
1582 if (so.IsRegister()) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001583 // T1.
1584 Register rm = so.GetRegister();
1585 DCHECK(!IsHighRegister(rd));
1586 DCHECK(!IsHighRegister(rn));
1587 DCHECK(!IsHighRegister(rm));
1588 // Sets condition codes if and only if outside IT block,
1589 // check that it complies with set_cc.
1590 DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
1591 opcode_shift = 9;
1592 thumb_opcode = 13U /* 0b01101 */;
1593 immediate = static_cast<uint32_t>(rm);
1594 use_immediate = true;
1595 immediate_shift = 6;
1596 } else {
1597 if (rd == SP && rn == SP) {
1598 // SUB sp, sp, #imm
1599 dp_opcode = 2U /* 0b10 */;
1600 thumb_opcode = 0x61 /* 0b1100001 */;
1601 opcode_shift = 7;
1602 CHECK_LT(immediate, (1u << 9));
1603 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001604
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001605 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1606 rn = R0;
1607 rd = R0;
1608 rd_shift = 0;
1609 rn_shift = 0;
1610 immediate >>= 2;
1611 } else if (rn != rd) {
1612 // Must use T1.
1613 opcode_shift = 9;
1614 thumb_opcode = 15U /* 0b01111 */;
1615 immediate_shift = 6;
1616 } else {
1617 // T2 encoding.
1618 opcode_shift = 11;
1619 thumb_opcode = 7U /* 0b111 */;
1620 rd_shift = 8;
1621 rn_shift = 8;
1622 }
1623 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001624 break;
1625 default:
1626 LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001627 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001628 }
1629
1630 int16_t encoding = dp_opcode << 14 |
1631 (thumb_opcode << opcode_shift) |
1632 rd << rd_shift |
1633 rn << rn_shift |
1634 (use_immediate ? (immediate << immediate_shift) : 0);
1635
1636 Emit16(encoding);
1637}
1638
1639
1640void Thumb2Assembler::EmitDataProcessing(Condition cond,
1641 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001642 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001643 Register rn,
1644 Register rd,
1645 const ShifterOperand& so) {
1646 CHECK_NE(rd, kNoRegister);
1647 CheckCondition(cond);
1648
1649 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1650 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1651 } else {
1652 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1653 }
1654}
1655
// Emits a shift of register rm by an immediate amount into rd.
// The 16-bit encoding is used when possible; it only supports LSL/LSR/ASR on
// low registers and sets flags if and only if we are outside an IT block
// (cond == AL here), so ROR/RRX, high registers, or a mismatched set_cc
// requirement force the 32-bit encoding.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rm,
                                Shift shift,
                                uint8_t amount,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_LT(amount, (1 << 5));  // Shift amount is a 5-bit immediate.
  if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      // RRX uses the ROR opcode with a zero shift amount.
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (set_cc == kCcSet ? B20 : 0);
    // The 5-bit immediate is split into imm3:imm2 fields of the encoding.
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        // ROR/RRX were diverted to the 32-bit path above.
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1700
// Emits a shift of register rn by the amount held in register rm, into rd.
// RRX has no register-shift form. The 16-bit encoding requires low registers,
// rd == rn (it is a two-operand form), and sets flags if and only if we are
// outside an IT block (cond == AL here); anything else forces 32 bits.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rn,
                                Shift shift,
                                Register rm,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_NE(shift, RRX);
  bool must_be_32bit = false;
  if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
        0xf << 12 | (set_cc == kCcSet ? B20 : 0);
    encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 21;
    Emit32(encoding);
  } else {
    // 16-bit encoding; note the opcode values differ from the 32-bit form.
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 2U /* 0b0010 */; break;
      case LSR: opcode = 3U /* 0b0011 */; break;
      case ASR: opcode = 4U /* 0b0100 */; break;
      case ROR: opcode = 7U /* 0b0111 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1747
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001748inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1749 switch (size) {
1750 case kBranch16Bit:
1751 return 2u;
1752 case kBranch32Bit:
1753 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001754
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001755 case kCbxz16Bit:
1756 return 2u;
1757 case kCbxz32Bit:
1758 return 4u;
1759 case kCbxz48Bit:
1760 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001761
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001762 case kLiteral1KiB:
1763 return 2u;
1764 case kLiteral4KiB:
1765 return 4u;
1766 case kLiteral64KiB:
1767 return 8u;
1768 case kLiteral1MiB:
1769 return 10u;
1770 case kLiteralFar:
1771 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001772
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001773 case kLongOrFPLiteral1KiB:
1774 return 4u;
1775 case kLongOrFPLiteral256KiB:
1776 return 10u;
1777 case kLongOrFPLiteralFar:
1778 return 14u;
1779 }
1780 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1781 UNREACHABLE();
1782}
1783
// Code size of this fixup as originally emitted, before any size adjustment.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1787
// Current (possibly adjusted/promoted) code size of this fixup.
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1791
1792inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1793 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001794 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001795 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1796 return current_code_size & 2;
1797}
1798
// Returns the offset to encode for this fixup given the current total code
// size: the raw target-location distance corrected for the accumulated size
// adjustment of fixups in between, the Thumb2 PC bias of 4 bytes, any extra
// instructions preceding the PC use in multi-instruction sequences, literal
// pool padding, and the PC rounding performed by literal loads.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward reference: fixups in between growing moves the target away.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward reference: growth moves the earlier target further back.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1859
1860inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1861 DCHECK_NE(target_, kUnresolved);
1862 Size old_size = size_;
1863 size_ = new_size;
1864 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1865 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1866 if (target_ > location_) {
1867 adjustment_ += adjustment;
1868 }
1869 return adjustment;
1870}
1871
// Checks whether this fixup's current encoding can reach its target at the
// given total code size and, if not, promotes it to the next larger encoding.
// The fall-throughs are deliberate: one call can promote through several
// encodings in sequence. Returns the number of bytes the code size grew by.
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // 16-bit branch: 9-bit signed offset if conditional, 12-bit if not.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ: 7-bit unsigned (forward-only) offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      // CMP + 16-bit B<cond>: 9-bit signed offset.
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
1950
// Writes the final instruction encoding(s) for this fixup into the buffer at
// location_. The fixup's size and target must be final; code_size is the
// final total code size used to compute the encoded offset.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;  // Set the bit that turns the B encoding into BL.
      } else if (type_ == kUnconditionalLinkX) {
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;  // Flip the bits that turn the B encoding into BLX.
      }
      // 32-bit encodings are stored as two 16-bit halfwords, high half first.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CB(N)Z out of range: emit CMP rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // CMP rn, #0 followed by a 32-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT to build the full offset, then ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // Wide/FP loads build the address in IP since the loaded register
      // cannot hold the intermediate address.
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2093
Dave Allison65fcc2c2014-04-28 13:45:27 -07002094uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002095 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002096 uint32_t location = buffer_.Size();
2097
2098 // This is always unresolved as it must be a forward branch.
2099 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002100 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002101}
2102
2103
2104// NOTE: this only support immediate offsets, not [rx,ry].
2105// TODO: support [rx,ry] instructions.
2106void Thumb2Assembler::EmitLoadStore(Condition cond,
2107 bool load,
2108 bool byte,
2109 bool half,
2110 bool is_signed,
2111 Register rd,
2112 const Address& ad) {
2113 CHECK_NE(rd, kNoRegister);
2114 CheckCondition(cond);
2115 bool must_be_32bit = force_32bit_;
2116 if (IsHighRegister(rd)) {
2117 must_be_32bit = true;
2118 }
2119
2120 Register rn = ad.GetRegister();
Dave Allison45fdb932014-06-25 12:37:10 -07002121 if (IsHighRegister(rn) && rn != SP && rn != PC) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002122 must_be_32bit = true;
2123 }
2124
2125 if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
2126 must_be_32bit = true;
2127 }
2128
Dave Allison45fdb932014-06-25 12:37:10 -07002129 if (ad.IsImmediate()) {
2130 // Immediate offset
2131 int32_t offset = ad.GetOffset();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002132
Dave Allison45fdb932014-06-25 12:37:10 -07002133 // The 16 bit SP relative instruction can only have a 10 bit offset.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002134 if (rn == SP && offset >= (1 << 10)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002135 must_be_32bit = true;
2136 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002137
2138 if (byte) {
Dave Allison45fdb932014-06-25 12:37:10 -07002139 // 5 bit offset, no shift.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002140 if (offset >= (1 << 5)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002141 must_be_32bit = true;
2142 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002143 } else if (half) {
Dave Allison45fdb932014-06-25 12:37:10 -07002144 // 6 bit offset, shifted by 1.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002145 if (offset >= (1 << 6)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002146 must_be_32bit = true;
2147 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002148 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002149 // 7 bit offset, shifted by 2.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002150 if (offset >= (1 << 7)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002151 must_be_32bit = true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002152 }
2153 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002154
Dave Allison45fdb932014-06-25 12:37:10 -07002155 if (must_be_32bit) {
2156 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2157 (load ? B20 : 0) |
2158 (is_signed ? B24 : 0) |
2159 static_cast<uint32_t>(rd) << 12 |
2160 ad.encodingThumb(true) |
2161 (byte ? 0 : half ? B21 : B22);
2162 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002163 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002164 // 16 bit thumb1.
2165 uint8_t opA = 0;
2166 bool sp_relative = false;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002167
2168 if (byte) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002169 opA = 7U /* 0b0111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002170 } else if (half) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002171 opA = 8U /* 0b1000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002172 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002173 if (rn == SP) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002174 opA = 9U /* 0b1001 */;
Dave Allison45fdb932014-06-25 12:37:10 -07002175 sp_relative = true;
2176 } else {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002177 opA = 6U /* 0b0110 */;
Dave Allison45fdb932014-06-25 12:37:10 -07002178 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002179 }
Dave Allison45fdb932014-06-25 12:37:10 -07002180 int16_t encoding = opA << 12 |
2181 (load ? B11 : 0);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002182
Dave Allison45fdb932014-06-25 12:37:10 -07002183 CHECK_GE(offset, 0);
2184 if (sp_relative) {
2185 // SP relative, 10 bit offset.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002186 CHECK_LT(offset, (1 << 10));
Roland Levillain14d90572015-07-16 10:52:26 +01002187 CHECK_ALIGNED(offset, 4);
Dave Allison45fdb932014-06-25 12:37:10 -07002188 encoding |= rd << 8 | offset >> 2;
2189 } else {
2190 // No SP relative. The offset is shifted right depending on
2191 // the size of the load/store.
2192 encoding |= static_cast<uint32_t>(rd);
2193
2194 if (byte) {
2195 // 5 bit offset, no shift.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002196 CHECK_LT(offset, (1 << 5));
Dave Allison45fdb932014-06-25 12:37:10 -07002197 } else if (half) {
2198 // 6 bit offset, shifted by 1.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002199 CHECK_LT(offset, (1 << 6));
Roland Levillain14d90572015-07-16 10:52:26 +01002200 CHECK_ALIGNED(offset, 2);
Dave Allison45fdb932014-06-25 12:37:10 -07002201 offset >>= 1;
2202 } else {
2203 // 7 bit offset, shifted by 2.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002204 CHECK_LT(offset, (1 << 7));
Roland Levillain14d90572015-07-16 10:52:26 +01002205 CHECK_ALIGNED(offset, 4);
Dave Allison45fdb932014-06-25 12:37:10 -07002206 offset >>= 2;
2207 }
2208 encoding |= rn << 3 | offset << 6;
2209 }
2210
2211 Emit16(encoding);
2212 }
2213 } else {
2214 // Register shift.
2215 if (ad.GetRegister() == PC) {
2216 // PC relative literal encoding.
2217 int32_t offset = ad.GetOffset();
Dave Allison0bb9ade2014-06-26 17:57:36 -07002218 if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
Dave Allison45fdb932014-06-25 12:37:10 -07002219 int32_t up = B23;
2220 if (offset < 0) {
2221 offset = -offset;
2222 up = 0;
2223 }
2224 CHECK_LT(offset, (1 << 12));
2225 int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
2226 offset | up |
2227 static_cast<uint32_t>(rd) << 12;
2228 Emit32(encoding);
2229 } else {
2230 // 16 bit literal load.
2231 CHECK_GE(offset, 0);
2232 CHECK_LT(offset, (1 << 10));
2233 int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
2234 Emit16(encoding);
2235 }
2236 } else {
2237 if (ad.GetShiftCount() != 0) {
2238 // If there is a shift count this must be 32 bit.
2239 must_be_32bit = true;
2240 } else if (IsHighRegister(ad.GetRegisterOffset())) {
2241 must_be_32bit = true;
2242 }
2243
2244 if (must_be_32bit) {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002245 int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
Dave Allison45fdb932014-06-25 12:37:10 -07002246 ad.encodingThumb(true);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002247 if (half) {
2248 encoding |= B21;
2249 } else if (!byte) {
2250 encoding |= B22;
2251 }
Dave Allison45fdb932014-06-25 12:37:10 -07002252 Emit32(encoding);
2253 } else {
2254 // 16 bit register offset.
2255 int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
2256 ad.encodingThumb(false);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002257 if (byte) {
2258 encoding |= B10;
2259 } else if (half) {
2260 encoding |= B9;
2261 }
Dave Allison45fdb932014-06-25 12:37:10 -07002262 Emit16(encoding);
2263 }
2264 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002265 }
2266}
2267
2268
// Emits an LDM/STM-style multiple-register load or store. SP-based IA_W
// loads / DB_W stores map to 16-bit POP/PUSH when the register list allows;
// otherwise a 16-bit LDMIA/STMIA or a 32-bit encoding is used. Only IA and
// DB address modes exist in Thumb.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // POP allows PC and PUSH allows LR in the list; any other high register
  // disqualifies the 16-bit PUSH/POP form.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // High registers in the list require the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                    (op << 23) |
                    (load ? B20 : 0) |
                    base << 16 |
                    regs |
                    (w_bit << 21);
    Emit32(encoding);
  } else {
    // 16-bit LDMIA/STMIA with writeback.
    int16_t encoding = B15 | B14 |
                    (load ? B11 : 0) |
                    base << 8 |
                    regs;
    Emit16(encoding);
  }
}
2337
// Emits a branch (B/B<cond>/BL/BLX) to the given label as a fixup placeholder.
// The actual encoding is written later by Fixup::Emit() once all sizes and
// targets are final.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  // If branches cannot be relocated (grown later), conservatively start wide.
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      // Branch-with-link has no 16-bit encoding here.
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  // Reserve the second halfword of a 32-bit branch.
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2376
2377
// CLZ rd, rm: count leading zeros. Always the 32-bit Thumb2 encoding; rm is
// encoded twice (bits 19:16 and 3:0) as the encoding requires. PC is not
// allowed for either operand.
void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B21 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2393
2394
// MOVW rd, #imm16: move a 16-bit immediate into rd (top half zeroed).
// Chooses the 16-bit encoding when rd is a low register and imm16 < 256
// (NOTE(review): the 16-bit form is the flag-setting MOVS outside an IT
// block — confirm callers tolerate this), otherwise the 32-bit T3 encoding.
void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)|| imm16 >= 256u) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    // Use encoding T3.
    // imm16 is split into the scattered i:imm4:imm3:imm8 fields.
    uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
    uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
    uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
    uint32_t imm8 = imm16 & 0xff;
    int32_t encoding = B31 | B30 | B29 | B28 |
        B25 | B22 |
        static_cast<uint32_t>(rd) << 8 |
        i << 26 |
        imm4 << 16 |
        imm3 << 12 |
        imm8;
    Emit32(encoding);
  } else {
    int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
        imm16;
    Emit16(encoding);
  }
}
2422
2423
// MOVT rd, #imm16: move a 16-bit immediate into the top half of rd, leaving
// the bottom half unchanged. Only a 32-bit encoding exists.
void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  // Always 32 bits.
  // imm16 is split into the scattered i:imm4:imm3:imm8 fields.
  uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
  uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
  uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
  uint32_t imm8 = imm16 & 0xff;
  int32_t encoding = B31 | B30 | B29 | B28 |
      B25 | B23 | B22 |
      static_cast<uint32_t>(rd) << 8 |
      i << 26 |
      imm4 << 16 |
      imm3 << 12 |
      imm8;
  Emit32(encoding);
}
2440
2441
// RBIT rd, rm: reverse the bit order of rm into rd. Always the 32-bit
// encoding; rm is encoded twice (bits 19:16 and 3:0). Neither SP nor PC is
// allowed for either operand.
void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 | B5 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2459
2460
// LDREX rt, [rn, #imm]: load-exclusive word. |imm| must be < 1024 and is
// encoded as a word offset (imm >> 2). NOTE(review): an imm that is not a
// multiple of 4 is silently rounded down by the shift — confirm callers
// always pass 4-byte-aligned offsets.
void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      0xf << 8 |
      imm >> 2;
  Emit32(encoding);
}
2474
2475
// LDREX rt, [rn]: load-exclusive with a zero offset.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2479
2480
// STREX rd, rt, [rn, #imm]: store-exclusive word; rd receives the success
// status. |imm| must be < 1024 and is encoded as a word offset (imm >> 2).
// NOTE(review): unaligned imm is silently truncated — confirm callers pass
// multiples of 4.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            uint16_t imm,
                            Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm >> 2;
  Emit32(encoding);
}
2499
2500
// LDREXD rt, rt2, [rn]: load-exclusive doubleword into the pair rt/rt2.
// The two destination registers must be distinct.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 | B3 | B2 | B1 | B0;
  Emit32(encoding);
}
2515
2516
// STREX rd, rt, [rn]: store-exclusive with a zero offset.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2523
2524
// STREXD rd, rt, rt2, [rn]: store-exclusive doubleword from the pair rt/rt2;
// rd receives the success status and must not overlap the source registers.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CHECK_NE(rd, rt);
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 |
      static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2543
2544
// CLREX: clear the local processor's exclusive-access monitor.
void Thumb2Assembler::clrex(Condition cond) {
  CheckCondition(cond);
  int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
      B21 | B20 |
      0xf << 16 |
      B15 |
      0xf << 8 |
      B5 |
      0xf;
  Emit32(encoding);
}
2556
2557
// NOP: 16-bit no-operation hint.
void Thumb2Assembler::nop(Condition cond) {
  CheckCondition(cond);
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8;
  Emit16(static_cast<int16_t>(encoding));
}
2564
2565
// VMOV sn, rt: move a core register into a single-precision VFP register.
// sn is split into its Vn (bits 19:16) and N (bit 7) encoding fields.
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2579
2580
// VMOV rt, sn: move a single-precision VFP register into a core register.
// Same encoding as vmovsr with the direction bit (B20) set.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B20 |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2594
2595
// VMOV sm, sm+1, rt, rt2: move two core registers into the consecutive pair
// of single-precision registers starting at sm (hence sm != S31).
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2615
2616
// VMOV rt, rt2, sm, sm+1: move a consecutive pair of single-precision
// registers into two distinct core registers (direction bit B20 set).
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 | B20 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2637
2638
// VMOV dm, rt, rt2: move two core registers into a double-precision register.
// dm is split into its Vm (bits 3:0) and M (bit 5) encoding fields.
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2657
2658
// VMOV rt, rt2, dm: move a double-precision register into two distinct core
// registers (direction bit B20 set).
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 | B20 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2678
2679
// VLDR sd, [address]: load a single-precision register from memory.
// The address contributes its VFP-specific encoding via vencoding().
void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);  // Redundant cast kept for symmetry with vstrs.
  CHECK_NE(sd, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 | B20 |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      B11 | B9 | addr.vencoding();
  Emit32(encoding);
}
2691
2692
// VSTR sd, [address]: store a single-precision register to memory.
// The base register of the address must not be PC.
void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);
  CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
  CHECK_NE(sd, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      B11 | B9 | addr.vencoding();
  Emit32(encoding);
}
2705
2706
// VLDR dd, [address]: load a double-precision register from memory.
void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);  // Redundant cast kept for symmetry with vstrd.
  CHECK_NE(dd, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 | B20 |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      B11 | B9 | B8 | addr.vencoding();
  Emit32(encoding);
}
2718
2719
// VSTR dd, [address]: store a double-precision register to memory.
// The base register of the address must not be PC.
void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);
  CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
  CHECK_NE(dd, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      B11 | B9 | B8 | addr.vencoding();
  Emit32(encoding);
}
2732
2733
// VPUSH of |nregs| consecutive single-precision registers starting at |reg|.
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2737
2738
// VPUSH of |nregs| consecutive double-precision registers starting at |reg|.
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2742
2743
// VPOP of |nregs| consecutive single-precision registers starting at |reg|.
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2747
2748
// VPOP of |nregs| consecutive double-precision registers starting at |reg|.
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2752
2753
// Shared emitter for VPUSH/VPOP. |reg| is the raw register number; for
// doubles it encodes as D:Vd, for singles as Vd:D. |push| selects VPUSH
// (pre-decrement store) vs. VPOP (post-increment load); |dbl| selects the
// double-precision form. The register count occupies imm8, doubled for D regs.
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
      B11 | B9 |
      (dbl ? B8 : 0) |
      (push ? B24 : (B23 | B20)) |
      14U /* 0b1110 */ << 28 |
      nregs << (dbl ? 1 : 0) |
      D << 22 |
      Vd << 12;
  Emit32(encoding);
}
2778
2779
// Shared emitter for three-operand single-precision VFP data-processing
// instructions; |opcode| carries the instruction-specific bits. Each SRegister
// is split into its 4-bit field and 1-bit D/N/M extension bit.
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(sn) & 1)*B7) |
      ((static_cast<int32_t>(sm) & 1)*B5) |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2796
2797
// Shared emitter for three-operand double-precision VFP data-processing
// instructions; |opcode| carries the instruction-specific bits. Each DRegister
// is split into its 4-bit field and 1-bit D/N/M extension bit.
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | B8 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dn) & 0xf)*B16) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(dn) >> 4)*B7) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2814
2815
// Shared emitter for VFP instructions with a single-precision destination and
// a double-precision source (e.g. conversions).
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2829
2830
// Shared emitter for VFP instructions with a double-precision destination and
// a single-precision source (e.g. conversions).
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(sm) & 1)*B5) |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2844
2845
// VMRS APSR_nzcv, FPSCR: copy the VFP comparison flags into the core
// condition flags (Rt = PC/0b1111 selects APSR_nzcv).
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
      (static_cast<int32_t>(PC)*B12) |
      B11 | B9 | B4;
  Emit32(encoding);
}
2855
2856
// SVC #imm8: supervisor call with an 8-bit immediate.
void Thumb2Assembler::svc(uint32_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B14 | B12 |
      B11 | B10 | B9 | B8 |
      imm8;
  Emit16(encoding);
}
2864
2865
// BKPT #imm8: software breakpoint with an 8-bit immediate.
void Thumb2Assembler::bkpt(uint16_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 |
      imm8;
  Emit16(encoding);
}
2873
2874// Convert the given IT state to a mask bit given bit 0 of the first
2875// condition and a shift position.
2876static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
2877 switch (s) {
2878 case kItOmitted: return 1 << shift;
2879 case kItThen: return firstcond0 << shift;
2880 case kItElse: return !firstcond0 << shift;
2881 }
2882 return 0;
2883}
2884
2885
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
// An omitted slot records AL (unconditional), "then" records the first
// condition, "else" records its inverse (conditions invert by flipping bit 0).
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;
    case kItThen: it_conditions_[index] = cond; break;
    case kItElse:
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
2897
2898
// IT (If-Then) instruction: makes up to four following instructions
// conditional. |i1|..|i3| describe the second to fourth slots; the mask
// encodes then/else per slot with a trailing 1-bit marking the block length.
// Also records the per-slot conditions so that subsequent instructions can be
// checked against the IT block via CheckCondition().
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);       // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  // Later slots only exist if the earlier ones were not omitted.
  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        mask |= 1U /* 0b0001 */;  // All four slots used: terminator in bit 0.
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8 |
      firstcond << 4 |
      mask;
  Emit16(encoding);
}
2933
2934
// CBZ rn, label: compare-and-branch-on-zero. Forward branches to low
// registers only; the unresolved branch is linked into the label's list.
void Thumb2Assembler::cbz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbz can only be used with low registers";
    UNREACHABLE();
  } else {
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
    label->LinkTo(branchid);
  }
}
2948
2949
// CBNZ rn, label: compare-and-branch-on-non-zero. Forward branches to low
// registers only; the unresolved branch is linked into the label's list.
void Thumb2Assembler::cbnz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbnz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbnz can only be used with low registers";
    UNREACHABLE();
  } else {
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
    label->LinkTo(branchid);
  }
}
2963
2964
// BLX rm: branch with link and exchange to the address in rm (16-bit encoding).
void Thumb2Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
2971
2972
// BX rm: branch and exchange to the address in rm (16-bit encoding).
void Thumb2Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
2979
2980
// Push a single register: store with SP pre-decrement.
void Thumb2Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}
2984
2985
// Pop a single register: load with SP post-increment.
void Thumb2Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}
2989
2990
// Push a register list: STMDB SP!, {regs}.
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
2994
2995
// Pop a register list: LDMIA SP!, {regs}.
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
2999
3000
3001void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
3002 if (cond != AL || rd != rm) {
3003 mov(rd, ShifterOperand(rm), cond);
3004 }
3005}
3006
3007
// Bind |label| to the current end of the buffer, resolving linked branches.
void Thumb2Assembler::Bind(Label* label) {
  BindLabel(label, buffer_.Size());
}
3011
3012
// LSL rd, rm, #shift_imm (immediate shift, 0..31).
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
}
3019
3020
// LSR rd, rm, #shift_imm (immediate shift, 1..32; 32 encodes as 0 per UAL).
void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
}
3028
3029
// ASR rd, rm, #shift_imm (immediate shift, 1..32; 32 encodes as 0 per UAL).
void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
}
3037
3038
// ROR rd, rm, #shift_imm (immediate rotate, 1..31; 0 would encode RRX).
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
}
3045
3046
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003047void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003048 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003049 EmitShift(rd, rm, RRX, rm, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003050}
3051
3052
// LSL rd, rm, rn (register-specified shift).
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, cond, set_cc);
}
3058
3059
// LSR rd, rm, rn (register-specified shift).
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, cond, set_cc);
}
3065
3066
// ASR rd, rm, rn (register-specified shift).
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, cond, set_cc);
}
3072
3073
// ROR rd, rm, rn (register-specified rotate).
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, cond, set_cc);
}
3079
3080
// Patch a 32-bit Thumb2 branch instruction |inst| with |offset|, returning the
// new encoding. Bit 12 of the instruction distinguishes the unconditional
// (25-bit immediate, S:I1:I2:imm10:imm11 with J1/J2 derived from the sign bit)
// form from the conditional (S:J1:J2:imm6:imm11) form.
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Halfword-aligned: the low bit is implicit.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1/J2 are the XOR-complemented forms of I1/I2 (see the architecture's
    // branch encoding).
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    // For in-range conditional offsets these bits equal the sign bit.
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
3118
3119
// Inverse of EncodeBranchOffset: extract the signed byte offset from a 32-bit
// Thumb2 branch instruction. Bit 12 distinguishes the unconditional (24-bit
// sign-extended) form from the conditional (21-bit sign-extended) form; the
// +4 compensates for the PC-read offset removed during encoding.
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // I1/I2 are recovered by XOR-complementing J1/J2 with the sign bit.
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  imm32 += 4;
  return imm32;
}
3146
// Translate a pre-fixup buffer position into its position after all fixup
// size adjustments seen so far. Caches the last query so that monotonically
// increasing calls amortize to O(m+n); a non-monotonic query resets the scan.
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Going backwards: restart the incremental scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      // This fixup grew; everything after it shifts by the size delta.
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3171
// Create a new literal-pool entry of |size| bytes (4 or 8) holding |data|.
// The returned pointer stays valid because literals_ is a deque-like container
// that does not move existing elements.
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  return &literals_.back();
}
3177
// Emit a PC-relative load of a 4-byte literal into core register |rt|,
// recording a fixup (16-bit LDR-literal when rt is a low register, otherwise
// 32-bit) and linking this site into the literal's label list.
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // The previous link-list entry is stored in the instruction slot until the
  // fixup is resolved.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Reserve the second halfword of the 32-bit encoding.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3192
// Emit a PC-relative load of an 8-byte literal into the core register pair
// rt/rt2 (always a 32-bit instruction), recording a fixup and linking this
// site into the literal's label list.
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3204
// Emit a PC-relative VLDR of a 4-byte literal into single-precision |sd|
// (always 32-bit), recording a fixup and linking into the literal's list.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3215
3216void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
3217 DCHECK_EQ(literal->GetSize(), 8u);
3218 DCHECK(!literal->GetLabel()->IsBound());
3219 uint32_t location = buffer_.Size();
3220 FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
3221 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3222 literal->GetLabel()->LinkTo(fixup_id);
3223 Emit16(0);
3224 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3225}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003226
Dave Allison65fcc2c2014-04-28 13:45:27 -07003227
3228void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
Vladimir Marko449b1092015-09-08 12:16:45 +01003229 Condition cond, SetCc set_cc) {
3230 if (value == 0 && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003231 if (rd != rn) {
3232 mov(rd, ShifterOperand(rn), cond);
3233 }
3234 return;
3235 }
3236 // We prefer to select the shorter code sequence rather than selecting add for
3237 // positive values and sub for negatives ones, which would slightly improve
3238 // the readability of generated code for some constants.
3239 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003240 if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003241 add(rd, rn, shifter_op, cond, set_cc);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003242 } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003243 sub(rd, rn, shifter_op, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003244 } else {
3245 CHECK(rn != IP);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003246 if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003247 mvn(IP, shifter_op, cond, kCcKeep);
3248 add(rd, rn, ShifterOperand(IP), cond, set_cc);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003249 } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003250 mvn(IP, shifter_op, cond, kCcKeep);
3251 sub(rd, rn, ShifterOperand(IP), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003252 } else {
3253 movw(IP, Low16Bits(value), cond);
3254 uint16_t value_high = High16Bits(value);
3255 if (value_high != 0) {
3256 movt(IP, value_high, cond);
3257 }
Vladimir Marko449b1092015-09-08 12:16:45 +01003258 add(rd, rn, ShifterOperand(IP), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003259 }
3260 }
3261}
3262
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003263
Dave Allison65fcc2c2014-04-28 13:45:27 -07003264void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3265 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003266 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003267 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003268 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003269 mvn(rd, shifter_op, cond);
3270 } else {
3271 movw(rd, Low16Bits(value), cond);
3272 uint16_t value_high = High16Bits(value);
3273 if (value_high != 0) {
3274 movt(rd, value_high, cond);
3275 }
3276 }
3277}
3278
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003279
Dave Allison65fcc2c2014-04-28 13:45:27 -07003280// Implementation note: this method must emit at most one instruction when
3281// Address::CanHoldLoadOffsetThumb.
3282void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3283 Register reg,
3284 Register base,
3285 int32_t offset,
3286 Condition cond) {
3287 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003288 CHECK_NE(base, IP);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003289 LoadImmediate(IP, offset, cond);
3290 add(IP, IP, ShifterOperand(base), cond);
3291 base = IP;
3292 offset = 0;
3293 }
3294 CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
3295 switch (type) {
3296 case kLoadSignedByte:
3297 ldrsb(reg, Address(base, offset), cond);
3298 break;
3299 case kLoadUnsignedByte:
3300 ldrb(reg, Address(base, offset), cond);
3301 break;
3302 case kLoadSignedHalfword:
3303 ldrsh(reg, Address(base, offset), cond);
3304 break;
3305 case kLoadUnsignedHalfword:
3306 ldrh(reg, Address(base, offset), cond);
3307 break;
3308 case kLoadWord:
3309 ldr(reg, Address(base, offset), cond);
3310 break;
3311 case kLoadWordPair:
3312 ldrd(reg, Address(base, offset), cond);
3313 break;
3314 default:
3315 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003316 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003317 }
3318}
3319
3320
3321// Implementation note: this method must emit at most one instruction when
3322// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3323void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3324 Register base,
3325 int32_t offset,
3326 Condition cond) {
3327 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3328 CHECK_NE(base, IP);
3329 LoadImmediate(IP, offset, cond);
3330 add(IP, IP, ShifterOperand(base), cond);
3331 base = IP;
3332 offset = 0;
3333 }
3334 CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
3335 vldrs(reg, Address(base, offset), cond);
3336}
3337
3338
3339// Implementation note: this method must emit at most one instruction when
3340// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3341void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3342 Register base,
3343 int32_t offset,
3344 Condition cond) {
3345 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3346 CHECK_NE(base, IP);
3347 LoadImmediate(IP, offset, cond);
3348 add(IP, IP, ShifterOperand(base), cond);
3349 base = IP;
3350 offset = 0;
3351 }
3352 CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
3353 vldrd(reg, Address(base, offset), cond);
3354}
3355
3356
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  // Temporary used to materialize base + offset when the offset does not fit
  // in the store instruction's immediate field. Stays kNoRegister otherwise.
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    // Prefer IP as the temporary, but only if the value being stored does not
    // itself live in IP (for word pairs, neither register of the pair).
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push just moved SP down by one slot, so an SP-relative offset
        // must be rebased to still address the intended location.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    // NOTE(review): this address computation uses AL while LoadImmediate above
    // and the store below use `cond`; the corresponding add in LoadFromOffset
    // uses `cond`. Confirm the asymmetry is intended.
    add(tmp_reg, tmp_reg, ShifterOperand(base), AL);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // If a callee-saved register (R5/R6) was borrowed as the temporary,
  // restore it; IP needs no restore.
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3411
3412
3413// Implementation note: this method must emit at most one instruction when
3414// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3415void Thumb2Assembler::StoreSToOffset(SRegister reg,
3416 Register base,
3417 int32_t offset,
3418 Condition cond) {
3419 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3420 CHECK_NE(base, IP);
3421 LoadImmediate(IP, offset, cond);
3422 add(IP, IP, ShifterOperand(base), cond);
3423 base = IP;
3424 offset = 0;
3425 }
3426 CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
3427 vstrs(reg, Address(base, offset), cond);
3428}
3429
3430
3431// Implementation note: this method must emit at most one instruction when
3432// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3433void Thumb2Assembler::StoreDToOffset(DRegister reg,
3434 Register base,
3435 int32_t offset,
3436 Condition cond) {
3437 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3438 CHECK_NE(base, IP);
3439 LoadImmediate(IP, offset, cond);
3440 add(IP, IP, ShifterOperand(base), cond);
3441 base = IP;
3442 offset = 0;
3443 }
3444 CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
3445 vstrd(reg, Address(base, offset), cond);
3446}
3447
3448
3449void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
3450 CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003451 dmb(SY);
3452}
3453
3454
3455void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003456 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3457 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003458}
3459
3460
3461void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003462 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003463 cbz(r, label);
3464 } else {
3465 cmp(r, ShifterOperand(0));
3466 b(label, EQ);
3467 }
3468}
3469
3470
Dave Allison65fcc2c2014-04-28 13:45:27 -07003471void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003472 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003473 cbnz(r, label);
3474 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003475 cmp(r, ShifterOperand(0));
3476 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003477 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003478}
3479} // namespace arm
3480} // namespace art