blob: b8c5fd2e2b2ba9f4c3799bcd91e6e9fb563e7a02 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
// Builds the reverse-dependency table for all Fixups: fixup_dependents_ holds, for each
// Fixup, the ids of the Fixups whose size adjustment it can invalidate.
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // Count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward branch: every Fixup located between this one and its target depends on it.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward branch: every Fixup from the target up to (and including) this one depends on it.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  // (This is the classic counting-sort bucket layout: prefix sums, then fill backwards.)
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;  // No branches over other Fixups; nothing to record.
  }
  // Create and fill in the fixup_dependents_.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
// Binds `label` to `bound_pc`, resolving every Fixup linked through the label.
// The linked Fixups form a chain threaded through 16-bit placeholders in buffer_;
// each placeholder holds the id of the next Fixup in the chain.
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
94
95void Thumb2Assembler::BindLiterals() {
96 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
97 uint32_t code_size = buffer_.Size();
98 for (Literal& lit : literals_) {
99 Label* label = lit.GetLabel();
100 BindLabel(label, code_size);
101 code_size += lit.GetSize();
102 }
103}
104
// Grows `fixup` to a larger encoding if the current code size pushes its target out of
// range. On growth, propagates the size delta to all dependent Fixups and queues any
// newly-affected dependents for recalculation. The 16-bit placeholder in buffer_ is used
// as an "already queued" marker (0 = not queued, nonzero = queued) to avoid duplicates.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);  // Mark as queued.
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
120
// Iterates Fixup size adjustment to a fixed point and returns the adjusted code size
// (excluding literal-pool padding). Also rebinds literal labels to account for the
// padding that will precede the 4-byte-aligned literal pool.
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass: adjust every Fixup once; growth may queue dependents.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" marker.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && !literals_.empty()) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  // `(current_code_size & 2)` is the padding needed to 4-align the literal pool.
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
180
// Expands the buffer to the adjusted size, shifting instruction data to its final
// position and emitting the (possibly grown) fixup encodings.
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // All remaining (pre-first-fixup) data is already in place.
  CHECK_EQ(src_end, dest_end);
}
211
// Appends the literal pool after the code, inserting a 2-byte NOP-like padding
// halfword first if needed for 4-byte alignment.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Alignment padding.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      // The label must have been rebound to the final (padded) position by AdjustFixups().
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
231
// 16-bit branch encoding: T1 (conditional, imm8) when cond != AL, T2 (unconditional,
// imm11) otherwise. `offset` is the branch displacement in bytes, halfword-aligned.
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}
244
// 32-bit branch encoding: T3 (conditional) when cond != AL, T4 (unconditional)
// otherwise. `offset` is the byte displacement; the S/J1/J2 bits follow the
// Thumb2 sign/imm split from the architecture manual.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    // In T4, J1/J2 are I1/I2 XORed with the (inverted) sign bit.
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22) ^ s ^ 1) & 1;  // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
267
// 16-bit CBZ (cond == EQ) / CBNZ (cond == NE) encoding: compare rn against zero and
// branch forward by `offset` bytes. Only low registers and forward offsets are allowed.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 9 (comment fixed; was "bit 11").
}
277
// 16-bit CMP rn, #imm8 encoding (T1). Low registers only.
inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
  DCHECK(!IsHighRegister(rn));
  DCHECK(IsUint<8>(value));
  return B13 | B11 | (rn << 8) | value;
}
283
// 16-bit ADD rdn, rm encoding (T2): rdn += rm, high registers allowed.
inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
  // The high bit of rdn is moved across 4-bit rm (DN bit at position 7).
  return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
      (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
}
289
// 32-bit MOVW encoding (T3): rd = zero-extended 16-bit immediate.
inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
  DCHECK(IsUint<16>(value));
  return B31 | B30 | B29 | B28 | B25 | B22 |
      (static_cast<int32_t>(rd) << 8) |
      ((value & 0xf000) << (16 - 12)) |   // Move imm4 from bits 12-15 to bits 16-19.
      ((value & 0x0800) << (26 - 11)) |   // Move i from bit 11 to bit 26.
      ((value & 0x0700) << (12 - 8)) |    // Move imm3 from bits 8-10 to bits 12-14.
      (value & 0xff);                     // Keep imm8 in bits 0-7.
}
299
// 32-bit MOVT encoding (T1): writes the top halfword of rd. `value` carries the
// immediate in its upper 16 bits; the low 16 bits must be zero.
inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
  DCHECK_EQ(value & 0xffff, 0);
  // MOVT differs from MOVW only in two opcode bits.
  int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
  return movw_encoding | B25 | B23;
}
305
// 32-bit MOV (modified immediate) encoding (T2): rd = Thumb2 modified-immediate `value`.
// The value must be representable; callers are expected to have checked.
inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
  uint32_t mod_imm = ModifiedImmediate(value);
  DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
  return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
      (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
}
312
// 16-bit LDR (literal) encoding (T1): rt = [PC, #offset]. Low registers, word-aligned
// positive offset up to 1020.
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}
319
// 32-bit LDR (literal) encoding: rt = [PC, #offset], reusing the imm12 form.
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}
324
// 32-bit LDRD encoding (T1): rt, rt2 = [rn, #offset]. Word-aligned positive offset only
// (P = 1, U = 1, W = 0).
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}
333
// 32-bit VLDR.32 encoding: sd = [rn, #offset]. Word-aligned positive offset only (U = 1).
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |   // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |   // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}
344
345inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
Roland Levillain14d90572015-07-16 10:52:26 +0100346 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000347 CHECK(IsUint<10>(offset));
348 return B31 | B30 | B29 | B27 | B26 | B24 |
349 B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
350 (rn << 16) |
351 ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) | // Move D from bit 4 to bit 22.
352 ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) | // Move Vd from bits 0-3 to bits 12-15.
353 (offset >> 2);
354}
355
// 16-bit LDR (immediate) encoding (T1): rt = [rn, #offset]. Low registers, word-aligned
// offset up to 124 bytes.
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
365
// Selects the 32-bit load encoding for wide/FP literal fixups, loading from `rbase`
// at `offset`. Fatal for any other fixup type.
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
379
// 32-bit LDR (immediate) encoding (T3): rt = [rn, #offset], 12-bit positive offset.
inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
}
384
// Finalizes the generated code: binds literal labels, sizes all fixups to their
// final encodings, then emits fixups and the literal pool. Order matters: literals
// must be bound before fixup adjustment, and fixups emitted before literals.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  BindLiterals();
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
}
392
// Returns whether `immediate` can be encoded as a shifter operand for `opcode`,
// filling in *shifter_op either way. rd/rn are unused in the Thumb2 implementation.
bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
                                            Register rn ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            uint32_t immediate,
                                            ShifterOperand* shifter_op) {
  shifter_op->type_ = ShifterOperand::kImmediate;
  shifter_op->immed_ = immediate;
  shifter_op->is_shift_ = false;
  shifter_op->is_rotate_ = false;
  switch (opcode) {
    case ADD:
    case SUB:
      // ADD/SUB have a 12-bit plain-immediate form in addition to modified immediates.
      if (immediate < (1 << 12)) {  // Immediates that fit in 12 bits can always be done.
        return true;
      }
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MOV:
      // TODO: Support less than or equal to 12bits.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MVN:
    default:
      // Everything else only takes a Thumb2 modified immediate.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
  }
}
419
// Bitwise AND: rd = rn & so. (Trailing underscore avoids the C++ `and` keyword.)
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}
424
425
// Bitwise exclusive OR: rd = rn ^ so.
void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}
430
431
// Subtract: rd = rn - so.
void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}
436
437
// Reverse subtract: rd = so - rn.
void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}
442
443
// Add: rd = rn + so.
void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}
448
449
// Add with carry: rd = rn + so + C.
void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}
454
455
// Subtract with carry: rd = rn - so - NOT(C).
void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}
460
461
// Reverse subtract with carry: rd = so - rn - NOT(C).
void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}
466
467
// Test: set flags on rn & so. Always sets condition codes; no destination register.
void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}
472
473
// Test equivalence: set flags on rn ^ so. Always sets condition codes.
void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}
478
479
// Compare: set flags on rn - so. Always sets condition codes.
void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}
483
484
// Compare negative: set flags on rn + so. Always sets condition codes.
void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}
488
489
// Bitwise inclusive OR: rd = rn | so.
void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}
494
495
// Bitwise OR NOT: rd = rn | ~so. (Thumb2-only instruction.)
void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
}
500
501
// Move: rd = so. R0 is a placeholder for the unused rn slot.
void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}
506
507
// Bit clear: rd = rn & ~so.
void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}
512
513
// Move NOT: rd = ~so. R0 is a placeholder for the unused rn slot.
void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
518
519
// Multiply: rd = rn * rm. Uses the 16-bit MULS encoding when rd == rm and both
// operands are low registers (and 32-bit mode is not forced); otherwise 32-bit MUL.
void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
    // 16 bit.
    int16_t encoding = B14 | B9 | B8 | B6 |
        rn << 3 | rd;
    Emit16(encoding);
  } else {
    // 32 bit.
    uint32_t op1 = 0U /* 0b000 */;
    uint32_t op2 = 0U /* 0b00 */;
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
        op1 << 20 |
        B15 | B14 | B13 | B12 |   // Ra = 0b1111 (no accumulate).
        op2 << 4 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rn) << 16 |
        static_cast<uint32_t>(rm);

    Emit32(encoding);
  }
}
543
544
// Multiply accumulate: rd = ra + rn * rm. Thumb2 32-bit encoding only.
void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                          Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000 */;
  uint32_t op2 = 0U /* 0b00 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(ra) << 12 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
561
562
563void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
564 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700565 CheckCondition(cond);
566
Andreas Gampec8ccf682014-09-29 20:07:43 -0700567 uint32_t op1 = 0U /* 0b000 */;
568 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700569 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
570 op1 << 20 |
571 op2 << 4 |
572 static_cast<uint32_t>(rd) << 8 |
573 static_cast<uint32_t>(ra) << 12 |
574 static_cast<uint32_t>(rn) << 16 |
575 static_cast<uint32_t>(rm);
576
577 Emit32(encoding);
578}
579
580
// Signed long multiply: rd_hi:rd_lo = rn * rm (64-bit signed result).
void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
597
598
// Unsigned long multiply: rd_hi:rd_lo = rn * rm (64-bit unsigned result).
void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 2U /* 0b010; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
615
616
// Signed divide: rd = rn / rm. The 0xf << 12 field is the mandatory Ra = 0b1111.
void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
632
633
// Unsigned divide: rd = rn / rm. Differs from sdiv only by B21.
void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
649
650
// Signed bit field extract: rd = sign_extend(rn&lt;lsb+width-1:lsb&gt;).
// `lsb` in [0, 31], `width` in [1, 32].
void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;  // Instruction encodes width - 1.
  uint32_t imm2 = lsb & (B1 | B0);  // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 20U /* 0b10100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
670
671
// Unsigned bit field extract: rd = zero_extend(rn&lt;lsb+width-1:lsb&gt;).
// `lsb` in [0, 31], `width` in [1, 32]. Differs from sbfx only in the op field.
void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;  // Instruction encodes width - 1.
  uint32_t imm2 = lsb & (B1 | B0);  // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 28U /* 0b11100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
691
692
// Load word: rd = [ad]. Flags: load=true, byte=false, half=false, signed=false.
void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}
696
697
// Store word: [ad] = rd.
void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}
701
702
// Load byte (zero-extended): rd = [ad].
void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}
706
707
// Store byte: [ad] = rd&lt;7:0&gt;.
void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}
711
712
// Load halfword (zero-extended): rd = [ad].
void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}
716
717
// Store halfword: [ad] = rd&lt;15:0&gt;.
void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}
721
722
// Load signed byte (sign-extended): rd = [ad].
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}
726
727
// Load signed halfword (sign-extended): rd = [ad].
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
731
732
// Load doubleword into the consecutive pair rd, rd+1.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}
736
737
738void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700739 CheckCondition(cond);
Roland Levillain4af147e2015-04-07 13:54:49 +0100740 // Encoding T1.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700741 // This is different from other loads. The encoding is like ARM.
742 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
743 static_cast<int32_t>(rd) << 12 |
Roland Levillain4af147e2015-04-07 13:54:49 +0100744 static_cast<int32_t>(rd2) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700745 ad.encodingThumbLdrdStrd();
746 Emit32(encoding);
747}
748
749
// Store doubleword from the consecutive pair rd, rd+1.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}
753
754
755void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700756 CheckCondition(cond);
Roland Levillain4af147e2015-04-07 13:54:49 +0100757 // Encoding T1.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700758 // This is different from other loads. The encoding is like ARM.
759 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
760 static_cast<int32_t>(rd) << 12 |
Roland Levillain4af147e2015-04-07 13:54:49 +0100761 static_cast<int32_t>(rd2) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700762 ad.encodingThumbLdrdStrd();
763 Emit32(encoding);
764}
765
766
// Load multiple registers. Thumb has no single-register LDM encoding, so a
// one-register list is lowered to a plain ldr; otherwise the block transfer
// is emitted via EmitMultiMemOp.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the fallback is a post-indexed ldr (load from [base],
    // then base += 4), i.e. increment-after semantics, while the CHECK above
    // requires DB_W — verify this pairing is intentional for the callers.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
783
784
// Store multiple registers. Thumb has no single-register STM encoding, so a
// one-register list is lowered to a plain str; otherwise the block transfer
// is emitted via EmitMultiMemOp.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): IA maps to a pre-indexed str at [base, -4]! (writeback),
    // IA_W to a plain [base, -4] offset store — this looks inverted relative
    // to the mode names; confirm against the callers' expectations.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
802
803
// Try to materialize a float constant with a VFP immediate move
// (vmov.f32 sd, #imm). Returns true and emits the instruction only when the
// bit pattern fits the 8-bit VFP immediate form (sign bit, a constrained
// exponent field, and a 4-bit fraction with the low 19 fraction bits zero);
// returns false otherwise and emits nothing, leaving the caller to load the
// value another way.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&  // Low 19 fraction bits must be zero.
      // Exponent bits must be all-equal-after-the-top-bit: 0b100000 or 0b011111.
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
      (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign, one exponent bit and the 4 fraction bits into imm8 (abcdefgh).
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    // imm8 is split across the encoding: high nibble at B16, low nibble in bits 0-3.
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
817
818
// Try to materialize a double constant with a VFP immediate move
// (vmov.f64 dd, #imm). Same scheme as the float overload above, applied to
// the 64-bit layout: the low 48 fraction bits must be zero and the exponent
// must fit the constrained window; returns false (emitting nothing) if the
// value is not representable as an 8-bit VFP immediate.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&  // Low 48 fraction bits must be zero.
      // Exponent bits must be all-equal-after-the-top-bit: 0b100000000 or 0b011111111.
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
      (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign, one exponent bit and the 4 fraction bits into imm8 (abcdefgh).
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    // imm8 is split across the encoding: high nibble at B16, low nibble in bits 0-3.
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
832
833
// Register-to-register VFP move, single precision (vmov.f32 sd, sm).
void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}


// Register-to-register VFP move, double precision (vmov.f64 dd, dm).
void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}
842
843
// VFP add, single precision (vadd.f32 sd, sn, sm).
void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}


// VFP add, double precision (vadd.f64 dd, dn, dm).
void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}


// VFP subtract, single precision (vsub.f32 sd, sn, sm).
void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}


// VFP subtract, double precision (vsub.f64 dd, dn, dm).
void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}
866
867
// VFP multiply, single precision (vmul.f32 sd, sn, sm).
void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}


// VFP multiply, double precision (vmul.f64 dd, dn, dm).
void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}


// VFP multiply-accumulate, single precision (vmla.f32 sd, sn, sm).
void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}


// VFP multiply-accumulate, double precision (vmla.f64 dd, dn, dm).
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}


// VFP multiply-subtract, single precision (vmls.f32 sd, sn, sm).
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}


// VFP multiply-subtract, double precision (vmls.f64 dd, dn, dm).
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
902
903
// VFP divide, single precision (vdiv.f32 sd, sn, sm).
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}


// VFP divide, double precision (vdiv.f64 dd, dn, dm).
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
914
915
// VFP absolute value, single precision (vabs.f32 sd, sm).
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}


// VFP absolute value, double precision (vabs.f64 dd, dm).
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}


// VFP negate, single precision (vneg.f32 sd, sm).
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}


// VFP negate, double precision (vneg.f64 dd, dm).
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}


// VFP square root, single precision (vsqrt.f32 sd, sm).
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}

// VFP square root, double precision (vsqrt.f64 dd, dm).
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
943
944
// VFP conversions. The mnemonic letters name destination then source:
// s = single float, d = double float, i = signed int, u = unsigned int
// (e.g. vcvtis converts a single-precision float to a signed integer).

// Double to single precision (vcvt.f32.f64 sd, dm).
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}


// Single to double precision (vcvt.f64.f32 dd, sm).
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}


// Single float to signed int.
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}


// Double float to signed int.
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}


// Signed int to single float.
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}


// Signed int to double float.
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}


// Single float to unsigned int.
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}


// Double float to unsigned int.
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}


// Unsigned int to single float.
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}


// Unsigned int to double float.
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
993
994
// VFP compare, single precision (vcmp.f32 sd, sm).
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}


// VFP compare, double precision (vcmp.f64 dd, dm).
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}


// VFP compare against zero, single precision (vcmp.f32 sd, #0.0).
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}


// VFP compare against zero, double precision (vcmp.f64 dd, #0.0).
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
1013
// Conditional branch to label. Must not be called inside an IT block
// (next_condition_ must be AL), as asserted below.
void Thumb2Assembler::b(Label* label, Condition cond) {
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1018
1019
// Branch with link (call) to label; link=true in EmitBranch.
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
1024
1025
// Branch with link and exchange to label; always unconditional (AL),
// link=true, x=true in EmitBranch.
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
1029
1030
// Marks an exception handler location: emits a "tst pc, #0" marker
// instruction, then an unconditional branch around an (unexecuted) branch
// to `label`, so the handler address is embedded in the stream without
// being reached by fall-through.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);  // Skip over the handler branch below.
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1038
1039
1040void Thumb2Assembler::Emit32(int32_t value) {
1041 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1042 buffer_.Emit<int16_t>(value >> 16);
1043 buffer_.Emit<int16_t>(value & 0xffff);
1044}
1045
1046
// Appends a single 16-bit Thumb instruction to the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1051
1052
// Decides whether a data-processing operation must use the 32-bit (Thumb2)
// encoding. Returns false only when a 16-bit (Thumb1) encoding exists for
// this combination of opcode, registers, operand form and flag-update
// request; returns true otherwise, or unconditionally when force_32bit_
// is set.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only a few 16-bit forms accept registers R8-R15.
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
    case ORN:
      // TEQ and ORN have no 16-bit encoding at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register data processing requires rd == rn in 16 bits.
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      // Only RSB with a zero immediate (i.e. NEG) has a 16-bit form.
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if (!IsUint<3>(so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (!IsUint<8>(so.GetImmediate())) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1207
1208
// Emits the 32-bit (Thumb2) encoding of a data-processing instruction.
// `cond` is unused: conditionality is handled by IT blocks elsewhere.
// Compare/test ops reuse the arithmetic opcodes with Rd = PC (TST->AND,
// TEQ->EOR, CMP->SUB, CMN->ADD); MOV/MVN reuse ORR/ORN with Rn = PC.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  // 255 is the "not yet assigned" sentinel; any opcode left at 255 is fatal.
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    case ORN: thumb_opcode = 3U /* 0b0011 */; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    // ADD/SUB with a plain 12-bit immediate can use the wide ADDW/SUBW-style
    // form when the flags are not being set.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
      if (set_cc != kCcSet) {
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8 fields.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |  // B20 = set flags; B25 = plain-immediate form.
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1293
1294
// Emits the 16-bit (Thumb1) encoding of a data-processing instruction.
// ADD/SUB are delegated to Emit16BitAddSub. Callers are expected to have
// already established (via Is32BitDataProcessing) that a 16-bit encoding
// exists for this operand combination; unencodable cases are fatal here.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  // 255 is the "not yet assigned" sentinel; any opcode left at 255 is fatal.
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the final 16-bit instruction from the fields computed above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1483
1484
// ADD and SUB are complex enough to warrant their own emitter.
// Selects among the Thumb1 ADD/SUB forms: register-register (T1/T2),
// 3-bit immediate (T1), 8-bit immediate (T2), and the SP-relative
// immediate forms (word-aligned, scaled by 4 in the encoding).
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 9 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
              (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1.
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          // The third register rides in the immediate field.
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // Encoded immediate is in words.
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          CHECK_LT(immediate, (1u << 10));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          immediate >>= 2;  // Encoded immediate is in words.
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        // The third register rides in the immediate field.
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // Encoded immediate is in words.
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the final 16-bit instruction from the fields computed above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1634
1635
1636void Thumb2Assembler::EmitDataProcessing(Condition cond,
1637 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001638 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001639 Register rn,
1640 Register rd,
1641 const ShifterOperand& so) {
1642 CHECK_NE(rd, kNoRegister);
1643 CheckCondition(cond);
1644
1645 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1646 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1647 } else {
1648 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1649 }
1650}
1651
// Emits a shift-by-immediate (LSL/LSR/ASR/ROR/RRX) of rm into rd.
// Uses the 16-bit encoding when possible. The 32-bit encoding is required for
// high registers, for ROR/RRX (which have no 16-bit immediate form), and when
// the requested flag behavior does not match the 16-bit encoding: 16-bit
// shifts set flags outside an IT block (cond == AL) and keep them inside one.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rm,
                                Shift shift,
                                uint8_t amount,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_LT(amount, (1 << 5));  // Shift amount is a 5-bit immediate.
  if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      // RRX shares the ROR opcode and is selected by a zero shift amount.
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit. The immediate is split into imm3:imm2 fields; B20 is the S bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (set_cc == kCcSet ? B20 : 0);
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift (low registers, LSL/LSR/ASR only).
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1696
// Emits a shift-by-register (LSL/LSR/ASR/ROR) of rn by rm into rd.
// RRX has no register-shift form. The 16-bit encoding is only available when
// all registers are low, rd == rn (it is a two-register ALU operation), and
// the flag behavior matches the 16-bit encoding (flags set outside an IT
// block, kept inside one); otherwise the 32-bit encoding is used.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rn,
                                Shift shift,
                                Register rm,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_NE(shift, RRX);
  bool must_be_32bit = false;
  if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit. B20 is the S (set-flags) bit.
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
        0xf << 12 | (set_cc == kCcSet ? B20 : 0);
    encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 21;
    Emit32(encoding);
  } else {
    // 16 bit: data-processing encoding with rd/rdn and rm; note the opcode
    // values differ from the 32-bit form.
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 2U /* 0b0010 */; break;
      case LSR: opcode = 3U /* 0b0011 */; break;
      case ASR: opcode = 4U /* 0b0100 */; break;
      case ROR: opcode = 7U /* 0b0111 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1743
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001744inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1745 switch (size) {
1746 case kBranch16Bit:
1747 return 2u;
1748 case kBranch32Bit:
1749 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001750
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001751 case kCbxz16Bit:
1752 return 2u;
1753 case kCbxz32Bit:
1754 return 4u;
1755 case kCbxz48Bit:
1756 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001757
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001758 case kLiteral1KiB:
1759 return 2u;
1760 case kLiteral4KiB:
1761 return 4u;
1762 case kLiteral64KiB:
1763 return 8u;
1764 case kLiteral1MiB:
1765 return 10u;
1766 case kLiteralFar:
1767 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001768
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001769 case kLongOrFPLiteral1KiB:
1770 return 4u;
1771 case kLongOrFPLiteral256KiB:
1772 return 10u;
1773 case kLongOrFPLiteralFar:
1774 return 14u;
1775 }
1776 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1777 UNREACHABLE();
1778}
1779
// Returns the byte size of this fixup's originally emitted encoding,
// before any size adjustment promoted it to a longer variant.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1783
// Returns the byte size of this fixup's current (possibly promoted) encoding.
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1787
1788inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1789 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001790 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001791 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1792 return current_code_size & 2;
1793}
1794
// Computes the PC-relative offset this fixup should encode, given the final
// code size. Starts from target_ - location_, applies adjustment_ (the bytes
// added by fixups between this instruction and its target; its sign depends
// on the branch direction), subtracts the Thumb2 PC bias of 4, and then
// compensates for extra instructions, literal pool padding and PC rounding
// in the longer encoding variants.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  // All stored offsets must fit in int32_t so the arithmetic below can't overflow.
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward reference: size increases in between push the target further away.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward reference: size increases in between make the offset more negative.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1855
1856inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1857 DCHECK_NE(target_, kUnresolved);
1858 Size old_size = size_;
1859 size_ = new_size;
1860 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1861 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1862 if (target_ > location_) {
1863 adjustment_ += adjustment;
1864 }
1865 return adjustment;
1866}
1867
// Checks whether the current encoding variant can reach the target at the
// given code size and, if not, promotes it through successively longer
// variants (the intentional case fall-throughs) until one fits. Returns the
// total number of code bytes added by the promotions (0 if none).
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // 16-bit B<cond> has a signed 9-bit offset; unconditional B a signed 12-bit one.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ take an unsigned 7-bit (positive, forward-only) offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      // CMP + 16-bit B<cond>: signed 9-bit offset.
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
1946
// Writes the final instruction bytes for this fixup into `buffer` at
// location_, using the encoding variant selected by GetSize(). `code_size`
// is the final code size, used by GetOffset() to compute the PC-relative
// offset. The longer literal variants synthesize the offset in rn_ (or IP)
// with MOV/MOVW/MOVT, add PC, and then load through that register.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Turn B into BL by setting bit 14 of the first halfword.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Turn B into BLX by setting bit 14 and clearing bit 12.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      // Direct CBZ/CBNZ.
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CMP rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // CMP rn, #0 followed by a 32-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      // 16-bit PC-relative LDR.
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // Full 32-bit offset via MOVW+MOVT; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLongOrFPLiteral1KiB: {
      // Direct PC-relative wide/FP load.
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // Wide/FP loads can't clobber their own base, so IP holds the address.
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // Full 32-bit offset in IP via MOVW+MOVT, then wide/FP load from IP.
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2089
Dave Allison65fcc2c2014-04-28 13:45:27 -07002090uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002091 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002092 uint32_t location = buffer_.Size();
2093
2094 // This is always unresolved as it must be a forward branch.
2095 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002096 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002097}
2098
2099
// Emits a load (load == true) or store of rd using the addressing mode in
// `ad`. `byte`/`half` select the access size (word if both false) and
// `is_signed` selects sign-extending loads. Chooses a 16-bit encoding when
// the operands allow it, otherwise a 32-bit one.
// NOTE: this only supports immediate offsets, not [rx,ry].
// TODO: support [rx,ry] instructions.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // 16-bit encodings only take low base registers, except SP/PC-relative forms.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  // Sign extension, negative offsets and pre/post-indexing have no 16-bit form.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    // The 16-bit immediate field is 5 bits, scaled by the access size.
    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Not an immediate offset: PC-relative literal or register offset.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal form; U bit (B23) selects the offset direction.
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2263
2264
// Emits a multiple load/store (LDM/STM family) of `regs` relative to `base`.
// SP-based IA_W loads / DB_W stores whose register list fits become 16-bit
// POP/PUSH; other low-register writeback forms use the 16-bit LDMIA!/STMIA!;
// everything else uses the 32-bit encoding. DA/IB modes are not encodable
// in Thumb and abort.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // POP allows PC and PUSH allows LR in addition to the low registers.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // High registers in the list require the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                       (op << 23) |
                       (load ? B20 : 0) |
                       base << 16 |
                       regs |
                       (w_bit << 21);
    Emit32(encoding);
  } else {
    // 16-bit LDMIA!/STMIA! with a low base register and low register list.
    int16_t encoding = B15 | B14 |
                       (load ? B11 : 0) |
                       base << 8 |
                       regs;
    Emit16(encoding);
  }
}
2333
// Emits a branch placeholder to `label` and registers a Fixup describing it.
// `link` selects BL/BLX (with `x` choosing BLX); both force the 32-bit size.
// The actual encoding is patched in later when fixups are resolved.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  // If branches can't be relocated later, conservatively start at 32 bits.
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  // Reserve the second halfword of a 32-bit branch.
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2372
2373
2374void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
2375 CHECK_NE(rd, kNoRegister);
2376 CHECK_NE(rm, kNoRegister);
2377 CheckCondition(cond);
2378 CHECK_NE(rd, PC);
2379 CHECK_NE(rm, PC);
2380 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2381 B25 | B23 | B21 | B20 |
2382 static_cast<uint32_t>(rm) << 16 |
2383 0xf << 12 |
2384 static_cast<uint32_t>(rd) << 8 |
2385 B7 |
2386 static_cast<uint32_t>(rm);
2387 Emit32(encoding);
2388}
2389
2390
2391void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
2392 CheckCondition(cond);
2393 bool must_be_32bit = force_32bit_;
2394 if (IsHighRegister(rd)|| imm16 >= 256u) {
2395 must_be_32bit = true;
2396 }
2397
2398 if (must_be_32bit) {
2399 // Use encoding T3.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002400 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2401 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2402 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002403 uint32_t imm8 = imm16 & 0xff;
2404 int32_t encoding = B31 | B30 | B29 | B28 |
2405 B25 | B22 |
2406 static_cast<uint32_t>(rd) << 8 |
2407 i << 26 |
2408 imm4 << 16 |
2409 imm3 << 12 |
2410 imm8;
2411 Emit32(encoding);
2412 } else {
2413 int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
2414 imm16;
2415 Emit16(encoding);
2416 }
2417}
2418
2419
2420void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
2421 CheckCondition(cond);
2422 // Always 32 bits.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002423 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2424 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2425 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002426 uint32_t imm8 = imm16 & 0xff;
2427 int32_t encoding = B31 | B30 | B29 | B28 |
2428 B25 | B23 | B22 |
2429 static_cast<uint32_t>(rd) << 8 |
2430 i << 26 |
2431 imm4 << 16 |
2432 imm3 << 12 |
2433 imm8;
2434 Emit32(encoding);
2435}
2436
2437
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002438void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
2439 CHECK_NE(rd, kNoRegister);
2440 CHECK_NE(rm, kNoRegister);
2441 CheckCondition(cond);
2442 CHECK_NE(rd, PC);
2443 CHECK_NE(rm, PC);
2444 CHECK_NE(rd, SP);
2445 CHECK_NE(rm, SP);
2446 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2447 B25 | B23 | B20 |
2448 static_cast<uint32_t>(rm) << 16 |
2449 0xf << 12 |
2450 static_cast<uint32_t>(rd) << 8 |
2451 B7 | B5 |
2452 static_cast<uint32_t>(rm);
2453 Emit32(encoding);
2454}
2455
2456
Dave Allison65fcc2c2014-04-28 13:45:27 -07002457void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2458 CHECK_NE(rn, kNoRegister);
2459 CHECK_NE(rt, kNoRegister);
2460 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002461 CHECK_LT(imm, (1u << 10));
2462
2463 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2464 static_cast<uint32_t>(rn) << 16 |
2465 static_cast<uint32_t>(rt) << 12 |
2466 0xf << 8 |
2467 imm >> 2;
2468 Emit32(encoding);
2469}
2470
2471
// LDREX with a zero offset; delegates to the offset form.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2475
2476
2477void Thumb2Assembler::strex(Register rd,
2478 Register rt,
2479 Register rn,
2480 uint16_t imm,
2481 Condition cond) {
2482 CHECK_NE(rn, kNoRegister);
2483 CHECK_NE(rd, kNoRegister);
2484 CHECK_NE(rt, kNoRegister);
2485 CheckCondition(cond);
2486 CHECK_LT(imm, (1u << 10));
2487
2488 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2489 static_cast<uint32_t>(rn) << 16 |
2490 static_cast<uint32_t>(rt) << 12 |
2491 static_cast<uint32_t>(rd) << 8 |
2492 imm >> 2;
2493 Emit32(encoding);
2494}
2495
2496
Calin Juravle52c48962014-12-16 17:02:57 +00002497void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
2498 CHECK_NE(rn, kNoRegister);
2499 CHECK_NE(rt, kNoRegister);
2500 CHECK_NE(rt2, kNoRegister);
2501 CHECK_NE(rt, rt2);
2502 CheckCondition(cond);
2503
2504 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
2505 static_cast<uint32_t>(rn) << 16 |
2506 static_cast<uint32_t>(rt) << 12 |
2507 static_cast<uint32_t>(rt2) << 8 |
2508 B6 | B5 | B4 | B3 | B2 | B1 | B0;
2509 Emit32(encoding);
2510}
2511
2512
// STREX with a zero offset; delegates to the offset form.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2519
2520
Calin Juravle52c48962014-12-16 17:02:57 +00002521void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
2522 CHECK_NE(rd, kNoRegister);
2523 CHECK_NE(rn, kNoRegister);
2524 CHECK_NE(rt, kNoRegister);
2525 CHECK_NE(rt2, kNoRegister);
2526 CHECK_NE(rt, rt2);
2527 CHECK_NE(rd, rt);
2528 CHECK_NE(rd, rt2);
2529 CheckCondition(cond);
2530
2531 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
2532 static_cast<uint32_t>(rn) << 16 |
2533 static_cast<uint32_t>(rt) << 12 |
2534 static_cast<uint32_t>(rt2) << 8 |
2535 B6 | B5 | B4 |
2536 static_cast<uint32_t>(rd);
2537 Emit32(encoding);
2538}
2539
2540
Dave Allison65fcc2c2014-04-28 13:45:27 -07002541void Thumb2Assembler::clrex(Condition cond) {
2542 CheckCondition(cond);
2543 int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
2544 B21 | B20 |
2545 0xf << 16 |
2546 B15 |
2547 0xf << 8 |
2548 B5 |
2549 0xf;
2550 Emit32(encoding);
2551}
2552
2553
2554void Thumb2Assembler::nop(Condition cond) {
2555 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002556 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002557 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002558 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002559}
2560
2561
2562void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
2563 CHECK_NE(sn, kNoSRegister);
2564 CHECK_NE(rt, kNoRegister);
2565 CHECK_NE(rt, SP);
2566 CHECK_NE(rt, PC);
2567 CheckCondition(cond);
2568 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2569 B27 | B26 | B25 |
2570 ((static_cast<int32_t>(sn) >> 1)*B16) |
2571 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2572 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2573 Emit32(encoding);
2574}
2575
2576
2577void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
2578 CHECK_NE(sn, kNoSRegister);
2579 CHECK_NE(rt, kNoRegister);
2580 CHECK_NE(rt, SP);
2581 CHECK_NE(rt, PC);
2582 CheckCondition(cond);
2583 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2584 B27 | B26 | B25 | B20 |
2585 ((static_cast<int32_t>(sn) >> 1)*B16) |
2586 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2587 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2588 Emit32(encoding);
2589}
2590
2591
2592void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
2593 Condition cond) {
2594 CHECK_NE(sm, kNoSRegister);
2595 CHECK_NE(sm, S31);
2596 CHECK_NE(rt, kNoRegister);
2597 CHECK_NE(rt, SP);
2598 CHECK_NE(rt, PC);
2599 CHECK_NE(rt2, kNoRegister);
2600 CHECK_NE(rt2, SP);
2601 CHECK_NE(rt2, PC);
2602 CheckCondition(cond);
2603 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2604 B27 | B26 | B22 |
2605 (static_cast<int32_t>(rt2)*B16) |
2606 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2607 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
2608 (static_cast<int32_t>(sm) >> 1);
2609 Emit32(encoding);
2610}
2611
2612
2613void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
2614 Condition cond) {
2615 CHECK_NE(sm, kNoSRegister);
2616 CHECK_NE(sm, S31);
2617 CHECK_NE(rt, kNoRegister);
2618 CHECK_NE(rt, SP);
2619 CHECK_NE(rt, PC);
2620 CHECK_NE(rt2, kNoRegister);
2621 CHECK_NE(rt2, SP);
2622 CHECK_NE(rt2, PC);
2623 CHECK_NE(rt, rt2);
2624 CheckCondition(cond);
2625 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2626 B27 | B26 | B22 | B20 |
2627 (static_cast<int32_t>(rt2)*B16) |
2628 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2629 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
2630 (static_cast<int32_t>(sm) >> 1);
2631 Emit32(encoding);
2632}
2633
2634
2635void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
2636 Condition cond) {
2637 CHECK_NE(dm, kNoDRegister);
2638 CHECK_NE(rt, kNoRegister);
2639 CHECK_NE(rt, SP);
2640 CHECK_NE(rt, PC);
2641 CHECK_NE(rt2, kNoRegister);
2642 CHECK_NE(rt2, SP);
2643 CHECK_NE(rt2, PC);
2644 CheckCondition(cond);
2645 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2646 B27 | B26 | B22 |
2647 (static_cast<int32_t>(rt2)*B16) |
2648 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
2649 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
2650 (static_cast<int32_t>(dm) & 0xf);
2651 Emit32(encoding);
2652}
2653
2654
2655void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
2656 Condition cond) {
2657 CHECK_NE(dm, kNoDRegister);
2658 CHECK_NE(rt, kNoRegister);
2659 CHECK_NE(rt, SP);
2660 CHECK_NE(rt, PC);
2661 CHECK_NE(rt2, kNoRegister);
2662 CHECK_NE(rt2, SP);
2663 CHECK_NE(rt2, PC);
2664 CHECK_NE(rt, rt2);
2665 CheckCondition(cond);
2666 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2667 B27 | B26 | B22 | B20 |
2668 (static_cast<int32_t>(rt2)*B16) |
2669 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
2670 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
2671 (static_cast<int32_t>(dm) & 0xf);
2672 Emit32(encoding);
2673}
2674
2675
2676void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2677 const Address& addr = static_cast<const Address&>(ad);
2678 CHECK_NE(sd, kNoSRegister);
2679 CheckCondition(cond);
2680 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2681 B27 | B26 | B24 | B20 |
2682 ((static_cast<int32_t>(sd) & 1)*B22) |
2683 ((static_cast<int32_t>(sd) >> 1)*B12) |
2684 B11 | B9 | addr.vencoding();
2685 Emit32(encoding);
2686}
2687
2688
2689void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2690 const Address& addr = static_cast<const Address&>(ad);
2691 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2692 CHECK_NE(sd, kNoSRegister);
2693 CheckCondition(cond);
2694 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2695 B27 | B26 | B24 |
2696 ((static_cast<int32_t>(sd) & 1)*B22) |
2697 ((static_cast<int32_t>(sd) >> 1)*B12) |
2698 B11 | B9 | addr.vencoding();
2699 Emit32(encoding);
2700}
2701
2702
2703void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2704 const Address& addr = static_cast<const Address&>(ad);
2705 CHECK_NE(dd, kNoDRegister);
2706 CheckCondition(cond);
2707 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2708 B27 | B26 | B24 | B20 |
2709 ((static_cast<int32_t>(dd) >> 4)*B22) |
2710 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2711 B11 | B9 | B8 | addr.vencoding();
2712 Emit32(encoding);
2713}
2714
2715
2716void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2717 const Address& addr = static_cast<const Address&>(ad);
2718 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2719 CHECK_NE(dd, kNoDRegister);
2720 CheckCondition(cond);
2721 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2722 B27 | B26 | B24 |
2723 ((static_cast<int32_t>(dd) >> 4)*B22) |
2724 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2725 B11 | B9 | B8 | addr.vencoding();
2726 Emit32(encoding);
2727}
2728
2729
// VPUSH of `nregs` consecutive single-precision registers starting at `reg`.
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2733
2734
// VPUSH of `nregs` consecutive double-precision registers starting at `reg`.
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2738
2739
// VPOP of `nregs` consecutive single-precision registers starting at `reg`.
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2743
2744
// VPOP of `nregs` consecutive double-precision registers starting at `reg`.
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2748
2749
2750void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
2751 CheckCondition(cond);
2752
2753 uint32_t D;
2754 uint32_t Vd;
2755 if (dbl) {
2756 // Encoded as D:Vd.
2757 D = (reg >> 4) & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002758 Vd = reg & 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002759 } else {
2760 // Encoded as Vd:D.
2761 D = reg & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002762 Vd = (reg >> 1) & 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002763 }
2764 int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
2765 B11 | B9 |
2766 (dbl ? B8 : 0) |
2767 (push ? B24 : (B23 | B20)) |
Andreas Gampec8ccf682014-09-29 20:07:43 -07002768 14U /* 0b1110 */ << 28 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002769 nregs << (dbl ? 1 : 0) |
2770 D << 22 |
2771 Vd << 12;
2772 Emit32(encoding);
2773}
2774
2775
2776void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
2777 SRegister sd, SRegister sn, SRegister sm) {
2778 CHECK_NE(sd, kNoSRegister);
2779 CHECK_NE(sn, kNoSRegister);
2780 CHECK_NE(sm, kNoSRegister);
2781 CheckCondition(cond);
2782 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2783 B27 | B26 | B25 | B11 | B9 | opcode |
2784 ((static_cast<int32_t>(sd) & 1)*B22) |
2785 ((static_cast<int32_t>(sn) >> 1)*B16) |
2786 ((static_cast<int32_t>(sd) >> 1)*B12) |
2787 ((static_cast<int32_t>(sn) & 1)*B7) |
2788 ((static_cast<int32_t>(sm) & 1)*B5) |
2789 (static_cast<int32_t>(sm) >> 1);
2790 Emit32(encoding);
2791}
2792
2793
2794void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
2795 DRegister dd, DRegister dn, DRegister dm) {
2796 CHECK_NE(dd, kNoDRegister);
2797 CHECK_NE(dn, kNoDRegister);
2798 CHECK_NE(dm, kNoDRegister);
2799 CheckCondition(cond);
2800 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2801 B27 | B26 | B25 | B11 | B9 | B8 | opcode |
2802 ((static_cast<int32_t>(dd) >> 4)*B22) |
2803 ((static_cast<int32_t>(dn) & 0xf)*B16) |
2804 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2805 ((static_cast<int32_t>(dn) >> 4)*B7) |
2806 ((static_cast<int32_t>(dm) >> 4)*B5) |
2807 (static_cast<int32_t>(dm) & 0xf);
2808 Emit32(encoding);
2809}
2810
2811
2812void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
2813 SRegister sd, DRegister dm) {
2814 CHECK_NE(sd, kNoSRegister);
2815 CHECK_NE(dm, kNoDRegister);
2816 CheckCondition(cond);
2817 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2818 B27 | B26 | B25 | B11 | B9 | opcode |
2819 ((static_cast<int32_t>(sd) & 1)*B22) |
2820 ((static_cast<int32_t>(sd) >> 1)*B12) |
2821 ((static_cast<int32_t>(dm) >> 4)*B5) |
2822 (static_cast<int32_t>(dm) & 0xf);
2823 Emit32(encoding);
2824}
2825
2826
2827void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
2828 DRegister dd, SRegister sm) {
2829 CHECK_NE(dd, kNoDRegister);
2830 CHECK_NE(sm, kNoSRegister);
2831 CheckCondition(cond);
2832 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2833 B27 | B26 | B25 | B11 | B9 | opcode |
2834 ((static_cast<int32_t>(dd) >> 4)*B22) |
2835 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2836 ((static_cast<int32_t>(sm) & 1)*B5) |
2837 (static_cast<int32_t>(sm) >> 1);
2838 Emit32(encoding);
2839}
2840
2841
2842void Thumb2Assembler::vmstat(Condition cond) { // VMRS APSR_nzcv, FPSCR.
Calin Juravleddb7df22014-11-25 20:56:51 +00002843 CHECK_NE(cond, kNoCondition);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002844 CheckCondition(cond);
Calin Juravleddb7df22014-11-25 20:56:51 +00002845 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2846 B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
2847 (static_cast<int32_t>(PC)*B12) |
2848 B11 | B9 | B4;
2849 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002850}
2851
2852
2853void Thumb2Assembler::svc(uint32_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08002854 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002855 int16_t encoding = B15 | B14 | B12 |
2856 B11 | B10 | B9 | B8 |
2857 imm8;
2858 Emit16(encoding);
2859}
2860
2861
2862void Thumb2Assembler::bkpt(uint16_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08002863 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002864 int16_t encoding = B15 | B13 | B12 |
2865 B11 | B10 | B9 |
2866 imm8;
2867 Emit16(encoding);
2868}
2869
2870// Convert the given IT state to a mask bit given bit 0 of the first
2871// condition and a shift position.
2872static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
2873 switch (s) {
2874 case kItOmitted: return 1 << shift;
2875 case kItThen: return firstcond0 << shift;
2876 case kItElse: return !firstcond0 << shift;
2877 }
2878 return 0;
2879}
2880
2881
2882// Set the IT condition in the given position for the given state. This is used
2883// to check that conditional instructions match the preceding IT statement.
2884void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
2885 switch (s) {
2886 case kItOmitted: it_conditions_[index] = AL; break;
2887 case kItThen: it_conditions_[index] = cond; break;
2888 case kItElse:
2889 it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
2890 break;
2891 }
2892}
2893
2894
// Emit an IT (If-Then) instruction covering up to four following conditional
// instructions, and record the per-slot conditions so that subsequent
// instructions can be checked against the IT block.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);       // IT instructions are not allowed inside an IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // Reset all tracked slot conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // The first slot always executes under firstcond.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  // The mask terminates at the first omitted state, so later states only
  // contribute when all earlier ones are present.
  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        mask |= 1U;  // 0b0001: a full four-instruction IT block.
      }
    }
  }

  // Start checking conditions from the first slot.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
                      B11 | B10 | B9 | B8 |
                      firstcond << 4 |
                      mask;
  Emit16(encoding);
}
2929
2930
2931void Thumb2Assembler::cbz(Register rn, Label* label) {
2932 CheckCondition(AL);
2933 if (label->IsBound()) {
2934 LOG(FATAL) << "cbz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00002935 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002936 } else if (IsHighRegister(rn)) {
2937 LOG(FATAL) << "cbz can only be used with low registers";
2938 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002939 } else {
2940 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
2941 label->LinkTo(branchid);
2942 }
2943}
2944
2945
2946void Thumb2Assembler::cbnz(Register rn, Label* label) {
2947 CheckCondition(AL);
2948 if (label->IsBound()) {
2949 LOG(FATAL) << "cbnz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00002950 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002951 } else if (IsHighRegister(rn)) {
2952 LOG(FATAL) << "cbnz can only be used with low registers";
2953 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002954 } else {
2955 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
2956 label->LinkTo(branchid);
2957 }
2958}
2959
2960
2961void Thumb2Assembler::blx(Register rm, Condition cond) {
2962 CHECK_NE(rm, kNoRegister);
2963 CheckCondition(cond);
2964 int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
2965 Emit16(encoding);
2966}
2967
2968
2969void Thumb2Assembler::bx(Register rm, Condition cond) {
2970 CHECK_NE(rm, kNoRegister);
2971 CheckCondition(cond);
2972 int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
2973 Emit16(encoding);
2974}
2975
2976
// Push a single register: store with pre-decremented SP (PUSH {rd}).
void Thumb2Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}
2980
2981
// Pop a single register: load with post-incremented SP (POP {rd}).
void Thumb2Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}
2985
2986
// Push a register list: STMDB SP!, {regs}.
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
2990
2991
// Pop a register list: LDMIA SP!, {regs}.
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
2995
2996
2997void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
2998 if (cond != AL || rd != rm) {
2999 mov(rd, ShifterOperand(rm), cond);
3000 }
3001}
3002
3003
// Bind the label at the current end of the code buffer (see BindLabel).
void Thumb2Assembler::Bind(Label* label) {
  BindLabel(label, buffer_.Size());
}
3007
3008
// Logical shift left by immediate; shift_imm must be in [0, 31].
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
}
3015
3016
// Logical shift right by immediate; shift_imm must be in [1, 32].
void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
}
3024
3025
// Arithmetic shift right by immediate; shift_imm must be in [1, 32].
void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
}
3033
3034
// Rotate right by immediate; shift_imm must be in [1, 31].
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
}
3041
3042
// Rotate right with extend (one-bit rotate through carry); RRX takes no
// shift amount, so `rm` is passed through as the (unused) amount operand.
void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, RRX, rm, cond, set_cc);
}
3047
3048
// Logical shift left by register.
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, cond, set_cc);
}
3054
3055
// Logical shift right by register.
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, cond, set_cc);
}
3061
3062
// Arithmetic shift right by register.
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, cond, set_cc);
}
3068
3069
// Rotate right by register.
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, cond, set_cc);
}
3075
3076
// Patch `offset` into an existing 32-bit Thumb2 branch instruction `inst`,
// returning the instruction with its immediate fields replaced. The encoding
// form is selected by bit 12 of the instruction (B/BL vs conditional branch).
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Branch offsets are encoded in halfword units.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset: fields S:I1:I2:imm10:imm11.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1/J2 are I1/I2 XNOR the sign bit.
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    // Conditional branch: fields S:J1:J2:imm6:imm11 (J1/J2 taken directly
    // from the offset, no XNOR here).
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
3114
3115
// Extract the branch offset from a 32-bit Thumb2 branch instruction.
// Inverse of EncodeBranchOffset, including the +4 PC read-ahead adjustment.
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    // B/BL form: S:I1:I2:imm10:imm11, 25-bit offset.
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // I1/I2 are J1/J2 XNOR the sign bit.
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    // Conditional branch form: S:J1:J2:imm6:imm11, 21-bit offset.
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  imm32 += 4;  // Undo the PC read-ahead compensation applied when encoding.
  return imm32;
}
3142
// Translate a pre-fixup-adjustment code position to its post-adjustment
// position. Intended to be called with non-decreasing positions; cached
// state (last_*_ members) makes a monotone sequence of queries O(m + n).
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Going backwards: restart the scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;
    }
    // Accumulate the growth of every fixup that was expanded before this point.
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3167
3168Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
3169 DCHECK(size == 4u || size == 8u) << size;
3170 literals_.emplace_back(size, data);
3171 return &literals_.back();
3172}
3173
// Load a 32-bit literal into a core register via a PC-relative load; the
// literal pool is placed later and a Fixup tracks the load's range
// (narrow 1KiB form for low registers, wide 4KiB form otherwise).
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // The placeholder halfword holds the label's link chain until resolution.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Second halfword of the 32-bit encoding.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3188
// Load a 64-bit literal into a core register pair (LDRD) via a PC-relative
// load tracked by a Fixup with an initial 1KiB range.
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // The placeholder halfword holds the label's link chain until resolution.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Second halfword of the 32-bit encoding.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3200
// Load a 32-bit literal into a single-precision register (VLDR.32) via a
// PC-relative load tracked by a Fixup with an initial 1KiB range.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // The placeholder halfword holds the label's link chain until resolution.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Second halfword of the 32-bit encoding.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3211
// Load a 64-bit literal into a double-precision register (VLDR.64) via a
// PC-relative load tracked by a Fixup with an initial 1KiB range.
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // The placeholder halfword holds the label's link chain until resolution.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Second halfword of the 32-bit encoding.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003222
Dave Allison65fcc2c2014-04-28 13:45:27 -07003223
3224void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
Vladimir Marko449b1092015-09-08 12:16:45 +01003225 Condition cond, SetCc set_cc) {
3226 if (value == 0 && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003227 if (rd != rn) {
3228 mov(rd, ShifterOperand(rn), cond);
3229 }
3230 return;
3231 }
3232 // We prefer to select the shorter code sequence rather than selecting add for
3233 // positive values and sub for negatives ones, which would slightly improve
3234 // the readability of generated code for some constants.
3235 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003236 if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003237 add(rd, rn, shifter_op, cond, set_cc);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003238 } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003239 sub(rd, rn, shifter_op, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003240 } else {
3241 CHECK(rn != IP);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003242 if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003243 mvn(IP, shifter_op, cond, kCcKeep);
3244 add(rd, rn, ShifterOperand(IP), cond, set_cc);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003245 } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003246 mvn(IP, shifter_op, cond, kCcKeep);
3247 sub(rd, rn, ShifterOperand(IP), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003248 } else {
3249 movw(IP, Low16Bits(value), cond);
3250 uint16_t value_high = High16Bits(value);
3251 if (value_high != 0) {
3252 movt(IP, value_high, cond);
3253 }
Vladimir Marko449b1092015-09-08 12:16:45 +01003254 add(rd, rn, ShifterOperand(IP), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003255 }
3256 }
3257}
3258
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003259
Dave Allison65fcc2c2014-04-28 13:45:27 -07003260void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3261 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003262 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003263 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003264 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003265 mvn(rd, shifter_op, cond);
3266 } else {
3267 movw(rd, Low16Bits(value), cond);
3268 uint16_t value_high = High16Bits(value);
3269 if (value_high != 0) {
3270 movt(rd, value_high, cond);
3271 }
3272 }
3273}
3274
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003275
Dave Allison65fcc2c2014-04-28 13:45:27 -07003276// Implementation note: this method must emit at most one instruction when
3277// Address::CanHoldLoadOffsetThumb.
3278void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3279 Register reg,
3280 Register base,
3281 int32_t offset,
3282 Condition cond) {
3283 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003284 CHECK_NE(base, IP);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003285 LoadImmediate(IP, offset, cond);
3286 add(IP, IP, ShifterOperand(base), cond);
3287 base = IP;
3288 offset = 0;
3289 }
3290 CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
3291 switch (type) {
3292 case kLoadSignedByte:
3293 ldrsb(reg, Address(base, offset), cond);
3294 break;
3295 case kLoadUnsignedByte:
3296 ldrb(reg, Address(base, offset), cond);
3297 break;
3298 case kLoadSignedHalfword:
3299 ldrsh(reg, Address(base, offset), cond);
3300 break;
3301 case kLoadUnsignedHalfword:
3302 ldrh(reg, Address(base, offset), cond);
3303 break;
3304 case kLoadWord:
3305 ldr(reg, Address(base, offset), cond);
3306 break;
3307 case kLoadWordPair:
3308 ldrd(reg, Address(base, offset), cond);
3309 break;
3310 default:
3311 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003312 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003313 }
3314}
3315
3316
3317// Implementation note: this method must emit at most one instruction when
3318// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3319void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3320 Register base,
3321 int32_t offset,
3322 Condition cond) {
3323 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3324 CHECK_NE(base, IP);
3325 LoadImmediate(IP, offset, cond);
3326 add(IP, IP, ShifterOperand(base), cond);
3327 base = IP;
3328 offset = 0;
3329 }
3330 CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
3331 vldrs(reg, Address(base, offset), cond);
3332}
3333
3334
3335// Implementation note: this method must emit at most one instruction when
3336// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3337void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3338 Register base,
3339 int32_t offset,
3340 Condition cond) {
3341 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3342 CHECK_NE(base, IP);
3343 LoadImmediate(IP, offset, cond);
3344 add(IP, IP, ShifterOperand(base), cond);
3345 base = IP;
3346 offset = 0;
3347 }
3348 CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
3349 vldrd(reg, Address(base, offset), cond);
3350}
3351
3352
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  // Non-kNoRegister iff the offset had to be materialized in a temporary.
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    // Prefer IP as the temporary unless IP is one of the registers being
    // stored (for a word pair that includes `reg` + 1).
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push just moved SP down one slot; compensate in the offset.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    // NOTE(review): this add is emitted unconditionally (AL) while the
    // LoadImmediate above uses `cond`; tmp_reg is scratch either way, but
    // confirm the intent against the call sites / history.
    add(tmp_reg, tmp_reg, ShifterOperand(base), AL);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the borrowed register, if any; IP needs no save/restore.
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3407
3408
3409// Implementation note: this method must emit at most one instruction when
3410// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3411void Thumb2Assembler::StoreSToOffset(SRegister reg,
3412 Register base,
3413 int32_t offset,
3414 Condition cond) {
3415 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3416 CHECK_NE(base, IP);
3417 LoadImmediate(IP, offset, cond);
3418 add(IP, IP, ShifterOperand(base), cond);
3419 base = IP;
3420 offset = 0;
3421 }
3422 CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
3423 vstrs(reg, Address(base, offset), cond);
3424}
3425
3426
3427// Implementation note: this method must emit at most one instruction when
3428// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3429void Thumb2Assembler::StoreDToOffset(DRegister reg,
3430 Register base,
3431 int32_t offset,
3432 Condition cond) {
3433 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3434 CHECK_NE(base, IP);
3435 LoadImmediate(IP, offset, cond);
3436 add(IP, IP, ShifterOperand(base), cond);
3437 base = IP;
3438 offset = 0;
3439 }
3440 CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
3441 vstrd(reg, Address(base, offset), cond);
3442}
3443
3444
3445void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
3446 CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003447 dmb(SY);
3448}
3449
3450
3451void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003452 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3453 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003454}
3455
3456
3457void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003458 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003459 cbz(r, label);
3460 } else {
3461 cmp(r, ShifterOperand(0));
3462 b(label, EQ);
3463 }
3464}
3465
3466
Dave Allison65fcc2c2014-04-28 13:45:27 -07003467void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003468 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003469 cbnz(r, label);
3470 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003471 cmp(r, ShifterOperand(0));
3472 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003473 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003474}
3475} // namespace arm
3476} // namespace art