blob: 2ec55869eec317e4a0378443c90e2f95c7af27c4 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // Pass 1: Count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward branch: every Fixup located between this one and its target
      // affects this one's range, so this Fixup is a dependent of each of them.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward branch: same, walking the preceding Fixups down to the target.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Pass 2: Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;
  }
  // Pass 3: Create and fill in the fixup_dependents_. Repeats the same traversal as
  // pass 1; each insertion decrements dependents_start_, so after this loop it points
  // at the first element of each Fixup's range.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  // Binds `label` to `bound_pc` and resolves every Fixup linked to it.
  // While unbound, the label's position holds the id of the first Fixup in the
  // chain, and each Fixup's 16-bit placeholder in the buffer holds the id of
  // the next one; placeholders are reset to 0 as the chain is unlinked.
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                  // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                     // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                              // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);  // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);             // Clear the placeholder.
    label->position_ = next;                               // Move to next.
  }
  label->BindTo(bound_pc);
}
94
95void Thumb2Assembler::BindLiterals() {
96 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
97 uint32_t code_size = buffer_.Size();
98 for (Literal& lit : literals_) {
99 Label* label = lit.GetLabel();
100 BindLabel(label, code_size);
101 code_size += lit.GetSize();
102 }
103}
104
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  // Grows `fixup` if its target no longer fits the current encoding. Growth
  // shifts all code after the fixup, so every dependent gets its adjustment
  // increased and is queued for recalculation. The fixup's 16-bit placeholder
  // in buffer_ doubles as an "already queued" flag (0 = not queued).
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        // Not queued yet; mark it and queue it.
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
120
uint32_t Thumb2Assembler::AdjustFixups() {
  // Iterates until all Fixups have their final (possibly grown) sizes and
  // returns the adjusted code size, also rebinding literal labels for any
  // alignment padding that the growth made necessary.
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass: adjust everything once; dependents that become out of range
  // are queued for recalculation.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" mark.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && !literals_.empty()) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
180
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      // Shrink the remaining source/destination windows to exclude the moved
      // data and the fixup itself (at its new, possibly larger, size).
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // All data before the first changed fixup stays in place, so the windows
  // must have converged to the same offset.
  CHECK_EQ(src_end, dest_end);
}
211
212void Thumb2Assembler::EmitLiterals() {
213 if (!literals_.empty()) {
214 // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
215 // We don't support byte and half-word literals.
216 uint32_t code_size = buffer_.Size();
Roland Levillain14d90572015-07-16 10:52:26 +0100217 DCHECK_ALIGNED(code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000218 if ((code_size & 2u) != 0u) {
219 Emit16(0);
220 }
221 for (Literal& literal : literals_) {
222 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
223 DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
224 DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
225 for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
226 buffer_.Emit<uint8_t>(literal.GetData()[i]);
227 }
228 }
229 }
230}
231
232inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
Roland Levillain14d90572015-07-16 10:52:26 +0100233 DCHECK_ALIGNED(offset, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000234 int16_t encoding = B15 | B14;
235 if (cond != AL) {
236 DCHECK(IsInt<9>(offset));
237 encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
238 } else {
239 DCHECK(IsInt<12>(offset));
240 encoding |= B13 | ((offset >> 1) & 0x7ff);
241 }
242 return encoding;
243}
244
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  // 32-bit branch: encoding T3 (conditional) or T4 (unconditional).
  // The offset is split into S:imm component bits plus J1/J2, which for T4 are
  // derived from I1/I2 via XOR with the sign bit.
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |   // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));    // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22) ^ s ^ 1) & 1;  // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
267
268inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
269 DCHECK(!IsHighRegister(rn));
Roland Levillain14d90572015-07-16 10:52:26 +0100270 DCHECK_ALIGNED(offset, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000271 DCHECK(IsUint<7>(offset));
272 DCHECK(cond == EQ || cond == NE);
273 return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
274 ((offset & 0x3e) << (3 - 1)) | // Move imm5 from bits 1-5 to bits 3-7.
275 ((offset & 0x40) << (9 - 6)); // Move i from bit 6 to bit 11
276}
277
278inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
279 DCHECK(!IsHighRegister(rn));
280 DCHECK(IsUint<8>(value));
281 return B13 | B11 | (rn << 8) | value;
282}
283
284inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
285 // The high bit of rn is moved across 4-bit rm.
286 return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
287 (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
288}
289
290inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
291 DCHECK(IsUint<16>(value));
292 return B31 | B30 | B29 | B28 | B25 | B22 |
293 (static_cast<int32_t>(rd) << 8) |
294 ((value & 0xf000) << (16 - 12)) | // Move imm4 from bits 12-15 to bits 16-19.
295 ((value & 0x0800) << (26 - 11)) | // Move i from bit 11 to bit 26.
296 ((value & 0x0700) << (12 - 8)) | // Move imm3 from bits 8-10 to bits 12-14.
297 (value & 0xff); // Keep imm8 in bits 0-7.
298}
299
300inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
301 DCHECK_EQ(value & 0xffff, 0);
302 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
303 return movw_encoding | B25 | B23;
304}
305
306inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
307 uint32_t mod_imm = ModifiedImmediate(value);
308 DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
309 return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
310 (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
311}
312
313inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
314 DCHECK(!IsHighRegister(rt));
Roland Levillain14d90572015-07-16 10:52:26 +0100315 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000316 DCHECK(IsUint<10>(offset));
317 return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
318}
319
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // 32-bit LDR (literal): a PC-relative LDR with a 12-bit immediate.
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}
324
325inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
Roland Levillain14d90572015-07-16 10:52:26 +0100326 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000327 CHECK(IsUint<10>(offset));
328 return B31 | B30 | B29 | B27 |
329 B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
330 (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
331 (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
332}
333
334inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
Roland Levillain14d90572015-07-16 10:52:26 +0100335 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000336 CHECK(IsUint<10>(offset));
337 return B31 | B30 | B29 | B27 | B26 | B24 |
338 B23 /* U = 1 */ | B20 | B11 | B9 |
339 (static_cast<int32_t>(rn) << 16) |
340 ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) | // Move D from bit 0 to bit 22.
341 ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) | // Move Vd from bits 1-4 to bits 12-15.
342 (offset >> 2);
343}
344
345inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
Roland Levillain14d90572015-07-16 10:52:26 +0100346 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000347 CHECK(IsUint<10>(offset));
348 return B31 | B30 | B29 | B27 | B26 | B24 |
349 B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
350 (rn << 16) |
351 ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) | // Move D from bit 4 to bit 22.
352 ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) | // Move Vd from bits 0-3 to bits 12-15.
353 (offset >> 2);
354}
355
356inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
357 DCHECK(!IsHighRegister(rt));
358 DCHECK(!IsHighRegister(rn));
Roland Levillain14d90572015-07-16 10:52:26 +0100359 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000360 DCHECK(IsUint<7>(offset));
361 return B14 | B13 | B11 |
362 (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
363 (offset << (6 - 2)); // Move imm5 from bits 2-6 to bits 6-10.
364}
365
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  // Dispatches to the 32-bit load encoding matching this fixup's literal type.
  // For kLoadLiteralWide the two destination registers are held in rn_ and rt2_.
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      // Only the literal-load fixup types are valid here.
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
379
380inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
381 DCHECK(IsUint<12>(offset));
382 return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
383}
384
void Thumb2Assembler::FinalizeCode() {
  // Final assembly pass: bind literal pool labels just past the code, grow any
  // out-of-range fixups (which may shift code and literals), then emit the
  // fixed-up code followed by the literal pool.
  ArmAssembler::FinalizeCode();
  BindLiterals();
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
}
392
bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
  // True if `immediate` is encodable as a Thumb2 modified immediate and can
  // therefore be used with any data-processing opcode.
  return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
}
396
Nicolas Geoffray3d1e7882015-02-03 13:59:52 +0000397bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
398 Register rn ATTRIBUTE_UNUSED,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000399 Opcode opcode,
400 uint32_t immediate,
401 ShifterOperand* shifter_op) {
402 shifter_op->type_ = ShifterOperand::kImmediate;
403 shifter_op->immed_ = immediate;
404 shifter_op->is_shift_ = false;
405 shifter_op->is_rotate_ = false;
406 switch (opcode) {
407 case ADD:
408 case SUB:
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000409 if (immediate < (1 << 12)) { // Less than (or equal to) 12 bits can always be done.
410 return true;
411 }
412 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
413
414 case MOV:
415 // TODO: Support less than or equal to 12bits.
416 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
417 case MVN:
418 default:
419 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
420 }
421}
422
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  // Bitwise AND: rd := rn & so. Delegates to the shared data-processing emitter.
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}
427
428
void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Exclusive OR: rd := rn ^ so.
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}
433
434
void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Subtract: rd := rn - so.
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}
439
440
void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Reverse subtract: rd := so - rn.
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}
445
446
void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Add: rd := rn + so.
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}
451
452
void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Add with carry: rd := rn + so + carry.
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}
457
458
void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Subtract with carry: rd := rn - so - NOT(carry).
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}
463
464
void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Reverse subtract with carry: rd := so - rn - NOT(carry).
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}
469
470
void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  // Test: sets flags for rn & so; the result is discarded (R0 is a dummy rd).
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}
475
476
void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  // Test equivalence: sets flags for rn ^ so; result discarded (R0 is a dummy rd).
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}
481
482
void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  // Compare: sets flags for rn - so; result discarded (R0 is a dummy rd).
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}
486
487
void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  // Compare negative: sets flags for rn + so; result discarded (R0 is a dummy rd).
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}
491
492
void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Bitwise OR: rd := rn | so.
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}
497
498
void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Move: rd := so. MOV has no rn operand; R0 fills the unused slot.
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}
503
504
void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Bit clear: rd := rn & ~so.
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}
509
510
void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  // Move NOT: rd := ~so. MVN has no rn operand; R0 fills the unused slot.
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
515
516
517void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700518 CheckCondition(cond);
519
Dave Allison65fcc2c2014-04-28 13:45:27 -0700520 if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
521 // 16 bit.
522 int16_t encoding = B14 | B9 | B8 | B6 |
523 rn << 3 | rd;
524 Emit16(encoding);
525 } else {
526 // 32 bit.
Andreas Gampec8ccf682014-09-29 20:07:43 -0700527 uint32_t op1 = 0U /* 0b000 */;
528 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700529 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
530 op1 << 20 |
531 B15 | B14 | B13 | B12 |
532 op2 << 4 |
533 static_cast<uint32_t>(rd) << 8 |
534 static_cast<uint32_t>(rn) << 16 |
535 static_cast<uint32_t>(rm);
536
537 Emit32(encoding);
538 }
539}
540
541
void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                          Condition cond) {
  // Multiply accumulate: rd := ra + rn * rm (32-bit encoding only).
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000 */;
  uint32_t op2 = 0U /* 0b00 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(ra) << 12 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
558
559
560void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
561 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700562 CheckCondition(cond);
563
Andreas Gampec8ccf682014-09-29 20:07:43 -0700564 uint32_t op1 = 0U /* 0b000 */;
565 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700566 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
567 op1 << 20 |
568 op2 << 4 |
569 static_cast<uint32_t>(rd) << 8 |
570 static_cast<uint32_t>(ra) << 12 |
571 static_cast<uint32_t>(rn) << 16 |
572 static_cast<uint32_t>(rm);
573
574 Emit32(encoding);
575}
576
577
Zheng Xuc6667102015-05-15 16:08:45 +0800578void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
579 Register rm, Condition cond) {
580 CheckCondition(cond);
581
582 uint32_t op1 = 0U /* 0b000; */;
583 uint32_t op2 = 0U /* 0b0000 */;
584 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
585 op1 << 20 |
586 op2 << 4 |
587 static_cast<uint32_t>(rd_lo) << 12 |
588 static_cast<uint32_t>(rd_hi) << 8 |
589 static_cast<uint32_t>(rn) << 16 |
590 static_cast<uint32_t>(rm);
591
592 Emit32(encoding);
593}
594
595
Dave Allison65fcc2c2014-04-28 13:45:27 -0700596void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
597 Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700598 CheckCondition(cond);
599
Andreas Gampec8ccf682014-09-29 20:07:43 -0700600 uint32_t op1 = 2U /* 0b010; */;
601 uint32_t op2 = 0U /* 0b0000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700602 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
603 op1 << 20 |
604 op2 << 4 |
605 static_cast<uint32_t>(rd_lo) << 12 |
606 static_cast<uint32_t>(rd_hi) << 8 |
607 static_cast<uint32_t>(rn) << 16 |
608 static_cast<uint32_t>(rm);
609
610 Emit32(encoding);
611}
612
613
614void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700615 CheckCondition(cond);
616
Andreas Gampec8ccf682014-09-29 20:07:43 -0700617 uint32_t op1 = 1U /* 0b001 */;
618 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700619 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
620 op1 << 20 |
621 op2 << 4 |
622 0xf << 12 |
623 static_cast<uint32_t>(rd) << 8 |
624 static_cast<uint32_t>(rn) << 16 |
625 static_cast<uint32_t>(rm);
626
627 Emit32(encoding);
628}
629
630
631void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700632 CheckCondition(cond);
633
Andreas Gampec8ccf682014-09-29 20:07:43 -0700634 uint32_t op1 = 1U /* 0b001 */;
635 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700636 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
637 op1 << 20 |
638 op2 << 4 |
639 0xf << 12 |
640 static_cast<uint32_t>(rd) << 8 |
641 static_cast<uint32_t>(rn) << 16 |
642 static_cast<uint32_t>(rm);
643
644 Emit32(encoding);
645}
646
647
Roland Levillain51d3fc42014-11-13 14:11:42 +0000648void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
649 CheckCondition(cond);
650 CHECK_LE(lsb, 31U);
651 CHECK(1U <= width && width <= 32U) << width;
652 uint32_t widthminus1 = width - 1;
653 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
654 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
655
656 uint32_t op = 20U /* 0b10100 */;
657 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
658 op << 20 |
659 static_cast<uint32_t>(rn) << 16 |
660 imm3 << 12 |
661 static_cast<uint32_t>(rd) << 8 |
662 imm2 << 6 |
663 widthminus1;
664
665 Emit32(encoding);
666}
667
668
Roland Levillain981e4542014-11-14 11:47:14 +0000669void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
670 CheckCondition(cond);
671 CHECK_LE(lsb, 31U);
672 CHECK(1U <= width && width <= 32U) << width;
673 uint32_t widthminus1 = width - 1;
674 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
675 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
676
677 uint32_t op = 28U /* 0b11100 */;
678 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
679 op << 20 |
680 static_cast<uint32_t>(rn) << 16 |
681 imm3 << 12 |
682 static_cast<uint32_t>(rd) << 8 |
683 imm2 << 6 |
684 widthminus1;
685
686 Emit32(encoding);
687}
688
689
void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  // Load word. The bools select (load, byte, half, signed) — all false but
  // load, matching the ldrb/ldrh/ldrsb/ldrsh variants below.
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}
693
694
void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  // Store word (load = false, byte/half/signed = false).
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}
698
699
void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  // Load unsigned byte (load = true, byte = true).
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}
703
704
void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  // Store byte (load = false, byte = true).
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}
708
709
void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  // Load unsigned half-word (load = true, half = true).
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}
713
714
void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  // Store half-word (load = false, half = true).
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}
718
719
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  // Load signed byte (load = true, byte = true, signed = true).
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}
723
724
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  // Load signed half-word (load = true, half = true, signed = true).
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
728
729
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  // Load double-word into the consecutive pair {rd, rd+1}.
  ldrd(rd, Register(rd + 1), ad, cond);
}
733
734
735void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700736 CheckCondition(cond);
Roland Levillain4af147e2015-04-07 13:54:49 +0100737 // Encoding T1.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700738 // This is different from other loads. The encoding is like ARM.
739 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
740 static_cast<int32_t>(rd) << 12 |
Roland Levillain4af147e2015-04-07 13:54:49 +0100741 static_cast<int32_t>(rd2) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700742 ad.encodingThumbLdrdStrd();
743 Emit32(encoding);
744}
745
746
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  // Store double-word from the consecutive pair {rd, rd+1}.
  strd(rd, Register(rd + 1), ad, cond);
}
750
751
752void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700753 CheckCondition(cond);
Roland Levillain4af147e2015-04-07 13:54:49 +0100754 // Encoding T1.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700755 // This is different from other loads. The encoding is like ARM.
756 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
757 static_cast<int32_t>(rd) << 12 |
Roland Levillain4af147e2015-04-07 13:54:49 +0100758 static_cast<int32_t>(rd2) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -0700759 ad.encodingThumbLdrdStrd();
760 Emit32(encoding);
761}
762
763
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  // Load multiple registers from `base` per the addressing mode. A single-bit
  // register list is lowered to a plain LDR, since Thumb LDM cannot encode it.
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the fallback uses a post-indexed LDR with a positive
    // increment; confirm this is the intended lowering for the DB_W mode.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
780
781
// Store multiple registers (STM) to consecutive memory at `base`.
// A single-register list is converted to an equivalent STR, since the
// Thumb STM encoding does not support one-register lists.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): IA yields a pre-indexed store with writeback
    // (str reg, [base, #-4]!) and IA_W a plain negative-offset store
    // (str reg, [base, #-4]); both look like decrement-before (STMDB)
    // behavior rather than increment-after -- confirm the intended mode
    // mapping against the callers before changing anything here.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
799
800
801bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
802 uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
803 if (((imm32 & ((1 << 19) - 1)) == 0) &&
804 ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
805 (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
806 uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
807 ((imm32 >> 19) & ((1 << 6) -1));
808 EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
809 sd, S0, S0);
810 return true;
811 }
812 return false;
813}
814
815
816bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
817 uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
818 if (((imm64 & ((1LL << 48) - 1)) == 0) &&
819 ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
820 (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
821 uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
822 ((imm64 >> 48) & ((1 << 6) -1));
823 EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
824 dd, D0, D0);
825 return true;
826 }
827 return false;
828}
829
830
// VMOV.F32 (register): sd = sm.
void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}
834
835
// VMOV.F64 (register): dd = dm.
void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}
839
840
// VADD.F32: sd = sn + sm.
void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}
845
846
// VADD.F64: dd = dn + dm.
void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}
851
852
// VSUB.F32: sd = sn - sm.
void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}
857
858
// VSUB.F64: dd = dn - dm.
void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}
863
864
// VMUL.F32: sd = sn * sm.
void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}
869
870
// VMUL.F64: dd = dn * dm.
void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}
875
876
// VMLA.F32 (multiply-accumulate): sd += sn * sm.
void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}
881
882
// VMLA.F64 (multiply-accumulate): dd += dn * dm.
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}
887
888
// VMLS.F32 (multiply-subtract): sd -= sn * sm.
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}
893
894
// VMLS.F64 (multiply-subtract): dd -= dn * dm.
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
899
900
// VDIV.F32: sd = sn / sm.
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}
905
906
// VDIV.F64: dd = dn / dm.
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
911
912
// VABS.F32: sd = |sm|.
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}
916
917
// VABS.F64: dd = |dm|.
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}
921
922
// VNEG.F32: sd = -sm.
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}
926
927
// VNEG.F64: dd = -dm.
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}
931
932
// VSQRT.F32: sd = sqrt(sm).
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}
936
// VSQRT.F64: dd = sqrt(dm).
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
940
941
// VCVT.F32.F64: narrow double dm to single-precision sd.
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}
945
946
// VCVT.F64.F32: widen single-precision sm to double dd.
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}
950
951
// VCVT.S32.F32: convert single-precision sm to a signed 32-bit integer in sd.
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}
955
956
// VCVT.S32.F64: convert double dm to a signed 32-bit integer in sd.
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}
960
961
// VCVT.F32.S32: convert the signed 32-bit integer in sm to single-precision sd.
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}
965
966
// VCVT.F64.S32: convert the signed 32-bit integer in sm to double dd.
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}
970
971
// VCVT.U32.F32: convert single-precision sm to an unsigned 32-bit integer in sd.
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}
975
976
// VCVT.U32.F64: convert double dm to an unsigned 32-bit integer in sd.
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}
980
981
// VCVT.F32.U32: convert the unsigned 32-bit integer in sm to single-precision sd.
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}
985
986
// VCVT.F64.U32: convert the unsigned 32-bit integer in sm to double dd.
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
990
991
// VCMP.F32: compare sd against sm (result goes to the VFP status flags).
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}
995
996
// VCMP.F64: compare dd against dm (result goes to the VFP status flags).
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}
1000
1001
// VCMP.F32 against zero: compare sd with #0.0.
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}
1005
1006
// VCMP.F64 against zero: compare dd with #0.0.
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
1010
// B (branch) to label, optionally conditional. Requires no pending
// condition state (next_condition_ == AL) -- presumably this forbids b()
// inside an IT block, since the condition is encoded in the branch itself.
void Thumb2Assembler::b(Label* label, Condition cond) {
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1015
1016
// BL (branch with link) to label.
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
1021
1022
// BLX to label: branch with link and exchange, always emitted unconditionally.
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
1026
1027
// Marks an exception handler site: emits a TST marker instruction followed
// by a branch to `label` that normal control flow jumps over (via the local
// label `l`), so the handler target is recorded in the instruction stream
// without ever being executed.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1035
1036
1037void Thumb2Assembler::Emit32(int32_t value) {
1038 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1039 buffer_.Emit<int16_t>(value >> 16);
1040 buffer_.Emit<int16_t>(value & 0xffff);
1041}
1042
1043
// Appends a single 16-bit instruction halfword to the code buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1048
1049
// Returns true if the data processing instruction described by the arguments
// cannot be encoded as a 16-bit (Thumb1-style) instruction and must use the
// 32-bit Thumb2 encoding. The 16-bit encodings restrict register range
// (low vs. high registers), immediate width, the available shift forms, and
// tie flag-setting to being outside an IT block (i.e. cond == AL).
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  // The assembler may be forced into 32-bit-only mode.
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only these forms have 16-bit encodings that accept high registers.
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
      // There is no 16-bit TEQ at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if (!IsUint<3>(so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (!IsUint<8>(so.GetImmediate())) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1203
1204
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001205void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001206 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001207 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001208 Register rn,
1209 Register rd,
1210 const ShifterOperand& so) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001211 uint8_t thumb_opcode = 255U /* 0b11111111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001212 switch (opcode) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001213 case AND: thumb_opcode = 0U /* 0b0000 */; break;
1214 case EOR: thumb_opcode = 4U /* 0b0100 */; break;
1215 case SUB: thumb_opcode = 13U /* 0b1101 */; break;
1216 case RSB: thumb_opcode = 14U /* 0b1110 */; break;
1217 case ADD: thumb_opcode = 8U /* 0b1000 */; break;
Andreas Gampe35c68e32014-09-30 08:39:37 -07001218 case ADC: thumb_opcode = 10U /* 0b1010 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001219 case SBC: thumb_opcode = 11U /* 0b1011 */; break;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001220 case RSC: break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001221 case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
1222 case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
1223 case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
1224 case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001225 case ORR: thumb_opcode = 2U /* 0b0010 */; break;
1226 case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
1227 case BIC: thumb_opcode = 1U /* 0b0001 */; break;
1228 case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001229 default:
1230 break;
1231 }
1232
Andreas Gampec8ccf682014-09-29 20:07:43 -07001233 if (thumb_opcode == 255U /* 0b11111111 */) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001234 LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001235 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001236 }
1237
1238 int32_t encoding = 0;
1239 if (so.IsImmediate()) {
1240 // Check special cases.
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001241 if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001242 if (set_cc != kCcSet) {
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001243 if (opcode == SUB) {
1244 thumb_opcode = 5U;
1245 } else if (opcode == ADD) {
1246 thumb_opcode = 0U;
1247 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001248 }
1249 uint32_t imm = so.GetImmediate();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001250
1251 uint32_t i = (imm >> 11) & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001252 uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001253 uint32_t imm8 = imm & 0xff;
1254
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001255 encoding = B31 | B30 | B29 | B28 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001256 (set_cc == kCcSet ? B20 : B25) |
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001257 thumb_opcode << 21 |
1258 rn << 16 |
1259 rd << 8 |
1260 i << 26 |
1261 imm3 << 12 |
1262 imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001263 } else {
1264 // Modified immediate.
Dave Allison45fdb932014-06-25 12:37:10 -07001265 uint32_t imm = ModifiedImmediate(so.encodingThumb());
Dave Allison65fcc2c2014-04-28 13:45:27 -07001266 if (imm == kInvalidModifiedImmediate) {
1267 LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001268 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001269 }
1270 encoding = B31 | B30 | B29 | B28 |
1271 thumb_opcode << 21 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001272 (set_cc == kCcSet ? B20 : 0) |
Dave Allison65fcc2c2014-04-28 13:45:27 -07001273 rn << 16 |
1274 rd << 8 |
1275 imm;
1276 }
1277 } else if (so.IsRegister()) {
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001278 // Register (possibly shifted)
1279 encoding = B31 | B30 | B29 | B27 | B25 |
1280 thumb_opcode << 21 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001281 (set_cc == kCcSet ? B20 : 0) |
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001282 rn << 16 |
1283 rd << 8 |
1284 so.encodingThumb();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001285 }
1286 Emit32(encoding);
1287}
1288
1289
// Emits a 16-bit data processing instruction. ADD and SUB are delegated to
// Emit16BitAddSub; the remaining opcodes are encoded here by choosing the
// format fields (dp_opcode, thumb_opcode and the per-field shifts) for the
// appropriate Thumb1 encoding. 16-bit encodability is established by
// Is32BitDataProcessing (see EmitDataProcessing).
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    // Select the Thumb1 register-register opcode field.
    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the final halfword from the selected fields.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1478
1479
1480// ADD and SUB are complex enough to warrant their own emitter.
// Emits a 16-bit ADD or SUB, choosing among the T1/T2 register and
// immediate encodings, including the SP-relative special forms. Immediate
// and register-range constraints are assumed to have been validated by
// Is32BitDataProcessing before this is reached.
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 9 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
              (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1.
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          // The second source register rm travels in the immediate field.
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // The encoding stores the word-aligned offset / 4.
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          CHECK_LT(immediate, (1u << 10));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          immediate >>= 2;  // The encoding stores the word-aligned offset / 4.
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        // The second source register rm travels in the immediate field.
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // The encoding stores the word-aligned offset / 4.
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the final halfword from the selected fields.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1629
1630
1631void Thumb2Assembler::EmitDataProcessing(Condition cond,
1632 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001633 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001634 Register rn,
1635 Register rd,
1636 const ShifterOperand& so) {
1637 CHECK_NE(rd, kNoRegister);
1638 CheckCondition(cond);
1639
1640 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1641 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1642 } else {
1643 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1644 }
1645}
1646
// Emits a shift of register rm by immediate `amount` into rd (LSL/LSR/ASR/ROR/RRX).
//
// A 32-bit encoding is required when a high register is involved, when the
// shift is ROR or RRX (no 16-bit form for those), or when the requested
// flags-setting behavior doesn't match what the 16-bit encoding does
// (16-bit shifts set flags iff outside an IT block, i.e. iff cond == AL).
void Thumb2Assembler::EmitShift(Register rd,
                                Register rm,
                                Shift shift,
                                uint8_t amount,
                                Condition cond,
                                SetCc set_cc) {
  // Shift amounts are encoded in 5 bits, so must be < 32.
  CHECK_LT(amount, (1 << 5));
  if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      // RRX shares the ROR opcode with a zero shift amount.
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (set_cc == kCcSet ? B20 : 0);
    // The 5-bit immediate is split into imm3 (bits 14-12) and imm2 (bits 7-6).
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        // ROR/RRX cannot reach here: they were routed to the 32-bit path above.
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1691
// Emits a shift of register rn by register rm into rd (LSL/LSR/ASR/ROR).
// RRX has no register-shift form, hence the CHECK below.
//
// The 16-bit encoding only supports low registers with rd == rn, and it sets
// flags iff outside an IT block (cond == AL); anything else needs 32 bits.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rn,
                                Shift shift,
                                Register rm,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_NE(shift, RRX);
  bool must_be_32bit = false;
  if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
        0xf << 12 | (set_cc == kCcSet ? B20 : 0);
    encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 21;
    Emit32(encoding);
  } else {
    // 16-bit data-processing encoding; opcodes here are 4-bit, unlike the
    // 2-bit opcodes of the 32-bit form above.
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 2U /* 0b0010 */; break;
      case LSR: opcode = 3U /* 0b0011 */; break;
      case ASR: opcode = 4U /* 0b0100 */; break;
      case ROR: opcode = 7U /* 0b0111 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1738
// Returns the number of code bytes occupied by a fixup of the given size
// class. Multi-instruction sequences (e.g. kCbxz48Bit = CMP + 32-bit B,
// kLiteralFar = MOVW + MOVT + ADD + LDR) count all their instructions.
inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
  switch (size) {
    case kBranch16Bit:
      return 2u;
    case kBranch32Bit:
      return 4u;

    case kCbxz16Bit:
      return 2u;
    case kCbxz32Bit:
      return 4u;
    case kCbxz48Bit:
      return 6u;

    case kLiteral1KiB:
      return 2u;
    case kLiteral4KiB:
      return 4u;
    case kLiteral64KiB:
      return 8u;
    case kLiteral1MiB:
      return 10u;
    case kLiteralFar:
      return 14u;

    case kLongOrFPLiteral1KiB:
      return 4u;
    case kLongOrFPLiteral256KiB:
      return 10u;
    case kLongOrFPLiteralFar:
      return 14u;
  }
  // The switch above is exhaustive over Size; reaching here means a corrupt value.
  LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
  UNREACHABLE();
}
1774
// Size in bytes of the fixup's code as originally emitted, before any
// AdjustSizeIfNeeded() growth.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1778
// Current size in bytes of the fixup's code (reflects any growth applied so far).
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1782
// Returns the padding (0 or 2 bytes) needed before the literal pool so that
// the pool starts 4-byte aligned, given the current (2-byte aligned) code size.
inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
  // The code size must be a multiple of 2.
  DCHECK_ALIGNED(current_code_size, 2);
  // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
  return current_code_size & 2;
}
1789
// Computes the branch/load offset to encode for this fixup, given the current
// total code size: target minus location, widened by the accumulated
// adjustment_ (bytes inserted between location_ and target_ by other fixups
// growing), minus the Thumb2 PC bias, minus any extra instructions this
// fixup's expanded sequence places before the PC-using instruction.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  // All quantities must fit in int32_t so the signed arithmetic below cannot overflow.
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  // adjustment_ always moves the target further away, so it widens |diff|
  // in whichever direction the branch goes.
  if (target_ > location_) {
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1850
// Grows this fixup to a strictly larger size class and returns the number of
// extra bytes. When the target lies ahead of the fixup, the inserted bytes
// also push the target further away, so adjustment_ grows by the same amount.
inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
  DCHECK_NE(target_, kUnresolved);
  Size old_size = size_;
  size_ = new_size;
  DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
  size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
  if (target_ > location_) {
    adjustment_ += adjustment;
  }
  return adjustment;
}
1862
// Grows this fixup, possibly through several size classes (the cases
// deliberately fall through), until its current size class can encode the
// offset at the given code size. Returns the number of bytes added.
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // 16-bit B<cond> has a 9-bit offset range, unconditional B has 12 bits.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ can only branch forward, 7-bit unsigned offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
1941
// Writes the final machine code for this fixup into the buffer at location_,
// using the final code size to compute the encoded offset. Multi-instruction
// size classes store each halfword of the sequence in order; 32-bit
// instructions are stored high halfword first.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Setting B14 turns the B encoding into BL.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Flipping B14/B12 turns the B encoding into BLX.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CBZ/CBNZ out of range: emit CMP Rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // As kCbxz32Bit but with a 32-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT the full 32-bit offset into rn, then ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // MOV IP, #(offset & ~0x3ff); ADD IP, PC; wide/FP load at [IP, #(offset & 0x3ff)].
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW+MOVT the full offset into IP, then ADD IP, PC; wide/FP load at [IP].
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2084
// Emits a CBZ (n == false) or CBNZ (n == true) placeholder for rn and
// registers a fixup for it. Returns the fixup id. `prev` is emitted in the
// placeholder slot to chain unresolved branches of the owning label.
uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
  // CBZ/CBNZ only encode low registers.
  CHECK(IsLowRegister(rn));
  uint32_t location = buffer_.Size();

  // This is always unresolved as it must be a forward branch.
  Emit16(prev);      // Previous link.
  return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
}
2093
2094
// NOTE: this only supports immediate offsets, not [rx,ry].
// TODO: support [rx,ry] instructions.
//
// Emits a load or store of rd at the given address, picking the narrowest
// legal encoding. `byte`/`half` select the access size (word if both false),
// `is_signed` selects sign-extending loads. A 32-bit encoding is forced by
// force_32bit_, high registers, signed/negative/non-Offset addressing, or
// an immediate offset too large for the 16-bit forms.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // SP and PC bases have dedicated 16-bit forms; other high registers don't.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal access: 12-bit magnitude with an explicit up/down bit (B23).
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      // Register offset, possibly shifted.
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2258
2259
// Emits LDM/STM (and PUSH/POP as a special case) for the register list `regs`
// with base register `base`. Only IA/IA_W and DB/DB_W block address modes
// have Thumb encodings; DA/IB variants are rejected with a fatal error.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // SP with IA_W on load / DB_W on store is POP/PUSH; the 16-bit form allows
  // low registers plus PC (POP) or LR (PUSH) only.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // High registers in the list require the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
        (op << 23) |
        (load ? B20 : 0) |
        base << 16 |
        regs |
        (w_bit << 21);
    Emit32(encoding);
  } else {
    int16_t encoding = B15 | B14 |
        (load ? B11 : 0) |
        base << 8 |
        regs;
    Emit16(encoding);
  }
}
2328
// Emits a branch (B, B<cond>, BL or BLX) to `label` as a fixup-managed
// placeholder. Link branches are always 32-bit; plain branches start 16-bit
// when branch relocation is available and may grow later.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  // Reserve the second halfword of a 32-bit branch placeholder.
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2367
2368
// CLZ rd, rm: count leading zeros (32-bit encoding only).
void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  // Note: rm appears twice (bits 19-16 and bits 3-0) as required by the encoding.
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B21 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2384
2385
// MOVW rd, #imm16: load a 16-bit immediate, clearing the upper half of rd.
// Uses the narrow 16-bit MOV encoding when rd is a low register and the
// immediate fits in 8 bits; otherwise the 32-bit T3 encoding.
void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd) || imm16 >= 256u) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    // Use encoding T3: the immediate is split into i:imm4:imm3:imm8.
    uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
    uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
    uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
    uint32_t imm8 = imm16 & 0xff;
    int32_t encoding = B31 | B30 | B29 | B28 |
        B25 | B22 |
        static_cast<uint32_t>(rd) << 8 |
        i << 26 |
        imm4 << 16 |
        imm3 << 12 |
        imm8;
    Emit32(encoding);
  } else {
    // 16-bit immediate MOV; imm16 is known to fit in 8 bits here.
    int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
        imm16;
    Emit16(encoding);
  }
}
2413
2414
// MOVT rd, #imm16: load imm16 into the top half of rd, bottom half unchanged.
void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  // Always 32 bits; the immediate is split into i:imm4:imm3:imm8.
  uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
  uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
  uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
  uint32_t imm8 = imm16 & 0xff;
  int32_t encoding = B31 | B30 | B29 | B28 |
      B25 | B23 | B22 |
      static_cast<uint32_t>(rd) << 8 |
      i << 26 |
      imm4 << 16 |
      imm3 << 12 |
      imm8;
  Emit32(encoding);
}
2431
2432
// RBIT rd, rm: reverse the bit order of rm into rd (32-bit encoding only).
void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);
  // Note: rm appears twice (bits 19-16 and bits 3-0) as required by the encoding.
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 | B5 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2450
2451
// LDREX rt, [rn, #imm]: exclusive load with an immediate offset.
// The encoding stores imm >> 2, so imm should be a 4-byte-aligned value
// below 1024; NOTE(review): unaligned values silently lose their low bits.
void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      0xf << 8 |
      imm >> 2;
  Emit32(encoding);
}
2465
2466
2467void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
2468 ldrex(rt, rn, 0, cond);
2469}
2470
2471
// STREX rd, rt, [rn, #imm]: exclusive store; rd receives the success status.
// The encoding stores imm >> 2, so imm should be a 4-byte-aligned value
// below 1024; NOTE(review): unaligned values silently lose their low bits.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            uint16_t imm,
                            Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm >> 2;
  Emit32(encoding);
}
2490
2491
// LDREXD rt, rt2, [rn]: exclusive load of a doubleword into a register pair.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);  // The two destination registers must differ.
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 | B3 | B2 | B1 | B0;
  Emit32(encoding);
}
2506
2507
Dave Allison65fcc2c2014-04-28 13:45:27 -07002508void Thumb2Assembler::strex(Register rd,
2509 Register rt,
2510 Register rn,
2511 Condition cond) {
2512 strex(rd, rt, rn, 0, cond);
2513}
2514
2515
// STREXD rd, rt, rt2, [rn]: exclusive store of a register pair; rd receives
// the success status and must be distinct from both source registers.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CHECK_NE(rd, rt);
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 |
      static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2534
2535
Dave Allison65fcc2c2014-04-28 13:45:27 -07002536void Thumb2Assembler::clrex(Condition cond) {
2537 CheckCondition(cond);
2538 int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
2539 B21 | B20 |
2540 0xf << 16 |
2541 B15 |
2542 0xf << 8 |
2543 B5 |
2544 0xf;
2545 Emit32(encoding);
2546}
2547
2548
// NOP: 16-bit encoding T1 (0xbf00).
void Thumb2Assembler::nop(Condition cond) {
  CheckCondition(cond);
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8;
  Emit16(static_cast<int16_t>(encoding));
}
2555
2556
// VMOV sn, rt: move a core register into a single-precision VFP register.
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // sn is split into a 4-bit field (bits 19-16) and a 1-bit field (bit 7).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2570
2571
// VMOV rt, sn: move a single-precision VFP register into a core register.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // B20 selects the to-core-register direction; otherwise as vmovsr.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B20 |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2585
2586
// VMOV sm, sm+1, rt, rt2: move two core registers into two consecutive
// single-precision registers (sm must not be S31 so that sm+1 exists).
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2606
2607
// VMOV rt, rt2, sm, sm+1: move two consecutive single-precision registers
// into two (distinct) core registers.
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Destinations must differ for the load direction.
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 | B20 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2628
2629
// VMOV dm, rt, rt2: move two core registers into a double-precision register.
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  // dm is split into a 1-bit field (bit 5) and a 4-bit field (bits 3-0).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2648
2649
// VMOV rt, rt2, dm: move a double-precision register into two (distinct)
// core registers.
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Destinations must differ for the load direction.
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 | B20 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2669
2670
// VLDR sd, [address]: load a single-precision register from memory.
void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);  // No-op cast, kept from the shared form.
  CHECK_NE(sd, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 | B20 |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      B11 | B9 | addr.vencoding();  // addr.vencoding() supplies base register and offset bits.
  Emit32(encoding);
}
2682
2683
// VSTR sd, [address]: store a single-precision register to memory.
// PC-relative stores are rejected.
void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);  // No-op cast, kept from the shared form.
  CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
  CHECK_NE(sd, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      B11 | B9 | addr.vencoding();
  Emit32(encoding);
}
2696
2697
// VLDR dd, [address]: load a double-precision register from memory.
void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);  // No-op cast, kept from the shared form.
  CHECK_NE(dd, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 | B20 |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      B11 | B9 | B8 | addr.vencoding();  // B8 selects the double-precision form.
  Emit32(encoding);
}
2709
2710
// VSTR dd, [address]: store a double-precision register to memory.
// PC-relative stores are rejected.
void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);  // No-op cast, kept from the shared form.
  CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
  CHECK_NE(dd, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B24 |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      B11 | B9 | B8 | addr.vencoding();
  Emit32(encoding);
}
2723
2724
2725void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
2726 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
2727}
2728
2729
2730void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
2731 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
2732}
2733
2734
2735void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
2736 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
2737}
2738
2739
2740void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
2741 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
2742}
2743
2744
// Shared emitter for VPUSH/VPOP of S or D registers.
// 'reg' is the raw number of the first register, 'nregs' the register count,
// 'push' selects VPUSH vs VPOP and 'dbl' selects D vs S registers.
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
      B11 | B9 |
      (dbl ? B8 : 0) |                  // Double vs single precision.
      (push ? B24 : (B23 | B20)) |      // VPUSH (decrement before) vs VPOP (increment after, load).
      14U /* 0b1110 */ << 28 |
      nregs << (dbl ? 1 : 0) |          // imm8 counts words: doubles take two words each.
      D << 22 |
      Vd << 12;
  Emit32(encoding);
}
2769
2770
// Emit a three-operand single-precision VFP data-processing instruction;
// 'opcode' supplies the operation-specific bits. Each Sx register is split
// into a 4-bit field and a 1-bit field as required by the encoding.
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(sn) & 1)*B7) |
      ((static_cast<int32_t>(sm) & 1)*B5) |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2787
2788
// Emit a three-operand double-precision VFP data-processing instruction;
// 'opcode' supplies the operation-specific bits. Each Dx register is split
// into a 4-bit field and a 1-bit field as required by the encoding.
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | B8 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dn) & 0xf)*B16) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(dn) >> 4)*B7) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2805
2806
// Emit a VFP instruction with a single-precision destination and a
// double-precision source (e.g. conversions); 'opcode' supplies the
// operation-specific bits.
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2820
2821
// Emit a VFP instruction with a double-precision destination and a
// single-precision source (e.g. conversions); 'opcode' supplies the
// operation-specific bits.
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(sm) & 1)*B5) |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2835
2836
// VMRS APSR_nzcv, FPSCR: copy the floating-point comparison flags into the
// APSR condition flags (Rt = PC encodes the APSR_nzcv destination).
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
      (static_cast<int32_t>(PC)*B12) |
      B11 | B9 | B4;
  Emit32(encoding);
}
2846
2847
// SVC #imm8: supervisor call, 16-bit encoding.
void Thumb2Assembler::svc(uint32_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B14 | B12 |
      B11 | B10 | B9 | B8 |
      imm8;
  Emit16(encoding);
}
2855
2856
// BKPT #imm8: software breakpoint, 16-bit encoding.
void Thumb2Assembler::bkpt(uint16_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 |
      imm8;
  Emit16(encoding);
}
2864
// Convert the given IT state to a mask bit given bit 0 of the first
// condition and a shift position. kItOmitted contributes the terminating
// '1' bit; kItThen/kItElse contribute firstcond0 or its inverse.
static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
  switch (s) {
    case kItOmitted: return 1 << shift;
    case kItThen: return firstcond0 << shift;
    case kItElse: return !firstcond0 << shift;
  }
  return 0;  // Unreachable for valid ItState values; silences missing-return warnings.
}
2875
2876
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;
    case kItThen: it_conditions_[index] = cond; break;
    case kItElse:
      // The 'else' condition is the inverse; ARM condition codes invert by
      // toggling the low bit.
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
2888
2889
// IT (If-Then): start an IT block of up to four conditional instructions.
// i1..i3 describe the 2nd-4th slots (then/else/omitted); the 4-bit mask
// encodes the block length and the then/else pattern. Also records the
// per-slot conditions so that following instructions can be checked.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);  // IT itself is not allowed inside an IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // Reset all tracked conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // The first slot always uses firstcond; accumulate mask bits for the rest.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        mask |= 1U /* 0b0001 */;  // Terminating bit for a full 4-instruction block.
      }
    }
  }

  // Start checking at the first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8 |
      firstcond << 4 |
      mask;
  Emit16(encoding);
}
2924
2925
// CBZ rn, label: compare and branch on zero. Only forward branches and low
// registers are supported by the instruction.
void Thumb2Assembler::cbz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link this branch into the label's chain of unresolved forward branches.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
    label->LinkTo(branchid);
  }
}
2939
2940
// CBNZ rn, label: compare and branch on non-zero. Only forward branches and
// low registers are supported by the instruction.
void Thumb2Assembler::cbnz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbnz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbnz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link this branch into the label's chain of unresolved forward branches.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
    label->LinkTo(branchid);
  }
}
2954
2955
// BLX rm (register form): branch with link and exchange, 16-bit encoding.
void Thumb2Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
2962
2963
// BX rm: branch and exchange, 16-bit encoding (differs from blx only by B7).
void Thumb2Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
2970
2971
2972void Thumb2Assembler::Push(Register rd, Condition cond) {
2973 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
2974}
2975
2976
2977void Thumb2Assembler::Pop(Register rd, Condition cond) {
2978 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
2979}
2980
2981
2982void Thumb2Assembler::PushList(RegList regs, Condition cond) {
2983 stm(DB_W, SP, regs, cond);
2984}
2985
2986
2987void Thumb2Assembler::PopList(RegList regs, Condition cond) {
2988 ldm(IA_W, SP, regs, cond);
2989}
2990
2991
2992void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
2993 if (cond != AL || rd != rm) {
2994 mov(rd, ShifterOperand(rm), cond);
2995 }
2996}
2997
2998
Dave Allison65fcc2c2014-04-28 13:45:27 -07002999void Thumb2Assembler::Bind(Label* label) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00003000 BindLabel(label, buffer_.Size());
Dave Allison65fcc2c2014-04-28 13:45:27 -07003001}
3002
3003
3004void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003005 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003006 CHECK_LE(shift_imm, 31u);
Dave Allison45fdb932014-06-25 12:37:10 -07003007 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003008 EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003009}
3010
3011
3012void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003013 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003014 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003015 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003016 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003017 EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003018}
3019
3020
3021void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003022 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003023 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003024 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003025 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003026 EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003027}
3028
3029
3030void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003031 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003032 CHECK(1u <= shift_imm && shift_imm <= 31u);
Dave Allison45fdb932014-06-25 12:37:10 -07003033 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003034 EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003035}
3036
3037
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003038void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003039 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003040 EmitShift(rd, rm, RRX, rm, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003041}
3042
3043
3044void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003045 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003046 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003047 EmitShift(rd, rm, LSL, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003048}
3049
3050
3051void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003052 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003053 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003054 EmitShift(rd, rm, LSR, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003055}
3056
3057
3058void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003059 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003060 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003061 EmitShift(rd, rm, ASR, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003062}
3063
3064
3065void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003066 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003067 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003068 EmitShift(rd, rm, ROR, rn, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003069}
3070
3071
3072int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
3073 // The offset is off by 4 due to the way the ARM CPUs read PC.
3074 offset -= 4;
3075 offset >>= 1;
3076
3077 uint32_t value = 0;
3078 // There are two different encodings depending on the value of bit 12. In one case
3079 // intermediate values are calculated using the sign bit.
3080 if ((inst & B12) == B12) {
3081 // 25 bits of offset.
3082 uint32_t signbit = (offset >> 31) & 0x1;
3083 uint32_t i1 = (offset >> 22) & 0x1;
3084 uint32_t i2 = (offset >> 21) & 0x1;
3085 uint32_t imm10 = (offset >> 11) & 0x03ff;
3086 uint32_t imm11 = offset & 0x07ff;
3087 uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
3088 uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
3089 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
3090 imm11;
3091 // Remove the offset from the current encoding.
3092 inst &= ~(0x3ff << 16 | 0x7ff);
3093 } else {
3094 uint32_t signbit = (offset >> 31) & 0x1;
3095 uint32_t imm6 = (offset >> 11) & 0x03f;
3096 uint32_t imm11 = offset & 0x07ff;
3097 uint32_t j1 = (offset >> 19) & 1;
3098 uint32_t j2 = (offset >> 17) & 1;
3099 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
3100 imm11;
3101 // Remove the offset from the current encoding.
3102 inst &= ~(0x3f << 16 | 0x7ff);
3103 }
3104 // Mask out offset bits in current instruction.
3105 inst &= ~(B26 | B13 | B11);
3106 inst |= value;
3107 return inst;
3108}
3109
3110
// Extract the PC-relative byte offset from an encoded 32-bit Thumb2 branch.
// Bit 12 distinguishes the unconditional T4 form (J1/J2 xored with the sign
// bit) from the conditional T3 form (J1/J2 are plain offset bits).
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    // Encoding T4: imm32 = SignExtend(S:I1:I2:imm10:imm11:0), Ix = NOT(Jx XOR S).
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    // Encoding T3: imm32 = SignExtend(S:J2:J1:imm6:imm11:0).
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  imm32 += 4;  // Compensate for the PC read-ahead applied when encoding.
  return imm32;
}
3137
// Map a position in the original (pre-expansion) code to its position after
// accounting for all preceding fixups that grew from their original size.
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Queried position moved backwards; restart the cached scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;  // This and all later fixups lie at or after the queried position.
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3162
// Create a new literal of the given size (only 4 and 8 bytes are supported)
// from 'data'. The returned pointer stays valid while the assembler lives.
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  return &literals_.back();
}
3168
// PC-relative load of a 32-bit literal into a core register. Emits a
// placeholder (16- or 32-bit) that the fixup pass later patches; the
// placeholder halfword carries the link to the literal's label chain.
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  // High registers force the 32-bit LDR literal form.
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Second halfword placeholder for the 32-bit form.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3183
// PC-relative load of a 64-bit literal into a core register pair (LDRD);
// always a 32-bit instruction, so two placeholder halfwords are emitted.
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3195
// PC-relative load of a 32-bit literal into a single-precision register
// (VLDR); always a 32-bit instruction, so two placeholder halfwords.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3206
// PC-relative load of a 64-bit literal into a double-precision register
// (VLDR); always a 32-bit instruction, so two placeholder halfwords.
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003217
Dave Allison65fcc2c2014-04-28 13:45:27 -07003218
3219void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
Vladimir Marko449b1092015-09-08 12:16:45 +01003220 Condition cond, SetCc set_cc) {
3221 if (value == 0 && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003222 if (rd != rn) {
3223 mov(rd, ShifterOperand(rn), cond);
3224 }
3225 return;
3226 }
3227 // We prefer to select the shorter code sequence rather than selecting add for
3228 // positive values and sub for negatives ones, which would slightly improve
3229 // the readability of generated code for some constants.
3230 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003231 if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003232 add(rd, rn, shifter_op, cond, set_cc);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003233 } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003234 sub(rd, rn, shifter_op, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003235 } else {
3236 CHECK(rn != IP);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003237 if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003238 mvn(IP, shifter_op, cond, kCcKeep);
3239 add(rd, rn, ShifterOperand(IP), cond, set_cc);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003240 } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003241 mvn(IP, shifter_op, cond, kCcKeep);
3242 sub(rd, rn, ShifterOperand(IP), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003243 } else {
3244 movw(IP, Low16Bits(value), cond);
3245 uint16_t value_high = High16Bits(value);
3246 if (value_high != 0) {
3247 movt(IP, value_high, cond);
3248 }
Vladimir Marko449b1092015-09-08 12:16:45 +01003249 add(rd, rn, ShifterOperand(IP), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003250 }
3251 }
3252}
3253
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003254
Dave Allison65fcc2c2014-04-28 13:45:27 -07003255void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3256 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003257 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003258 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003259 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003260 mvn(rd, shifter_op, cond);
3261 } else {
3262 movw(rd, Low16Bits(value), cond);
3263 uint16_t value_high = High16Bits(value);
3264 if (value_high != 0) {
3265 movt(rd, value_high, cond);
3266 }
3267 }
3268}
3269
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003270
Dave Allison65fcc2c2014-04-28 13:45:27 -07003271// Implementation note: this method must emit at most one instruction when
3272// Address::CanHoldLoadOffsetThumb.
3273void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3274 Register reg,
3275 Register base,
3276 int32_t offset,
3277 Condition cond) {
3278 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003279 CHECK_NE(base, IP);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003280 LoadImmediate(IP, offset, cond);
3281 add(IP, IP, ShifterOperand(base), cond);
3282 base = IP;
3283 offset = 0;
3284 }
3285 CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
3286 switch (type) {
3287 case kLoadSignedByte:
3288 ldrsb(reg, Address(base, offset), cond);
3289 break;
3290 case kLoadUnsignedByte:
3291 ldrb(reg, Address(base, offset), cond);
3292 break;
3293 case kLoadSignedHalfword:
3294 ldrsh(reg, Address(base, offset), cond);
3295 break;
3296 case kLoadUnsignedHalfword:
3297 ldrh(reg, Address(base, offset), cond);
3298 break;
3299 case kLoadWord:
3300 ldr(reg, Address(base, offset), cond);
3301 break;
3302 case kLoadWordPair:
3303 ldrd(reg, Address(base, offset), cond);
3304 break;
3305 default:
3306 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003307 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003308 }
3309}
3310
3311
3312// Implementation note: this method must emit at most one instruction when
3313// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3314void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3315 Register base,
3316 int32_t offset,
3317 Condition cond) {
3318 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3319 CHECK_NE(base, IP);
3320 LoadImmediate(IP, offset, cond);
3321 add(IP, IP, ShifterOperand(base), cond);
3322 base = IP;
3323 offset = 0;
3324 }
3325 CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
3326 vldrs(reg, Address(base, offset), cond);
3327}
3328
3329
3330// Implementation note: this method must emit at most one instruction when
3331// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3332void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3333 Register base,
3334 int32_t offset,
3335 Condition cond) {
3336 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3337 CHECK_NE(base, IP);
3338 LoadImmediate(IP, offset, cond);
3339 add(IP, IP, ShifterOperand(base), cond);
3340 base = IP;
3341 offset = 0;
3342 }
3343 CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
3344 vldrd(reg, Address(base, offset), cond);
3345}
3346
3347
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  // Temporary register used to materialize `base` + `offset` when the
  // displacement does not fit the store's addressing mode; kNoRegister
  // means no fix-up sequence was needed.
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      // IP is free: it is neither the stored register nor, for a word-pair
      // store, the second register of the pair.
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push above moved SP down by one slot; compensate the
        // SP-relative offset accordingly.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    // NOTE(review): this add is emitted with AL rather than `cond`, unlike
    // the analogous sequence in LoadFromOffset — confirm this is intended.
    add(tmp_reg, tmp_reg, ShifterOperand(base), AL);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the register we spilled above, if any (IP needs no save/restore).
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3402
3403
3404// Implementation note: this method must emit at most one instruction when
3405// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3406void Thumb2Assembler::StoreSToOffset(SRegister reg,
3407 Register base,
3408 int32_t offset,
3409 Condition cond) {
3410 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3411 CHECK_NE(base, IP);
3412 LoadImmediate(IP, offset, cond);
3413 add(IP, IP, ShifterOperand(base), cond);
3414 base = IP;
3415 offset = 0;
3416 }
3417 CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
3418 vstrs(reg, Address(base, offset), cond);
3419}
3420
3421
3422// Implementation note: this method must emit at most one instruction when
3423// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3424void Thumb2Assembler::StoreDToOffset(DRegister reg,
3425 Register base,
3426 int32_t offset,
3427 Condition cond) {
3428 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3429 CHECK_NE(base, IP);
3430 LoadImmediate(IP, offset, cond);
3431 add(IP, IP, ShifterOperand(base), cond);
3432 base = IP;
3433 offset = 0;
3434 }
3435 CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
3436 vstrd(reg, Address(base, offset), cond);
3437}
3438
3439
3440void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
3441 CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003442 dmb(SY);
3443}
3444
3445
3446void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003447 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3448 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003449}
3450
3451
3452void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003453 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003454 cbz(r, label);
3455 } else {
3456 cmp(r, ShifterOperand(0));
3457 b(label, EQ);
3458 }
3459}
3460
3461
Dave Allison65fcc2c2014-04-28 13:45:27 -07003462void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003463 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003464 cbnz(r, label);
3465 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003466 cmp(r, ShifterOperand(0));
3467 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003468 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003469}
3470} // namespace arm
3471} // namespace art