blob: fb3aa1ea85a03532f85188d7b1305c624c95585f [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
// Builds the reverse dependency mapping for all of the assembler's Fixups and stores it
// in a single flat array (fixup_dependents_); each Fixup references its slice of that
// array via dependents_start_/dependents_count_.
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // First pass: count the number of dependents of each Fixup. A Fixup depends on every
  // Fixup lying between its own location and its target, so it is registered as a
  // dependent of each of those in-between Fixups.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward reference: walk the following Fixups up to the target.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward reference: walk the preceding Fixups back to the target.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;
  }
  // Second pass: create and fill in the fixup_dependents_. Each write decrements the
  // owner's dependents_start_, so it ends up pointing at the start of its range.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
// Binds `label` to `bound_pc` and resolves every Fixup in the label's linked chain.
// While a label is linked, its position holds the id of the first Fixup in the chain,
// and the 16-bit placeholder stored at each Fixup's location holds the next link.
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
94
Andreas Gampe7cffc3b2015-10-19 21:31:53 -070095uint32_t Thumb2Assembler::BindLiterals() {
Vladimir Markocf93a5c2015-06-16 11:33:24 +000096 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
97 uint32_t code_size = buffer_.Size();
98 for (Literal& lit : literals_) {
99 Label* label = lit.GetLabel();
100 BindLabel(label, code_size);
101 code_size += lit.GetSize();
102 }
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700103 return code_size;
104}
105
106void Thumb2Assembler::BindJumpTables(uint32_t code_size) {
107 for (JumpTable& table : jump_tables_) {
108 Label* label = table.GetLabel();
109 BindLabel(label, code_size);
110 code_size += table.GetSize();
111 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000112}
113
// Asks `fixup` whether it must grow given the current code size. If it grows, bumps
// *current_code_size and queues all Fixups that depend on it for re-evaluation. The
// 16-bit placeholder at a Fixup's buffer location doubles as an "already queued"
// marker so a Fixup is never in the deque twice.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);  // Mark as queued.
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
129
// Iterates to a fixed point, growing every Fixup whose current encoding cannot reach
// its target and propagating the size changes through the dependency graph. Returns
// the final code size; literal/jump-table labels are shifted to account for the
// adjustments (and the eventual alignment padding) but the padding itself is emitted
// later, in EmitLiterals()/EmitJumpTables().
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass over all Fixups; any that grow seed the work queue with dependents.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" marker.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && (!literals_.empty() || !jump_tables_.empty())) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding. The adjustment is the total Fixup growth
  // plus (current_code_size & 2) bytes of 4-byte alignment padding, if required.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();  // A bound label must be reinitialized before rebinding.
      label->BindTo(old_position + literals_adjustment);
    }
    for (JumpTable& table : jump_tables_) {
      Label* label = table.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
196
// Finalizes and emits all Fixups, shifting the intervening code to its final position.
// Works from the last Fixup backwards so each span of unchanged code is moved at most
// once (buffer grows at the end, so later bytes must move first).
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // Both cursors must meet at the start of the buffer when all Fixups are processed.
  CHECK_EQ(src_end, dest_end);
}
227
// Appends the literal pool to the end of the code, inserting a 2-byte padding
// half-word first if needed for 4-byte alignment.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Padding half-word to reach 4-byte alignment.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      // The label must have been adjusted to this exact position by AdjustFixups().
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
247
// Appends all jump tables after the code/literals, 4-byte aligned. Each table entry
// is the signed 32-bit offset of a target label relative to the table's anchor + 4.
void Thumb2Assembler::EmitJumpTables() {
  if (!jump_tables_.empty()) {
    // Jump tables require 4 byte alignment. (We don't support byte and half-word jump tables.)
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Padding half-word to reach 4-byte alignment.
    }
    for (JumpTable& table : jump_tables_) {
      // Bulk ensure capacity, as this may be large.
      size_t orig_size = buffer_.Size();
      buffer_.ExtendCapacity(orig_size + table.GetSize());
#ifndef NDEBUG
      // Satisfy the buffer's debug bookkeeping manually, since we bypassed
      // the EnsureCapacity RAII helper above.
      buffer_.has_ensured_capacity_ = true;
#endif

      DCHECK_EQ(static_cast<size_t>(table.GetLabel()->Position()), buffer_.Size());
      // Offsets are computed relative to the anchor's PC, i.e. anchor position + 4.
      int32_t anchor_position = table.GetAnchorLabel()->Position() + 4;

      for (Label* target : table.GetData()) {
        // Ensure that the label was tracked, so that it will have the right position.
        DCHECK(std::find(tracked_labels_.begin(), tracked_labels_.end(), target) !=
               tracked_labels_.end());

        int32_t offset = target->Position() - anchor_position;
        buffer_.Emit<int32_t>(offset);
      }

#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = false;
#endif
      size_t new_size = buffer_.Size();
      DCHECK_LE(new_size - orig_size, table.GetSize());
    }
  }
}
284
// Encodes a 16-bit Thumb B instruction: the conditional form (imm8) when `cond` is
// not AL, otherwise the unconditional form (imm11). `offset` must be half-word
// aligned and within the encodable signed range asserted below.
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}
297
// Encodes a 32-bit Thumb2 B instruction: the conditional form (20-bit offset) when
// `cond` is not AL, otherwise the unconditional form (24-bit offset). The offset is
// scattered across imm11, imm6/imm10, S and the J1/J2 bits as commented below.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22) ^ s ^ 1) & 1;  // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
320
// Encodes a 16-bit CBZ (cond == EQ) or CBNZ (cond == NE) instruction testing low
// register `rn` with an unsigned, half-word aligned forward `offset`.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 11.
}
330
331inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
332 DCHECK(!IsHighRegister(rn));
333 DCHECK(IsUint<8>(value));
334 return B13 | B11 | (rn << 8) | value;
335}
336
// Encodes a 16-bit ADD rdn, rm (rdn = rdn + rm); both registers may be high.
inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
  // The high bit of rdn is moved across the 4-bit rm field, to bit 7.
  return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
      (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
}
342
// Encodes a 32-bit MOVW (move 16-bit immediate into the low half-word of `rd`).
inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
  DCHECK(IsUint<16>(value));
  return B31 | B30 | B29 | B28 | B25 | B22 |
      (static_cast<int32_t>(rd) << 8) |
      ((value & 0xf000) << (16 - 12)) |   // Move imm4 from bits 12-15 to bits 16-19.
      ((value & 0x0800) << (26 - 11)) |   // Move i from bit 11 to bit 26.
      ((value & 0x0700) << (12 - 8)) |    // Move imm3 from bits 8-10 to bits 12-14.
      (value & 0xff);                     // Keep imm8 in bits 0-7.
}
352
353inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
354 DCHECK_EQ(value & 0xffff, 0);
355 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
356 return movw_encoding | B25 | B23;
357}
358
359inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
360 uint32_t mod_imm = ModifiedImmediate(value);
361 DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
362 return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
363 (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
364}
365
// Encodes a 16-bit PC-relative LDR of a literal into low register `rt`;
// `offset` is an unsigned, 4-byte aligned byte offset, stored as words.
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}
372
// Encodes a 32-bit PC-relative LDR literal as LDR rt, [PC, #offset].
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}
377
// Encodes a 32-bit LDRD rt, rt2, [rn, #offset] with P=1 (offset addressing),
// U=1 (positive offset) and W=0 (no write-back); the byte offset is stored as words.
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}
386
// Encodes a 32-bit VLDR.32 sd, [rn, #offset] (single-precision load, positive
// word-scaled offset).
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |   // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |   // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}
397
// Encodes a 32-bit VLDR.64 dd, [rn, #offset] (double-precision load, positive
// word-scaled offset).
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |   // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |   // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);
}
408
// Encodes a 16-bit LDR rt, [rn, #offset] with low registers and a 7-bit,
// 4-byte aligned byte offset (stored as a 5-bit word offset).
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
418
// Selects the 32-bit load encoding matching this Fixup's literal type (LDRD for a
// wide core-register pair, VLDR for FP single/double) with base `rbase` and the
// given byte offset. Aborts on any other fixup type.
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
432
433inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
434 DCHECK(IsUint<12>(offset));
435 return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
436}
437
// Encodes a 16-bit ADR (address to register): rd = PC-relative address, with a
// 4-byte aligned unsigned offset stored as words. `rd` must be a low register.
inline int16_t Thumb2Assembler::AdrEncoding16(Register rd, int32_t offset) {
  DCHECK(IsUint<10>(offset));
  DCHECK(IsAligned<4>(offset));
  DCHECK(!IsHighRegister(rd));
  return B15 | B13 | (rd << 8) | (offset >> 2);
}
444
// Encodes a 32-bit ADR with a 12-bit unsigned offset, split into i/imm3/imm8 fields.
inline int32_t Thumb2Assembler::AdrEncoding32(Register rd, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  // Bit 26: offset[11]
  // Bits 14-12: offset[10-8]
  // Bits 7-0: offset[7-0]
  int32_t immediate_mask =
      ((offset & (1 << 11)) << (26 - 11)) |
      ((offset & (7 << 8)) << (12 - 8)) |
      (offset & 0xFF);
  return B31 | B30 | B29 | B28 | B25 | B19 | B18 | B17 | B16 | (rd << 8) | immediate_mask;
}
456
// Finalizes the assembled code: binds literals and jump tables at tentative
// positions, grows out-of-range fixups, then emits fixups, literals, finalized
// label positions and jump tables into their final locations, in that order.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  uint32_t size_after_literals = BindLiterals();
  BindJumpTables(size_after_literals);
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
  FinalizeTrackedLabels();  // Must happen before jump-table emission reads label positions.
  EmitJumpTables();
}
467
// Returns true if `immediate` is encodable as a Thumb2 modified immediate,
// i.e. usable as a shifter operand regardless of the opcode.
bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
  return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
}
471
// Returns true if `immediate` can be encoded as the immediate shifter operand of
// `opcode`, filling in `shifter_op` as a plain immediate either way. The rd/rn
// registers do not affect the answer here and are ignored.
bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
                                            Register rn ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            uint32_t immediate,
                                            ShifterOperand* shifter_op) {
  shifter_op->type_ = ShifterOperand::kImmediate;
  shifter_op->immed_ = immediate;
  shifter_op->is_shift_ = false;
  shifter_op->is_rotate_ = false;
  switch (opcode) {
    case ADD:
    case SUB:
      // ADD/SUB additionally accept any plain 12-bit immediate.
      if (immediate < (1 << 12)) {    // Less than (or equal to) 12 bits can always be done.
        return true;
      }
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MOV:
      // TODO: Support less than or equal to 12bits.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MVN:
    default:
      // All other opcodes only support modified immediates.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
  }
}
498
// AND: rd = rn & so, via the shared data-processing emitter.
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}
503
504
// EOR: rd = rn ^ so.
void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}
509
510
// SUB: rd = rn - so.
void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}
515
516
// RSB (reverse subtract): rd = so - rn.
void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}
521
522
// ADD: rd = rn + so.
void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}
527
528
// ADC (add with carry): rd = rn + so + carry.
void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}
533
534
// SBC (subtract with carry): rd = rn - so - (1 - carry).
void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}
539
540
// RSC (reverse subtract with carry): rd = so - rn - (1 - carry).
void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}
545
546
// TST: set flags on rn & so; always sets condition codes, no destination register.
void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}
551
552
// TEQ: set flags on rn ^ so; always sets condition codes, no destination register.
void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}
557
558
// CMP: set flags on rn - so.
void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}
562
563
// CMN: set flags on rn + so.
void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}
567
568
// ORR: rd = rn | so.
void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}
573
574
// ORN: rd = rn | ~so.
void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
}
579
580
// MOV: rd = so (rn is unused for moves; R0 is passed as a placeholder).
void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}
585
586
// BIC (bit clear): rd = rn & ~so.
void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}
591
592
// MVN: rd = ~so (rn is unused; R0 is passed as a placeholder).
void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
597
598
// MUL: rd = rn * rm. Uses the 16-bit encoding when rd == rm, both registers are
// low, and 32-bit encodings are not being forced; otherwise the 32-bit encoding.
void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
    // 16 bit.
    int16_t encoding = B14 | B9 | B8 | B6 |
        rn << 3 | rd;
    Emit16(encoding);
  } else {
    // 32 bit.
    uint32_t op1 = 0U /* 0b000 */;
    uint32_t op2 = 0U /* 0b00 */;
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
        op1 << 20 |
        B15 | B14 | B13 | B12 |
        op2 << 4 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rn) << 16 |
        static_cast<uint32_t>(rm);

    Emit32(encoding);
  }
}
622
623
// MLA (multiply accumulate): rd = ra + rn * rm. 32-bit encoding only.
void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                          Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000 */;
  uint32_t op2 = 0U /* 0b00 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(ra) << 12 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
640
641
642void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
643 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700644 CheckCondition(cond);
645
Andreas Gampec8ccf682014-09-29 20:07:43 -0700646 uint32_t op1 = 0U /* 0b000 */;
647 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700648 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
649 op1 << 20 |
650 op2 << 4 |
651 static_cast<uint32_t>(rd) << 8 |
652 static_cast<uint32_t>(ra) << 12 |
653 static_cast<uint32_t>(rn) << 16 |
654 static_cast<uint32_t>(rm);
655
656 Emit32(encoding);
657}
658
659
// SMULL (signed multiply long): rd_hi:rd_lo = rn * rm (64-bit signed product).
void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
676
677
// UMULL (unsigned multiply long): rd_hi:rd_lo = rn * rm (64-bit unsigned product).
void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 2U /* 0b010; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
694
695
// SDIV (signed divide): rd = rn / rm. Bits 12-15 are required to be 0b1111.
void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
711
712
// UDIV (unsigned divide): rd = rn / rm. Differs from SDIV by the B21 opcode bit.
void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
728
729
// SBFX (signed bit-field extract): rd = sign-extended rn[lsb .. lsb+width-1].
// `lsb` is split into imm3:imm2; the encoding stores width - 1.
void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);              // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 20U /* 0b10100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
749
750
// UBFX (unsigned bit-field extract): rd = zero-extended rn[lsb .. lsb+width-1].
// Identical layout to SBFX except for the opcode field.
void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);              // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 28U /* 0b11100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
770
771
// LDR - load 32-bit word. Flags: (load=true, byte=false, half=false, signed=false).
void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}
775
776
// STR - store 32-bit word.
void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}
780
781
// LDRB - load unsigned byte (zero-extended).
void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}
785
786
// STRB - store byte.
void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}
790
791
// LDRH - load unsigned halfword (zero-extended).
void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}
795
796
// STRH - store halfword.
void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}
800
801
// LDRSB - load signed byte (sign-extended).
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}
805
806
// LDRSH - load signed halfword (sign-extended).
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
810
811
// LDRD with the implicit consecutive-register pair {rd, rd+1}.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}
815
816
// LDRD - load doubleword into two registers: rd := [addr], rd2 := [addr+4].
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
827
828
// STRD with the implicit consecutive-register pair {rd, rd+1}.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}
832
833
// STRD - store doubleword from two registers. Same layout as ldrd minus B20
// (the load bit).
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
843}
844
845
// Load multiple registers. A single-register RegList cannot be encoded with
// the Thumb LDM instruction, so it is replaced with an equivalent ldr.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the single-register replacement is a post-indexed ldr
    // (load then base += 4); confirm this matches the callers' expectation
    // for the DB_W mode checked above.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
862
863
// Store multiple registers. A single-register RegList cannot be encoded with
// the Thumb STM instruction, so it is replaced with an equivalent str.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): the replacement str uses offset -kRegisterSize, with
    // writeback (PreIndex) for IA and no writeback (Offset) for IA_W;
    // verify this mapping against the callers' intended semantics.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
881
882
// VMOV.F32 sd, #imm - move a floating-point immediate into sd.
// Only values representable as an 8-bit VFP modified immediate qualify: the
// low 19 mantissa bits must be zero and the exponent field restricted, as
// tested below. Returns false (emits nothing) if `s_imm` is not encodable.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign, inverted-exponent bit and top mantissa bits into imm8.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
896
897
// VMOV.F64 dd, #imm - move a floating-point immediate into dd.
// Double-precision analogue of vmovs(SRegister, float): the low 48 mantissa
// bits must be zero and the exponent restricted. Returns false (emits
// nothing) if `d_imm` is not encodable as an 8-bit VFP immediate.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign, inverted-exponent bit and top mantissa bits into imm8.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
911
912
// VMOV.F32 sd, sm - register-to-register single-precision move.
void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}
916
917
// VMOV.F64 dd, dm - register-to-register double-precision move.
void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}
921
922
// VADD.F32 sd, sn, sm - single-precision add.
void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}
927
928
// VADD.F64 dd, dn, dm - double-precision add.
void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}
933
934
// VSUB.F32 sd, sn, sm - single-precision subtract.
void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}
939
940
// VSUB.F64 dd, dn, dm - double-precision subtract.
void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}
945
946
// VMUL.F32 sd, sn, sm - single-precision multiply.
void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}
951
952
// VMUL.F64 dd, dn, dm - double-precision multiply.
void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}
957
958
// VMLA.F32 sd, sn, sm - single-precision multiply-accumulate.
void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}
963
964
// VMLA.F64 dd, dn, dm - double-precision multiply-accumulate.
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}
969
970
// VMLS.F32 sd, sn, sm - single-precision multiply-subtract.
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}
975
976
// VMLS.F64 dd, dn, dm - double-precision multiply-subtract.
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
981
982
// VDIV.F32 sd, sn, sm - single-precision divide.
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}
987
988
// VDIV.F64 dd, dn, dm - double-precision divide.
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
993
994
// VABS.F32 sd, sm - single-precision absolute value.
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}
998
999
// VABS.F64 dd, dm - double-precision absolute value.
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}
1003
1004
// VNEG.F32 sd, sm - single-precision negate.
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}
1008
1009
// VNEG.F64 dd, dm - double-precision negate.
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}
1013
1014
// VSQRT.F32 sd, sm - single-precision square root.
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}
1018
// VSQRT.F64 dd, dm - double-precision square root.
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
1022
1023
// VCVT.F32.F64 sd, dm - convert double to single precision.
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}
1027
1028
// VCVT.F64.F32 dd, sm - convert single to double precision.
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}
1032
1033
// VCVT.S32.F32 sd, sm - convert single-precision float to signed int32.
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}
1037
1038
// VCVT.S32.F64 sd, dm - convert double to signed int32 (result in S register).
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}
1042
1043
// VCVT.F32.S32 sd, sm - convert signed int32 to single-precision float.
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}
1047
1048
// VCVT.F64.S32 dd, sm - convert signed int32 to double-precision float.
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}
1052
1053
// VCVT.U32.F32 sd, sm - convert single-precision float to unsigned int32.
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}
1057
1058
// VCVT.U32.F64 sd, dm - convert double to unsigned int32 (result in S register).
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}
1062
1063
// VCVT.F32.U32 sd, sm - convert unsigned int32 to single-precision float.
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}
1067
1068
// VCVT.F64.U32 dd, sm - convert unsigned int32 to double-precision float.
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
1072
1073
// VCMP.F32 sd, sm - single-precision compare.
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}
1077
1078
// VCMP.F64 dd, dm - double-precision compare.
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}
1082
1083
// VCMP.F32 sd, #0.0 - single-precision compare against zero.
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}
1087
1088
// VCMP.F64 dd, #0.0 - double-precision compare against zero.
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
1092
// B - (conditional) branch to `label`.
// The DCHECK forbids calling this while an IT-block condition is pending
// (next_condition_ must be AL).
void Thumb2Assembler::b(Label* label, Condition cond) {
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1097
1098
// BL - branch with link (call) to `label`.
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
1103
1104
// BLX - branch with link and exchange to `label`; unconditional only.
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
1108
1109
// Marks an exception-handler entry point: emits a TST PC, #0 marker
// instruction followed by an unconditional branch skipping over an embedded
// (never-taken) branch to `label`, which records the handler address.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1117
1118
1119void Thumb2Assembler::Emit32(int32_t value) {
1120 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1121 buffer_.Emit<int16_t>(value >> 16);
1122 buffer_.Emit<int16_t>(value & 0xffff);
1123}
1124
1125
// Appends a single 16-bit Thumb instruction to the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1130
1131
// Decides whether the data-processing operation (opcode, set_cc, rn, rd, so)
// requires a 32-bit Thumb2 encoding (returns true) or can use a 16-bit
// Thumb encoding (returns false). force_32bit_ forces everything to 32 bits.
// The checks mirror the Thumb 16-bit encoding restrictions: high registers,
// shifted operands, immediate ranges, and flag-setting rules inside/outside
// IT blocks.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
    case ORN:
      // TEQ and ORN have no 16-bit encoding at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if (!IsUint<3>(so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (!IsUint<8>(so.GetImmediate())) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1286
1287
// Emits a data-processing instruction in its 32-bit Thumb2 encoding.
// `cond` is unused here: conditionality comes from a preceding IT
// instruction, not from the encoding. TST/TEQ/CMP/CMN reuse the
// AND/EOR/SUB/ADD opcodes with rd forced to PC (0b1111); MOV/MVN force
// rn to PC. Immediate operands use either the 12-bit plain-immediate
// ADD/SUB forms or the Thumb2 "modified immediate" encoding.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "not mapped".
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the fatal check below.
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    case ORN: thumb_opcode = 3U /* 0b0011 */; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
      if (set_cc != kCcSet) {
        // Use the 12-bit plain-immediate ADDW/SUBW opcodes.
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8 fields.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1372
1373
// Emits a data-processing instruction in a 16-bit Thumb encoding.
// ADD/SUB are complex enough to be delegated to Emit16BitAddSub. A MOV with
// a shifted-register operand is converted into the 16-bit LSL/LSR/ASR
// immediate form. For two-register 16-bit forms, rn/rd are remapped below so
// that exactly two registers end up in the encoding.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "not mapped".
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the final halfword from the fields chosen above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1562
1563
// ADD and SUB are complex enough to warrant their own emitter.
//
// Emits a 16-bit Thumb ADD or SUB, choosing among the T1/T2 register and
// immediate encodings based on the operands:
//   - register form with rn == rd and no flag setting -> 4-bit-register T2 ADD;
//   - register form otherwise -> 3-register T1 (low registers only);
//   - immediate forms special-case SP-relative ADD/SUB (word-aligned, scaled
//     immediates) and fall back to the 3-bit-immediate T1 or 8-bit-immediate
//     T2 encodings.
// The caller (Emit16BitDataProcessing) has already decided a 16-bit encoding
// is possible; the CHECKs/DCHECKs here enforce the per-encoding constraints.
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  // Field positions for the generic 16-bit assembly at the bottom; each case
  // below overrides the ones it needs.
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 9 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
                                     (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1.
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          // The second source register travels in the "immediate" field bits.
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          // The encoding holds imm/4 (word-scaled).
          immediate >>= 2;
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          CHECK_LT(immediate, (1u << 10));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          // The encoding holds imm/4 (word-scaled).
          immediate >>= 2;
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        // The second source register travels in the "immediate" field bits.
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          // The encoding holds imm/4 (word-scaled).
          immediate >>= 2;
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the final 16-bit instruction from the fields selected above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1713
1714
1715void Thumb2Assembler::EmitDataProcessing(Condition cond,
1716 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001717 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001718 Register rn,
1719 Register rd,
1720 const ShifterOperand& so) {
1721 CHECK_NE(rd, kNoRegister);
1722 CheckCondition(cond);
1723
1724 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1725 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1726 } else {
1727 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1728 }
1729}
1730
// Emits rd := rm shifted by a constant `amount` (0..31).
// A 32-bit encoding is forced for high registers, for ROR/RRX (which have no
// 16-bit immediate form here), or when the set_cc request does not match what
// the 16-bit encoding would do (16-bit shifts set flags iff outside IT block).
void Thumb2Assembler::EmitShift(Register rd,
                                Register rm,
                                Shift shift,
                                uint8_t amount,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_LT(amount, (1 << 5));
  if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      // RRX is encoded as ROR with a zero immediate.
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (set_cc == kCcSet ? B20 : 0);
    // The 5-bit shift amount is split into imm3:imm2 fields.
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1775
// Emits rd := rn shifted by register rm. RRX has no register-shift form.
// The 16-bit encoding requires rd == rn, all low registers, and a set_cc
// request matching its behavior (sets flags iff outside IT block); anything
// else falls back to the 32-bit encoding.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rn,
                                Shift shift,
                                Register rm,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_NE(shift, RRX);
  bool must_be_32bit = false;
  if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
        0xf << 12 | (set_cc == kCcSet ? B20 : 0);
    encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 21;
    Emit32(encoding);
  } else {
    // 16-bit register-shift encoding; note the opcodes differ from the
    // 32-bit table above.
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 2U /* 0b0010 */; break;
      case LSR: opcode = 3U /* 0b0011 */; break;
      case ASR: opcode = 4U /* 0b0100 */; break;
      case ROR: opcode = 7U /* 0b0111 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1822
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001823inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1824 switch (size) {
1825 case kBranch16Bit:
1826 return 2u;
1827 case kBranch32Bit:
1828 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001829
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001830 case kCbxz16Bit:
1831 return 2u;
1832 case kCbxz32Bit:
1833 return 4u;
1834 case kCbxz48Bit:
1835 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001836
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001837 case kLiteral1KiB:
1838 return 2u;
1839 case kLiteral4KiB:
1840 return 4u;
1841 case kLiteral64KiB:
1842 return 8u;
1843 case kLiteral1MiB:
1844 return 10u;
1845 case kLiteralFar:
1846 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001847
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07001848 case kLiteralAddr1KiB:
1849 return 2u;
1850 case kLiteralAddr4KiB:
1851 return 4u;
1852 case kLiteralAddr64KiB:
1853 return 6u;
1854 case kLiteralAddrFar:
1855 return 10u;
1856
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001857 case kLongOrFPLiteral1KiB:
1858 return 4u;
1859 case kLongOrFPLiteral256KiB:
1860 return 10u;
1861 case kLongOrFPLiteralFar:
1862 return 14u;
1863 }
1864 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1865 UNREACHABLE();
1866}
1867
// Returns the byte size of the code sequence this fixup was first emitted as,
// before any size adjustment.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1871
// Returns the byte size of the code sequence for the fixup's current size.
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1875
1876inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1877 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001878 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001879 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1880 return current_code_size & 2;
1881}
1882
// Computes the PC-relative offset to encode for this fixup, given the current
// total code size. Starts from target_ - location_, applies the accumulated
// adjustment_ (code inserted between the two, in the appropriate direction),
// subtracts the Thumb2 PC bias, and then compensates for the extra
// instructions, literal-pool padding and PC rounding specific to the current
// size variant. All arithmetic is checked against int32_t overflow.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward reference: adjustment_ pushes the target further away.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward reference: adjustment_ pushes the target further back.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
    case kLiteralAddr1KiB:
    case kLiteralAddr4KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
    case kLiteralAddr64KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
    case kLiteralAddrFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1947
1948inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1949 DCHECK_NE(target_, kUnresolved);
1950 Size old_size = size_;
1951 size_ = new_size;
1952 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1953 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1954 if (target_ > location_) {
1955 adjustment_ += adjustment;
1956 }
1957 return adjustment;
1958}
1959
// Checks whether this fixup's current encoding can still reach its target
// given the current code size, and if not, repeatedly upgrades to the next
// larger encoding (falling through the ladder) until one fits. Returns the
// total number of bytes the code grew by (0 if nothing changed).
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // Conditional B has a 9-bit signed offset, unconditional a 12-bit one.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ has a 7-bit unsigned (forward-only) offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      // CMP + 16-bit conditional B: 9-bit signed offset.
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      // 16-bit LDR literal requires a low register and a 10-bit offset.
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLiteralAddr1KiB:
      // 16-bit ADR requires a low register and a 10-bit offset.
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr64KiB:
      if (IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddrFar);
      FALLTHROUGH_INTENDED;
    case kLiteralAddrFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
2061
// Writes the final instruction bytes for this fixup into `buffer` at
// `location_`, using the final `code_size` to compute the PC-relative offset
// via GetOffset(). The number of bytes stored must match GetSizeInBytes()
// for the current size variant.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      // Single 16-bit B (conditional or unconditional).
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      // 32-bit B; the link (BL) and link+exchange (BLX) variants are derived
      // by patching bits of the plain branch encoding.
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      // Single CBZ/CBNZ.
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CMP Rn, #0 followed by a 16-bit conditional branch.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // CMP Rn, #0 followed by a 32-bit conditional branch.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      // 16-bit PC-relative LDR.
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT the full 32-bit offset; ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLiteralAddr1KiB: {
      // 16-bit ADR.
      DCHECK(type_ == kLoadLiteralAddr);
      int16_t encoding = AdrEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteralAddr4KiB: {
      // 32-bit ADR.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t encoding = AdrEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteralAddr64KiB: {
      // MOVW rn, #offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      break;
    }
    case kLiteralAddrFar: {
      // MOVW+MOVT the full 32-bit offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      break;
    }

    case kLongOrFPLiteral1KiB: {
      // Single 32-bit PC-relative wide/FP load.
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // MOV IP, #(offset & ~0x3ff); ADD IP, PC; wide/FP load [IP, #low bits].
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW+MOVT the full offset into IP; ADD IP, PC; wide/FP load [IP].
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2240
// Emits a placeholder for a CBZ/CBNZ (compare-and-branch on zero/non-zero)
// and registers a fixup for it. `prev` is the previous entry of the label's
// link chain, stored in the placeholder slot; `n` selects CBNZ (true) vs CBZ.
// Returns the new fixup id to thread into the label.
uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
  // CBZ/CBNZ only encode low registers.
  CHECK(IsLowRegister(rn));
  uint32_t location = buffer_.Size();

  // This is always unresolved as it must be a forward branch.
  Emit16(prev);   // Previous link.
  return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
}
2249
2250
2251// NOTE: this only support immediate offsets, not [rx,ry].
2252// TODO: support [rx,ry] instructions.
2253void Thumb2Assembler::EmitLoadStore(Condition cond,
2254 bool load,
2255 bool byte,
2256 bool half,
2257 bool is_signed,
2258 Register rd,
2259 const Address& ad) {
2260 CHECK_NE(rd, kNoRegister);
2261 CheckCondition(cond);
2262 bool must_be_32bit = force_32bit_;
2263 if (IsHighRegister(rd)) {
2264 must_be_32bit = true;
2265 }
2266
2267 Register rn = ad.GetRegister();
Dave Allison45fdb932014-06-25 12:37:10 -07002268 if (IsHighRegister(rn) && rn != SP && rn != PC) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002269 must_be_32bit = true;
2270 }
2271
2272 if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
2273 must_be_32bit = true;
2274 }
2275
Dave Allison45fdb932014-06-25 12:37:10 -07002276 if (ad.IsImmediate()) {
2277 // Immediate offset
2278 int32_t offset = ad.GetOffset();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002279
Dave Allison45fdb932014-06-25 12:37:10 -07002280 // The 16 bit SP relative instruction can only have a 10 bit offset.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002281 if (rn == SP && offset >= (1 << 10)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002282 must_be_32bit = true;
2283 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002284
2285 if (byte) {
Dave Allison45fdb932014-06-25 12:37:10 -07002286 // 5 bit offset, no shift.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002287 if (offset >= (1 << 5)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002288 must_be_32bit = true;
2289 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002290 } else if (half) {
Dave Allison45fdb932014-06-25 12:37:10 -07002291 // 6 bit offset, shifted by 1.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002292 if (offset >= (1 << 6)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002293 must_be_32bit = true;
2294 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002295 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002296 // 7 bit offset, shifted by 2.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002297 if (offset >= (1 << 7)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002298 must_be_32bit = true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002299 }
2300 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002301
Dave Allison45fdb932014-06-25 12:37:10 -07002302 if (must_be_32bit) {
2303 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2304 (load ? B20 : 0) |
2305 (is_signed ? B24 : 0) |
2306 static_cast<uint32_t>(rd) << 12 |
2307 ad.encodingThumb(true) |
2308 (byte ? 0 : half ? B21 : B22);
2309 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002310 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002311 // 16 bit thumb1.
2312 uint8_t opA = 0;
2313 bool sp_relative = false;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002314
2315 if (byte) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002316 opA = 7U /* 0b0111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002317 } else if (half) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002318 opA = 8U /* 0b1000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002319 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002320 if (rn == SP) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002321 opA = 9U /* 0b1001 */;
Dave Allison45fdb932014-06-25 12:37:10 -07002322 sp_relative = true;
2323 } else {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002324 opA = 6U /* 0b0110 */;
Dave Allison45fdb932014-06-25 12:37:10 -07002325 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002326 }
Dave Allison45fdb932014-06-25 12:37:10 -07002327 int16_t encoding = opA << 12 |
2328 (load ? B11 : 0);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002329
Dave Allison45fdb932014-06-25 12:37:10 -07002330 CHECK_GE(offset, 0);
2331 if (sp_relative) {
2332 // SP relative, 10 bit offset.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002333 CHECK_LT(offset, (1 << 10));
Roland Levillain14d90572015-07-16 10:52:26 +01002334 CHECK_ALIGNED(offset, 4);
Dave Allison45fdb932014-06-25 12:37:10 -07002335 encoding |= rd << 8 | offset >> 2;
2336 } else {
2337 // No SP relative. The offset is shifted right depending on
2338 // the size of the load/store.
2339 encoding |= static_cast<uint32_t>(rd);
2340
2341 if (byte) {
2342 // 5 bit offset, no shift.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002343 CHECK_LT(offset, (1 << 5));
Dave Allison45fdb932014-06-25 12:37:10 -07002344 } else if (half) {
2345 // 6 bit offset, shifted by 1.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002346 CHECK_LT(offset, (1 << 6));
Roland Levillain14d90572015-07-16 10:52:26 +01002347 CHECK_ALIGNED(offset, 2);
Dave Allison45fdb932014-06-25 12:37:10 -07002348 offset >>= 1;
2349 } else {
2350 // 7 bit offset, shifted by 2.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002351 CHECK_LT(offset, (1 << 7));
Roland Levillain14d90572015-07-16 10:52:26 +01002352 CHECK_ALIGNED(offset, 4);
Dave Allison45fdb932014-06-25 12:37:10 -07002353 offset >>= 2;
2354 }
2355 encoding |= rn << 3 | offset << 6;
2356 }
2357
2358 Emit16(encoding);
2359 }
2360 } else {
2361 // Register shift.
2362 if (ad.GetRegister() == PC) {
2363 // PC relative literal encoding.
2364 int32_t offset = ad.GetOffset();
Dave Allison0bb9ade2014-06-26 17:57:36 -07002365 if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
Dave Allison45fdb932014-06-25 12:37:10 -07002366 int32_t up = B23;
2367 if (offset < 0) {
2368 offset = -offset;
2369 up = 0;
2370 }
2371 CHECK_LT(offset, (1 << 12));
2372 int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
2373 offset | up |
2374 static_cast<uint32_t>(rd) << 12;
2375 Emit32(encoding);
2376 } else {
2377 // 16 bit literal load.
2378 CHECK_GE(offset, 0);
2379 CHECK_LT(offset, (1 << 10));
2380 int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
2381 Emit16(encoding);
2382 }
2383 } else {
2384 if (ad.GetShiftCount() != 0) {
2385 // If there is a shift count this must be 32 bit.
2386 must_be_32bit = true;
2387 } else if (IsHighRegister(ad.GetRegisterOffset())) {
2388 must_be_32bit = true;
2389 }
2390
2391 if (must_be_32bit) {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002392 int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
Dave Allison45fdb932014-06-25 12:37:10 -07002393 ad.encodingThumb(true);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002394 if (half) {
2395 encoding |= B21;
2396 } else if (!byte) {
2397 encoding |= B22;
2398 }
Dave Allison45fdb932014-06-25 12:37:10 -07002399 Emit32(encoding);
2400 } else {
2401 // 16 bit register offset.
2402 int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
2403 ad.encodingThumb(false);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002404 if (byte) {
2405 encoding |= B10;
2406 } else if (half) {
2407 encoding |= B9;
2408 }
Dave Allison45fdb932014-06-25 12:37:10 -07002409 Emit16(encoding);
2410 }
2411 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002412 }
2413}
2414
2415
// Emit an LDM/STM (load/store multiple) of |regs| based at |base|.
// Picks the shortest valid encoding: a 16-bit PUSH/POP for SP-based
// full-descending accesses, a 16-bit LDM/STM when only low registers with
// writeback are involved, and the 32-bit encoding otherwise.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // SP-based IA_W load (POP) or DB_W store (PUSH) using only low registers
  // plus optionally PC (POP) or LR (PUSH) fits the 16-bit PUSH/POP encoding.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // Any high register in the list forces the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        // Thumb-2 only encodes increment-after and decrement-before modes.
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                       (op << 23) |
                       (load ? B20 : 0) |
                       base << 16 |
                       regs |
                       (w_bit << 21);
    Emit32(encoding);
  } else {
    // 16-bit LDMIA/STMIA with writeback.
    int16_t encoding = B15 | B14 |
                       (load ? B11 : 0) |
                       base << 8 |
                       regs;
    Emit16(encoding);
  }
}
2484
// Emit a branch (B, B<cond>, BL or BLX) to |label|, creating a Fixup so the
// instruction can be resolved, and possibly widened to 32 bits, later.
// Emits placeholder halfword(s) now; the Fixup machinery patches them.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      // Branch-with-link has no 16-bit encoding.
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  if (use32bit) {
    // Second placeholder halfword of the 32-bit encoding.
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2523
2524
// CLZ rd, rm: count leading zeros of rm into rd. Always the 32-bit Thumb-2
// encoding; note Rm is encoded twice (bits 19-16 and 3-0) as the
// architecture requires. SP is not rejected here, only PC.
void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B21 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2540
2541
// MOVW rd, #imm16: load a 16-bit immediate into rd, clearing the upper half.
// Uses the 16-bit MOVS encoding (T1) when rd is a low register and the
// immediate fits in 8 bits; otherwise the 32-bit encoding T3 with the
// immediate split into i:imm4:imm3:imm8 fields.
void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)|| imm16 >= 256u) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    // Use encoding T3.
    uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
    uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
    uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
    uint32_t imm8 = imm16 & 0xff;
    int32_t encoding = B31 | B30 | B29 | B28 |
        B25 | B22 |
        static_cast<uint32_t>(rd) << 8 |
        i << 26 |
        imm4 << 16 |
        imm3 << 12 |
        imm8;
    Emit32(encoding);
  } else {
    // 16-bit MOVS rd, #imm8 (encoding T1). NOTE(review): this sets flags,
    // unlike the 32-bit MOVW path -- presumably acceptable to callers.
    int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
        imm16;
    Emit16(encoding);
  }
}
2569
2570
// MOVT rd, #imm16: load a 16-bit immediate into the top half of rd,
// leaving the bottom half unchanged. Only a 32-bit encoding exists;
// the immediate is split into i:imm4:imm3:imm8 fields.
void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  // Always 32 bits.
  uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
  uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
  uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
  uint32_t imm8 = imm16 & 0xff;
  int32_t encoding = B31 | B30 | B29 | B28 |
      B25 | B23 | B22 |
      static_cast<uint32_t>(rd) << 8 |
      i << 26 |
      imm4 << 16 |
      imm3 << 12 |
      imm8;
  Emit32(encoding);
}
2587
2588
// RBIT rd, rm: reverse the bit order of rm into rd. 32-bit encoding only;
// neither SP nor PC is permitted for either operand. As with CLZ, Rm is
// encoded in both bits 19-16 and 3-0.
void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 | B5 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2606
2607
// LDREX rt, [rn, #imm]: load-exclusive word with immediate offset.
// The 8-bit offset field holds imm/4, so imm must be < 1024; the low two
// bits are silently dropped by the shift -- assumes callers pass a
// 4-byte-aligned offset (TODO confirm; there is no CHECK_ALIGNED here).
void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      0xf << 8 |
      imm >> 2;
  Emit32(encoding);
}
2621
2622
// LDREX rt, [rn]: convenience overload with a zero offset.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2626
2627
// STREX rd, rt, [rn, #imm]: store-exclusive word with immediate offset;
// rd receives the success/failure status. The offset field holds imm/4,
// so imm must be < 1024; the low two bits are silently dropped -- assumes
// a 4-byte-aligned offset (TODO confirm, no CHECK_ALIGNED here).
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            uint16_t imm,
                            Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm >> 2;
  Emit32(encoding);
}
2646
2647
// LDREXD rt, rt2, [rn]: load-exclusive doubleword into the pair rt/rt2.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 | B3 | B2 | B1 | B0;
  Emit32(encoding);
}
2662
2663
// STREX rd, rt, [rn]: convenience overload with a zero offset.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2670
2671
// STREXD rd, rt, rt2, [rn]: store-exclusive doubleword from the pair
// rt/rt2; rd receives the status and must be distinct from both sources.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CHECK_NE(rd, rt);
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 |
      static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2690
2691
// CLREX: clear the local processor's exclusive-access monitor record.
void Thumb2Assembler::clrex(Condition cond) {
  CheckCondition(cond);
  int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
      B21 | B20 |
      0xf << 16 |
      B15 |
      0xf << 8 |
      B5 |
      0xf;
  Emit32(encoding);
}
2703
2704
// NOP: emit the 16-bit Thumb NOP hint encoding.
void Thumb2Assembler::nop(Condition cond) {
  CheckCondition(cond);
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8;
  Emit16(static_cast<int16_t>(encoding));
}
2711
2712
// VMOV sn, rt: move a core register into a single-precision VFP register.
// The S register number is split into its top four bits (bits 19-16) and
// its low bit (bit 7).
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2726
2727
// VMOV rt, sn: move a single-precision VFP register into a core register.
// Same encoding as vmovsr but with the direction bit (B20) set.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B20 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2741
2742
// VMOV sm, sm+1, rt, rt2: move two core registers into a consecutive pair
// of S registers. sm may not be S31 since the pair sm/sm+1 must exist.
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2762
2763
// VMOV rt, rt2, sm, sm+1: move a consecutive pair of S registers into two
// distinct core registers (direction bit B20 set relative to vmovsrr).
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2784
2785
// VMOV dm, rt, rt2: move two core registers into a double-precision
// register. The D register number is split into M (bit 5) and Vm (bits 3-0).
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2804
2805
// VMOV rt, rt2, dm: move a double-precision register into two distinct
// core registers (direction bit B20 set relative to vmovdrr).
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2825
2826
2827void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2828 const Address& addr = static_cast<const Address&>(ad);
2829 CHECK_NE(sd, kNoSRegister);
2830 CheckCondition(cond);
2831 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2832 B27 | B26 | B24 | B20 |
2833 ((static_cast<int32_t>(sd) & 1)*B22) |
2834 ((static_cast<int32_t>(sd) >> 1)*B12) |
2835 B11 | B9 | addr.vencoding();
2836 Emit32(encoding);
2837}
2838
2839
2840void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2841 const Address& addr = static_cast<const Address&>(ad);
2842 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2843 CHECK_NE(sd, kNoSRegister);
2844 CheckCondition(cond);
2845 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2846 B27 | B26 | B24 |
2847 ((static_cast<int32_t>(sd) & 1)*B22) |
2848 ((static_cast<int32_t>(sd) >> 1)*B12) |
2849 B11 | B9 | addr.vencoding();
2850 Emit32(encoding);
2851}
2852
2853
2854void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2855 const Address& addr = static_cast<const Address&>(ad);
2856 CHECK_NE(dd, kNoDRegister);
2857 CheckCondition(cond);
2858 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2859 B27 | B26 | B24 | B20 |
2860 ((static_cast<int32_t>(dd) >> 4)*B22) |
2861 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2862 B11 | B9 | B8 | addr.vencoding();
2863 Emit32(encoding);
2864}
2865
2866
2867void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2868 const Address& addr = static_cast<const Address&>(ad);
2869 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2870 CHECK_NE(dd, kNoDRegister);
2871 CheckCondition(cond);
2872 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2873 B27 | B26 | B24 |
2874 ((static_cast<int32_t>(dd) >> 4)*B22) |
2875 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2876 B11 | B9 | B8 | addr.vencoding();
2877 Emit32(encoding);
2878}
2879
2880
// VPUSH of |nregs| consecutive S registers starting at |reg|.
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2884
2885
// VPUSH of |nregs| consecutive D registers starting at |reg|.
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2889
2890
// VPOP of |nregs| consecutive S registers starting at |reg|.
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2894
2895
// VPOP of |nregs| consecutive D registers starting at |reg|.
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2899
2900
// Shared emitter for VPUSH/VPOP of |nregs| S or D registers starting at
// |reg|. |push| selects VPUSH (pre-decrement store) vs VPOP (post-increment
// load); |dbl| selects D vs S registers, which changes how the register
// number splits into the D bit and the Vd field, and how the register
// count maps into the imm8 field (D registers count two words each).
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     14U /* 0b1110 */ << 28 |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit32(encoding);
}
2925
2926
// Emit a three-operand single-precision VFP data-processing instruction
// (sd = sn OP sm); |opcode| carries the operation-specific bits.
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2943
2944
// Emit a three-operand double-precision VFP data-processing instruction
// (dd = dn OP dm); |opcode| carries the operation-specific bits.
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2961
2962
// Emit a VFP instruction with a single-precision destination and a
// double-precision source (e.g. conversions); |opcode| selects the op.
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2976
2977
// Emit a VFP instruction with a double-precision destination and a
// single-precision source (e.g. conversions); |opcode| selects the op.
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2991
2992
// VMRS APSR_nzcv, FPSCR: copy the VFP comparison flags into the core
// APSR condition flags (destination register field encodes PC = APSR_nzcv).
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit32(encoding);
}
3002
3003
// SVC #imm8: supervisor (system) call with an 8-bit immediate.
void Thumb2Assembler::svc(uint32_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B14 | B12 |
      B11 | B10 | B9 | B8 |
      imm8;
  Emit16(encoding);
}
3011
3012
// BKPT #imm8: software breakpoint with an 8-bit immediate.
void Thumb2Assembler::bkpt(uint16_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 |
      imm8;
  Emit16(encoding);
}
3020
3021// Convert the given IT state to a mask bit given bit 0 of the first
3022// condition and a shift position.
3023static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
3024 switch (s) {
3025 case kItOmitted: return 1 << shift;
3026 case kItThen: return firstcond0 << shift;
3027 case kItElse: return !firstcond0 << shift;
3028 }
3029 return 0;
3030}
3031
3032
3033// Set the IT condition in the given position for the given state. This is used
3034// to check that conditional instructions match the preceding IT statement.
3035void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
3036 switch (s) {
3037 case kItOmitted: it_conditions_[index] = AL; break;
3038 case kItThen: it_conditions_[index] = cond; break;
3039 case kItElse:
3040 it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
3041 break;
3042 }
3043}
3044
3045
// IT (If-Then): emit an IT instruction covering up to four following
// conditional instructions. Builds the mask field bit by bit (a set bit
// after the last used slot terminates the block) and records the per-slot
// conditions so subsequent instructions can be validated against them.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);  // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // Slot 0 is always "then" with the first condition.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  // Each omitted slot ends the block; later slots are only processed while
  // the earlier ones are present.
  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        // All four slots used: the terminating bit goes in position 0.
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8 |
      firstcond << 4 |
      mask;
  Emit16(encoding);
}
3080
3081
// CBZ rn, label: compare-and-branch-if-zero. Only forward branches and low
// registers are encodable, so both restrictions are fatal errors here.
void Thumb2Assembler::cbz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link the emitted placeholder into the label's branch list.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
    label->LinkTo(branchid);
  }
}
3095
3096
// CBNZ rn, label: compare-and-branch-if-not-zero. Same restrictions as CBZ:
// forward branches and low registers only.
void Thumb2Assembler::cbnz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbnz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbnz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link the emitted placeholder into the label's branch list.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
    label->LinkTo(branchid);
  }
}
3110
3111
// BLX rm: branch with link and exchange to the address in rm.
void Thumb2Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
3118
3119
// BX rm: branch and exchange to the address in rm (no link).
void Thumb2Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
3126
3127
// Push a single register via a pre-indexed store below SP.
void Thumb2Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}
3131
3132
// Pop a single register via a post-indexed load from SP.
void Thumb2Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}
3136
3137
// Push a register list: STMDB SP!, {regs} (full-descending stack).
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
3141
3142
// Pop a register list: LDMIA SP!, {regs} (full-descending stack).
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
3146
3147
3148void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
3149 if (cond != AL || rd != rm) {
3150 mov(rd, ShifterOperand(rm), cond);
3151 }
3152}
3153
3154
// Bind |label| to the current end of the code buffer, resolving any
// branches previously linked to it.
void Thumb2Assembler::Bind(Label* label) {
  BindLabel(label, buffer_.Size());
}
3158
3159
// Logical shift left by immediate (0..31).
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
}
3166
3167
// Logical shift right by immediate (1..32); a shift of 32 is encoded as 0
// per UAL convention.
void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
}
3175
3176
// Arithmetic shift right by immediate (1..32); a shift of 32 is encoded as
// 0 per UAL convention.
void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
}
3184
3185
// Rotate right by immediate (1..31); a rotate of 0 or 32 is not encodable
// here (0 would mean RRX in the shift field).
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
}
3192
3193
// Rotate right with extend (one-bit rotate through carry). RRX takes no
// shift amount; |rm| is passed again in the amount position, presumably to
// select the register-form EmitShift overload -- TODO confirm.
void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, RRX, rm, cond, set_cc);
}
3198
3199
// Logical shift left by the amount held in register |rn|.
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, cond, set_cc);
}
3205
3206
// Logical shift right by the amount held in register |rn|.
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, cond, set_cc);
}
3212
3213
// Arithmetic shift right of rm by the amount held in register rn, into rd.
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, cond, set_cc);
}
3219
3220
// Rotate rm right by the amount held in register rn, into rd.
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, cond, set_cc);
}
3226
3227
3228int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
3229 // The offset is off by 4 due to the way the ARM CPUs read PC.
3230 offset -= 4;
3231 offset >>= 1;
3232
3233 uint32_t value = 0;
3234 // There are two different encodings depending on the value of bit 12. In one case
3235 // intermediate values are calculated using the sign bit.
3236 if ((inst & B12) == B12) {
3237 // 25 bits of offset.
3238 uint32_t signbit = (offset >> 31) & 0x1;
3239 uint32_t i1 = (offset >> 22) & 0x1;
3240 uint32_t i2 = (offset >> 21) & 0x1;
3241 uint32_t imm10 = (offset >> 11) & 0x03ff;
3242 uint32_t imm11 = offset & 0x07ff;
3243 uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
3244 uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
3245 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
3246 imm11;
3247 // Remove the offset from the current encoding.
3248 inst &= ~(0x3ff << 16 | 0x7ff);
3249 } else {
3250 uint32_t signbit = (offset >> 31) & 0x1;
3251 uint32_t imm6 = (offset >> 11) & 0x03f;
3252 uint32_t imm11 = offset & 0x07ff;
3253 uint32_t j1 = (offset >> 19) & 1;
3254 uint32_t j2 = (offset >> 17) & 1;
3255 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
3256 imm11;
3257 // Remove the offset from the current encoding.
3258 inst &= ~(0x3f << 16 | 0x7ff);
3259 }
3260 // Mask out offset bits in current instruction.
3261 inst &= ~(B26 | B13 | B11);
3262 inst |= value;
3263 return inst;
3264}
3265
3266
// Extracts the signed byte offset (including the +4 PC-read adjustment) from
// an encoded 32-bit Thumb2 branch instruction. Inverse of EncodeBranchOffset().
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    // Unconditional branch form: imm32 = SignExtend(S:I1:I2:imm10:imm11:'0').
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // I1/I2 are stored inverted relative to the sign bit: In = NOT(Jn EOR S).
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    // NOTE(review): this extends from bit 23 (I1), discarding S at bit 24;
    // correct only while offsets fit in 24 bits - presumably guaranteed by
    // callers. Confirm.
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    // Conditional branch form: imm32 = SignExtend(S:J2:J1:imm6:imm11:'0').
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  // Undo the -4 applied by EncodeBranchOffset() for the PC-read offset.
  imm32 += 4;
  return imm32;
}
3293
// Translates a code position recorded before fixup resolution into its final
// position, accounting for fixups whose instructions grew (e.g. 16- to 32-bit).
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Non-monotonic query: restart the scan from the first fixup.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    // Stop at the first fixup at or past the (already adjusted) position.
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      // This fixup's instruction grew; everything after it shifts by the delta.
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  // Cache the scan state for the next (expected larger) query.
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3318
// Allocates a new literal holding `size` (4 or 8) bytes copied from `data`,
// owned by this assembler, and returns a pointer to it.
// NOTE(review): assumes literals_ does not invalidate references to existing
// elements when it grows - confirm the container type.
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  return &literals_.back();
}
3324
// Emits a PC-relative load of a (not yet placed) 4-byte literal into rt.
// High registers and forced-32-bit mode need the 32-bit encoding with its
// 4KiB range; otherwise the 16-bit, 1KiB-range form is tried first.
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // The instruction slot temporarily holds the label's link chain; the real
  // offset is patched in when the fixup is resolved.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3339
// Emits a PC-relative load of a (not yet placed) 8-byte literal into the
// register pair rt:rt2, starting with the 1KiB-range encoding.
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // The instruction slot temporarily holds the label's link chain; patched
  // when the fixup is resolved.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3351
// Emits a PC-relative load of a (not yet placed) 4-byte literal into the
// single-precision FP register sd, starting with the 1KiB-range encoding.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // The instruction slot temporarily holds the label's link chain; patched
  // when the fixup is resolved.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3362
// Emits a PC-relative load of a (not yet placed) 8-byte literal into the
// double-precision FP register dd, starting with the 1KiB-range encoding.
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // The instruction slot temporarily holds the label's link chain; patched
  // when the fixup is resolved.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003373
Dave Allison65fcc2c2014-04-28 13:45:27 -07003374
// Emits code computing rd = rn + value for an arbitrary 32-bit immediate,
// choosing the shortest encodable sequence: ADD, SUB, MVN into IP followed by
// ADD/SUB, or MOVW/MOVT into IP followed by ADD.
void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                  Condition cond, SetCc set_cc) {
  if (value == 0 && set_cc != kCcSet) {
    // Nothing to add and no flags requested: at most a register move.
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
    add(rd, rn, shifter_op, cond, set_cc);
  } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond, set_cc);
  } else {
    // IP is needed as a temporary below, so the base must not be IP.
    CHECK(rn != IP);
    if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
      // MVN loads the bitwise complement of its operand, so IP ends up
      // holding `value`; flags are kept so only the final add sets them.
      mvn(IP, shifter_op, cond, kCcKeep);
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
      // Here IP ends up holding -value, so subtract it.
      mvn(IP, shifter_op, cond, kCcKeep);
      sub(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else {
      // Fall back to materializing the full 32-bit constant in IP.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    }
  }
}
3409
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003410void Thumb2Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
3411 // We prefer to select the shorter code sequence rather than selecting add for
3412 // positive values and sub for negatives ones, which would slightly improve
3413 // the readability of generated code for some constants.
3414 ShifterOperand shifter_op;
3415 if (ShifterOperandCanHold(kNoRegister, rn, CMP, value, &shifter_op)) {
3416 cmp(rn, shifter_op, cond);
3417 } else if (ShifterOperandCanHold(kNoRegister, rn, CMN, ~value, &shifter_op)) {
3418 cmn(rn, shifter_op, cond);
3419 } else {
3420 CHECK(rn != IP);
3421 movw(IP, Low16Bits(value), cond);
3422 uint16_t value_high = High16Bits(value);
3423 if (value_high != 0) {
3424 movt(IP, value_high, cond);
3425 }
3426 cmp(rn, ShifterOperand(IP), cond);
3427 }
3428}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003429
// Loads an arbitrary 32-bit immediate into rd: a single MOV or MVN when the
// value (or its complement) is encodable as a shifter operand, otherwise
// MOVW plus MOVT (the latter only if the high half is non-zero).
void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
    mov(rd, shifter_op, cond);
  } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
    // MVN loads the complement, so rd ends up holding `value`.
    mvn(rd, shifter_op, cond);
  } else {
    movw(rd, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      movt(rd, value_high, cond);
    }
  }
}
3444
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003445
Dave Allison65fcc2c2014-04-28 13:45:27 -07003446// Implementation note: this method must emit at most one instruction when
3447// Address::CanHoldLoadOffsetThumb.
3448void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3449 Register reg,
3450 Register base,
3451 int32_t offset,
3452 Condition cond) {
3453 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003454 CHECK_NE(base, IP);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003455 LoadImmediate(IP, offset, cond);
3456 add(IP, IP, ShifterOperand(base), cond);
3457 base = IP;
3458 offset = 0;
3459 }
3460 CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
3461 switch (type) {
3462 case kLoadSignedByte:
3463 ldrsb(reg, Address(base, offset), cond);
3464 break;
3465 case kLoadUnsignedByte:
3466 ldrb(reg, Address(base, offset), cond);
3467 break;
3468 case kLoadSignedHalfword:
3469 ldrsh(reg, Address(base, offset), cond);
3470 break;
3471 case kLoadUnsignedHalfword:
3472 ldrh(reg, Address(base, offset), cond);
3473 break;
3474 case kLoadWord:
3475 ldr(reg, Address(base, offset), cond);
3476 break;
3477 case kLoadWordPair:
3478 ldrd(reg, Address(base, offset), cond);
3479 break;
3480 default:
3481 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003482 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003483 }
3484}
3485
3486
3487// Implementation note: this method must emit at most one instruction when
3488// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3489void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3490 Register base,
3491 int32_t offset,
3492 Condition cond) {
3493 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3494 CHECK_NE(base, IP);
3495 LoadImmediate(IP, offset, cond);
3496 add(IP, IP, ShifterOperand(base), cond);
3497 base = IP;
3498 offset = 0;
3499 }
3500 CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
3501 vldrs(reg, Address(base, offset), cond);
3502}
3503
3504
3505// Implementation note: this method must emit at most one instruction when
3506// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3507void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3508 Register base,
3509 int32_t offset,
3510 Condition cond) {
3511 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3512 CHECK_NE(base, IP);
3513 LoadImmediate(IP, offset, cond);
3514 add(IP, IP, ShifterOperand(base), cond);
3515 base = IP;
3516 offset = 0;
3517 }
3518 CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
3519 vldrd(reg, Address(base, offset), cond);
3520}
3521
3522
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
//
// Stores `reg` to [base + offset] using the store flavor `type`. When the
// offset is not directly encodable, the effective address must first be
// materialized in a temporary register. IP is preferred; if IP is already
// taken by `reg` (or by `reg` + 1 for a word-pair store), R5 or R6 is
// spilled to the stack, used as the temporary, and restored afterwards.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push just moved SP down by one register; compensate.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    // NOTE(review): this add is emitted with AL rather than `cond` - confirm
    // whether non-AL conditions are expected to reach this slow path.
    add(tmp_reg, tmp_reg, ShifterOperand(base), AL);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the spilled secondary temporary, if one was used.
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3577
3578
3579// Implementation note: this method must emit at most one instruction when
3580// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3581void Thumb2Assembler::StoreSToOffset(SRegister reg,
3582 Register base,
3583 int32_t offset,
3584 Condition cond) {
3585 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3586 CHECK_NE(base, IP);
3587 LoadImmediate(IP, offset, cond);
3588 add(IP, IP, ShifterOperand(base), cond);
3589 base = IP;
3590 offset = 0;
3591 }
3592 CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
3593 vstrs(reg, Address(base, offset), cond);
3594}
3595
3596
3597// Implementation note: this method must emit at most one instruction when
3598// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3599void Thumb2Assembler::StoreDToOffset(DRegister reg,
3600 Register base,
3601 int32_t offset,
3602 Condition cond) {
3603 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3604 CHECK_NE(base, IP);
3605 LoadImmediate(IP, offset, cond);
3606 add(IP, IP, ShifterOperand(base), cond);
3607 base = IP;
3608 offset = 0;
3609 }
3610 CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
3611 vstrd(reg, Address(base, offset), cond);
3612}
3613
3614
// Emits a full system memory barrier (DMB SY). The barrier itself needs no
// scratch register; the CHECK enforces that callers pass R12 as agreed.
void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
3619
3620
3621void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003622 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3623 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003624}
3625
3626
3627void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003628 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003629 cbz(r, label);
3630 } else {
3631 cmp(r, ShifterOperand(0));
3632 b(label, EQ);
3633 }
3634}
3635
3636
Dave Allison65fcc2c2014-04-28 13:45:27 -07003637void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003638 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003639 cbnz(r, label);
3640 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003641 cmp(r, ShifterOperand(0));
3642 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003643 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003644}
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003645
// Creates a jump table over the given target labels and emits a placeholder
// load of the table's address into `base_reg`; the load is patched via the
// fixup machinery once the table's location is known. Returns the new table.
JumpTable* Thumb2Assembler::CreateJumpTable(std::vector<Label*>&& labels, Register base_reg) {
  jump_tables_.emplace_back(std::move(labels));
  JumpTable* table = &jump_tables_.back();
  DCHECK(!table->GetLabel()->IsBound());

  // High registers (and forced-32-bit mode) require the 32-bit
  // literal-address load with its 4KiB range.
  bool use32bit = IsForced32Bit() || IsHighRegister(base_reg);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteralAddr4KiB : Fixup::kLiteralAddr1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadLiteralAddress(location, base_reg, size));
  // The instruction slot temporarily holds the label's link chain; the real
  // offset is patched in when the fixup is resolved.
  Emit16(static_cast<uint16_t>(table->GetLabel()->position_));
  table->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());

  return table;
}
3664
// Emits the dispatch sequence for `jump_table`: adds the displacement held in
// `displacement_reg` to PC. Only the 16-bit ADD pc form is supported so far.
void Thumb2Assembler::EmitJumpTableDispatch(JumpTable* jump_table, Register displacement_reg) {
  CHECK(!IsForced32Bit()) << "Forced 32-bit dispatch not implemented yet";
  // 32-bit ADD doesn't support PC as an input, so we need a two-instruction sequence:
  //   SUB ip, ip, #0
  //   ADD pc, ip, reg
  // TODO: Implement.

  // The anchor's position needs to be fixed up before we can compute offsets - so make it a tracked
  // label. The jump table offsets are computed relative to this anchor.
  BindTrackedLabel(jump_table->GetAnchorLabel());

  add(PC, PC, ShifterOperand(displacement_reg));
}
3678
Dave Allison65fcc2c2014-04-28 13:45:27 -07003679} // namespace arm
3680} // namespace art