blob: 15298b390b18338341c2a783c0f1ae9224fe808a [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // Count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward reference: every Fixup located between this one and its target
      // lies inside the span, so its size change affects this fixup's reach.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward reference: every Fixup from the target up to this one's predecessor.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    // Nothing depends on anything; leave fixup_dependents_ unallocated.
    return;
  }
  // Create and fill in the fixup_dependents_.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        // Fill each range back-to-front; dependents_start_ ends up pointing at
        // the first entry of that fixup's range.
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  // Resolve every Fixup linked through this label, then mark the label bound
  // at `bound_pc`. The chain of pending fixup ids is threaded through the
  // 16-bit placeholders stored in the buffer at each fixup's location.
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();   // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);      // Get the Fixup at this id.
    fixup->Resolve(bound_pc);               // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);  // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);  // Clear the placeholder (also used later as a flag).
    label->position_ = next;                // Move to next.
  }
  label->BindTo(bound_pc);
}
94
Andreas Gampe7cffc3b2015-10-19 21:31:53 -070095uint32_t Thumb2Assembler::BindLiterals() {
Vladimir Markocf93a5c2015-06-16 11:33:24 +000096 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
97 uint32_t code_size = buffer_.Size();
98 for (Literal& lit : literals_) {
99 Label* label = lit.GetLabel();
100 BindLabel(label, code_size);
101 code_size += lit.GetSize();
102 }
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700103 return code_size;
104}
105
106void Thumb2Assembler::BindJumpTables(uint32_t code_size) {
107 for (JumpTable& table : jump_tables_) {
108 Label* label = table.GetLabel();
109 BindLabel(label, code_size);
110 code_size += table.GetSize();
111 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000112}
113
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  // Let the fixup grow if its current encoding cannot reach its target at the
  // current code size. If it grew, all fixups spanning it move further from
  // their own targets, so queue those dependents for recalculation.
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      // The 16-bit placeholder at the dependent's location doubles as an
      // "already queued" flag: 0 = not queued, non-zero = queued.
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
129
uint32_t Thumb2Assembler::AdjustFixups() {
  // Iterate to a fixed point where every fixup's size suffices to reach its
  // target, and return the final code size. Growing one fixup can push others
  // out of range, hence the worklist.
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass: give every fixup a chance to grow.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" flag.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && (!literals_.empty() || !jump_tables_.empty())) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    // Rebind each literal/jump-table label shifted by the total growth (plus
    // the 4-byte alignment padding accounted for above).
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
    for (JumpTable& table : jump_tables_) {
      Label* label = table.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
196
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      // Shrink the source/destination windows past this fixup for the next iteration.
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  CHECK_EQ(src_end, dest_end);  // All moved data and adjustments must line up exactly.
}
227
228void Thumb2Assembler::EmitLiterals() {
229 if (!literals_.empty()) {
230 // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
231 // We don't support byte and half-word literals.
232 uint32_t code_size = buffer_.Size();
Roland Levillain14d90572015-07-16 10:52:26 +0100233 DCHECK_ALIGNED(code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000234 if ((code_size & 2u) != 0u) {
235 Emit16(0);
236 }
237 for (Literal& literal : literals_) {
238 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
239 DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
240 DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
241 for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
242 buffer_.Emit<uint8_t>(literal.GetData()[i]);
243 }
244 }
245 }
246}
247
void Thumb2Assembler::EmitJumpTables() {
  if (!jump_tables_.empty()) {
    // Jump tables require 4 byte alignment. (We don't support byte and half-word jump tables.)
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);
    }
    for (JumpTable& table : jump_tables_) {
      // Bulk ensure capacity, as this may be large.
      size_t orig_size = buffer_.Size();
      buffer_.ExtendCapacity(orig_size + table.GetSize());
#ifndef NDEBUG
      // Debug builds assert capacity on every Emit; flag it manually since we
      // bypassed the EnsureCapacity RAII helper for this bulk extension.
      buffer_.has_ensured_capacity_ = true;
#endif

      DCHECK_EQ(static_cast<size_t>(table.GetLabel()->Position()), buffer_.Size());
      // Table entries are relative to the anchor plus 4 (Thumb PC-read offset).
      int32_t anchor_position = table.GetAnchorLabel()->Position() + 4;

      for (Label* target : table.GetData()) {
        // Ensure that the label was tracked, so that it will have the right position.
        DCHECK(std::find(tracked_labels_.begin(), tracked_labels_.end(), target) !=
               tracked_labels_.end());

        int32_t offset = target->Position() - anchor_position;
        buffer_.Emit<int32_t>(offset);
      }

#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = false;
#endif
      size_t new_size = buffer_.Size();
      DCHECK_LE(new_size - orig_size, table.GetSize());
    }
  }
}
284
void Thumb2Assembler::PatchCFI() {
  // Rewrite the CFI stream, replacing the delayed advance-PC entries with
  // advances to the final, post-fixup-adjustment PC values.
  if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
    return;
  }

  typedef DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC DelayedAdvancePC;
  const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
  const std::vector<uint8_t>& old_stream = data.first;
  const std::vector<DelayedAdvancePC>& advances = data.second;

  // Refill our data buffer with patched opcodes.
  cfi().ReserveCFIStream(old_stream.size() + advances.size() + 16);
  size_t stream_pos = 0;
  for (const DelayedAdvancePC& advance : advances) {
    DCHECK_GE(advance.stream_pos, stream_pos);
    // Copy old data up to the point where advance was issued.
    cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
    stream_pos = advance.stream_pos;
    // Insert the advance command with its final offset.
    size_t final_pc = GetAdjustedPosition(advance.pc);
    cfi().AdvancePC(final_pc);
  }
  // Copy the final segment if any.
  cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
}
310
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000311inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
Roland Levillain14d90572015-07-16 10:52:26 +0100312 DCHECK_ALIGNED(offset, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000313 int16_t encoding = B15 | B14;
314 if (cond != AL) {
315 DCHECK(IsInt<9>(offset));
316 encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
317 } else {
318 DCHECK(IsInt<12>(offset));
319 encoding |= B13 | ((offset >> 1) & 0x7ff);
320 }
321 return encoding;
322}
323
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  // Encode a 32-bit Thumb2 branch: conditional (T3) or unconditional (T4).
  // Both encodings share S (bit 26) and imm11 (bits 0-10); the middle
  // immediate bits and the J1/J2 bits differ between the two forms.
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    // In T4 the J bits are I1/I2 XORed with the sign (see the ARM ARM).
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22)^ s ^ 1) & 1;   // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
346
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  // Encode CBZ (cond == EQ) or CBNZ (cond == NE): compare rn against zero and
  // branch forward by `offset` (forward-only, 7-bit unsigned, 2-byte aligned).
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 9 (original comment said bit 11; the shift targets bit 9).
}
356
357inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
358 DCHECK(!IsHighRegister(rn));
359 DCHECK(IsUint<8>(value));
360 return B13 | B11 | (rn << 8) | value;
361}
362
363inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
364 // The high bit of rn is moved across 4-bit rm.
365 return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
366 (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
367}
368
369inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
370 DCHECK(IsUint<16>(value));
371 return B31 | B30 | B29 | B28 | B25 | B22 |
372 (static_cast<int32_t>(rd) << 8) |
373 ((value & 0xf000) << (16 - 12)) | // Move imm4 from bits 12-15 to bits 16-19.
374 ((value & 0x0800) << (26 - 11)) | // Move i from bit 11 to bit 26.
375 ((value & 0x0700) << (12 - 8)) | // Move imm3 from bits 8-10 to bits 12-14.
376 (value & 0xff); // Keep imm8 in bits 0-7.
377}
378
379inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
380 DCHECK_EQ(value & 0xffff, 0);
381 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
382 return movw_encoding | B25 | B23;
383}
384
385inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
386 uint32_t mod_imm = ModifiedImmediate(value);
387 DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
388 return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
389 (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
390}
391
392inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
393 DCHECK(!IsHighRegister(rt));
Roland Levillain14d90572015-07-16 10:52:26 +0100394 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000395 DCHECK(IsUint<10>(offset));
396 return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
397}
398
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // LDR (literal), 32-bit encoding: same layout as LDR (immediate) with rn = PC.
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}
403
404inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
Roland Levillain14d90572015-07-16 10:52:26 +0100405 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000406 CHECK(IsUint<10>(offset));
407 return B31 | B30 | B29 | B27 |
408 B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
409 (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
410 (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
411}
412
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  // VLDR.32: load a single-precision register from [rn + offset]; imm8 = offset / 4.
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |  // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |  // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}
423
424inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
Roland Levillain14d90572015-07-16 10:52:26 +0100425 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000426 CHECK(IsUint<10>(offset));
427 return B31 | B30 | B29 | B27 | B26 | B24 |
428 B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
429 (rn << 16) |
430 ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) | // Move D from bit 4 to bit 22.
431 ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) | // Move Vd from bits 0-3 to bits 12-15.
432 (offset >> 2);
433}
434
435inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
436 DCHECK(!IsHighRegister(rt));
437 DCHECK(!IsHighRegister(rn));
Roland Levillain14d90572015-07-16 10:52:26 +0100438 DCHECK_ALIGNED(offset, 4);
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000439 DCHECK(IsUint<7>(offset));
440 return B14 | B13 | B11 |
441 (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
442 (offset << (6 - 2)); // Move imm5 from bits 2-6 to bits 6-10.
443}
444
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  // Dispatch to the 32-bit load encoding matching this fixup's literal type:
  // core-register pair (LDRD) or single/double FP register (VLDR).
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
458
459inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
460 DCHECK(IsUint<12>(offset));
461 return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
462}
463
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700464inline int16_t Thumb2Assembler::AdrEncoding16(Register rd, int32_t offset) {
465 DCHECK(IsUint<10>(offset));
466 DCHECK(IsAligned<4>(offset));
467 DCHECK(!IsHighRegister(rd));
468 return B15 | B13 | (rd << 8) | (offset >> 2);
469}
470
471inline int32_t Thumb2Assembler::AdrEncoding32(Register rd, int32_t offset) {
472 DCHECK(IsUint<12>(offset));
473 // Bit 26: offset[11]
474 // Bits 14-12: offset[10-8]
475 // Bits 7-0: offset[7-0]
476 int32_t immediate_mask =
477 ((offset & (1 << 11)) << (26 - 11)) |
478 ((offset & (7 << 8)) << (12 - 8)) |
479 (offset & 0xFF);
480 return B31 | B30 | B29 | B28 | B25 | B19 | B18 | B17 | B16 | (rd << 8) | immediate_mask;
481}
482
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  // The steps below are order-sensitive: literal/jump-table labels must be
  // bound before fixup sizes are adjusted, fixups must be emitted before the
  // data that follows the code, and CFI is patched last using final positions.
  uint32_t size_after_literals = BindLiterals();
  BindJumpTables(size_after_literals);
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
  FinalizeTrackedLabels();
  EmitJumpTables();
  PatchCFI();
}
494
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +0100495bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
496 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
497}
498
Nicolas Geoffray3d1e7882015-02-03 13:59:52 +0000499bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
500 Register rn ATTRIBUTE_UNUSED,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000501 Opcode opcode,
502 uint32_t immediate,
Vladimir Markof5c09c32015-12-17 12:08:08 +0000503 SetCc set_cc,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000504 ShifterOperand* shifter_op) {
505 shifter_op->type_ = ShifterOperand::kImmediate;
506 shifter_op->immed_ = immediate;
507 shifter_op->is_shift_ = false;
508 shifter_op->is_rotate_ = false;
509 switch (opcode) {
510 case ADD:
511 case SUB:
Vladimir Markof5c09c32015-12-17 12:08:08 +0000512 // Less than (or equal to) 12 bits can be done if we don't need to set condition codes.
513 if (immediate < (1 << 12) && set_cc != kCcSet) {
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000514 return true;
515 }
516 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
517
518 case MOV:
519 // TODO: Support less than or equal to 12bits.
520 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
Vladimir Markod2b4ca22015-09-14 15:13:26 +0100521
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000522 case MVN:
523 default:
524 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
525 }
526}
527
// Data-processing instructions with two source operands (rd = rn OP so).
// Each thin wrapper forwards to EmitDataProcessing with its opcode; note the
// rn-before-rd argument order that EmitDataProcessing expects.
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}


void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}


void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}


void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}


void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}


void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}


void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}


void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}
574
575
// Compare/test instructions: these always set the condition codes (kCcSet)
// and have no destination register, so R0 is passed as a placeholder rd.
void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}


void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}
596
597
// Logical and move instructions. MOV and MVN have no first source operand,
// so R0 is passed as a placeholder rn to EmitDataProcessing.
void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}


void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
}


void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}


void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}


void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
626
627
628void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700629 CheckCondition(cond);
630
Dave Allison65fcc2c2014-04-28 13:45:27 -0700631 if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
632 // 16 bit.
633 int16_t encoding = B14 | B9 | B8 | B6 |
634 rn << 3 | rd;
635 Emit16(encoding);
636 } else {
637 // 32 bit.
Andreas Gampec8ccf682014-09-29 20:07:43 -0700638 uint32_t op1 = 0U /* 0b000 */;
639 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700640 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
641 op1 << 20 |
642 B15 | B14 | B13 | B12 |
643 op2 << 4 |
644 static_cast<uint32_t>(rd) << 8 |
645 static_cast<uint32_t>(rn) << 16 |
646 static_cast<uint32_t>(rm);
647
648 Emit32(encoding);
649 }
650}
651
652
653void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
654 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700655 CheckCondition(cond);
656
Andreas Gampec8ccf682014-09-29 20:07:43 -0700657 uint32_t op1 = 0U /* 0b000 */;
658 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700659 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
660 op1 << 20 |
661 op2 << 4 |
662 static_cast<uint32_t>(rd) << 8 |
663 static_cast<uint32_t>(ra) << 12 |
664 static_cast<uint32_t>(rn) << 16 |
665 static_cast<uint32_t>(rm);
666
667 Emit32(encoding);
668}
669
670
671void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
672 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700673 CheckCondition(cond);
674
Andreas Gampec8ccf682014-09-29 20:07:43 -0700675 uint32_t op1 = 0U /* 0b000 */;
676 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700677 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
678 op1 << 20 |
679 op2 << 4 |
680 static_cast<uint32_t>(rd) << 8 |
681 static_cast<uint32_t>(ra) << 12 |
682 static_cast<uint32_t>(rn) << 16 |
683 static_cast<uint32_t>(rm);
684
685 Emit32(encoding);
686}
687
688
Zheng Xuc6667102015-05-15 16:08:45 +0800689void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
690 Register rm, Condition cond) {
691 CheckCondition(cond);
692
693 uint32_t op1 = 0U /* 0b000; */;
694 uint32_t op2 = 0U /* 0b0000 */;
695 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
696 op1 << 20 |
697 op2 << 4 |
698 static_cast<uint32_t>(rd_lo) << 12 |
699 static_cast<uint32_t>(rd_hi) << 8 |
700 static_cast<uint32_t>(rn) << 16 |
701 static_cast<uint32_t>(rm);
702
703 Emit32(encoding);
704}
705
706
Dave Allison65fcc2c2014-04-28 13:45:27 -0700707void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
708 Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700709 CheckCondition(cond);
710
Andreas Gampec8ccf682014-09-29 20:07:43 -0700711 uint32_t op1 = 2U /* 0b010; */;
712 uint32_t op2 = 0U /* 0b0000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700713 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
714 op1 << 20 |
715 op2 << 4 |
716 static_cast<uint32_t>(rd_lo) << 12 |
717 static_cast<uint32_t>(rd_hi) << 8 |
718 static_cast<uint32_t>(rn) << 16 |
719 static_cast<uint32_t>(rm);
720
721 Emit32(encoding);
722}
723
724
725void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700726 CheckCondition(cond);
727
Andreas Gampec8ccf682014-09-29 20:07:43 -0700728 uint32_t op1 = 1U /* 0b001 */;
729 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700730 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
731 op1 << 20 |
732 op2 << 4 |
733 0xf << 12 |
734 static_cast<uint32_t>(rd) << 8 |
735 static_cast<uint32_t>(rn) << 16 |
736 static_cast<uint32_t>(rm);
737
738 Emit32(encoding);
739}
740
741
742void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700743 CheckCondition(cond);
744
Andreas Gampec8ccf682014-09-29 20:07:43 -0700745 uint32_t op1 = 1U /* 0b001 */;
746 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700747 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
748 op1 << 20 |
749 op2 << 4 |
750 0xf << 12 |
751 static_cast<uint32_t>(rd) << 8 |
752 static_cast<uint32_t>(rn) << 16 |
753 static_cast<uint32_t>(rm);
754
755 Emit32(encoding);
756}
757
758
Roland Levillain51d3fc42014-11-13 14:11:42 +0000759void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
760 CheckCondition(cond);
761 CHECK_LE(lsb, 31U);
762 CHECK(1U <= width && width <= 32U) << width;
763 uint32_t widthminus1 = width - 1;
764 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
765 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
766
767 uint32_t op = 20U /* 0b10100 */;
768 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
769 op << 20 |
770 static_cast<uint32_t>(rn) << 16 |
771 imm3 << 12 |
772 static_cast<uint32_t>(rd) << 8 |
773 imm2 << 6 |
774 widthminus1;
775
776 Emit32(encoding);
777}
778
779
Roland Levillain981e4542014-11-14 11:47:14 +0000780void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
781 CheckCondition(cond);
782 CHECK_LE(lsb, 31U);
783 CHECK(1U <= width && width <= 32U) << width;
784 uint32_t widthminus1 = width - 1;
785 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
786 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
787
788 uint32_t op = 28U /* 0b11100 */;
789 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
790 op << 20 |
791 static_cast<uint32_t>(rn) << 16 |
792 imm3 << 12 |
793 static_cast<uint32_t>(rd) << 8 |
794 imm2 << 6 |
795 widthminus1;
796
797 Emit32(encoding);
798}
799
800
// Word, byte and halfword load/store instructions. Each wrapper forwards to
// EmitLoadStore(cond, load, byte, half, is_signed, rd, ad), which picks the
// 16-bit or 32-bit encoding as appropriate.

// Load word.
void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}


// Store word.
void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}


// Load byte (zero-extended).
void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}


// Store byte.
void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}


// Load halfword (zero-extended).
void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}


// Store halfword.
void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}


// Load byte, sign-extended.
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}


// Load halfword, sign-extended.
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
839
840
// Load doubleword into the consecutive pair (rd, rd+1).
// NOTE(review): no check here that rd+1 is a valid register for the pair
// (e.g. rd even / rd != PC) — presumably callers guarantee it; confirm.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}
844
845
// Load doubleword into the (not necessarily consecutive) registers rd, rd2.
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM:
  // both destination registers are encoded explicitly (rd at <15:12>,
  // rd2 at <11:8>) and the address uses the LDRD/STRD offset form.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
856
857
// Store doubleword from the consecutive pair (rd, rd+1).
// NOTE(review): as with ldrd, the validity of rd+1 is assumed, not checked.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}
861
862
// Store doubleword from the (not necessarily consecutive) registers rd, rd2.
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM:
  // identical to ldrd above except that B20 (the load bit) is clear.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
873
874
// Load multiple registers from `base` using block address mode `am`.
// Thumb LDM cannot encode a single-register list, so that case is lowered
// to a plain LDR with writeback.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the CHECK accepts DB_W but the emitted LDR post-increments
    // the base (pop-style) — confirm this mapping is intended for all callers.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
891
892
// Store multiple registers to `base` using block address mode `am`.
// Thumb STM cannot encode a single-register list, so that case is lowered
// to a plain STR.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): IA maps to a pre-indexed store of -kRegisterSize and
    // IA_W to a plain negative offset — verify this matches the callers'
    // expected base-register updates (looks push-style rather than IA).
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
910
911
// Try to emit VMOV.F32 sd, #s_imm using the VFP 8-bit modified-immediate
// encoding. Returns true and emits the instruction if the float's bit
// pattern fits (low 19 mantissa bits zero and the exponent bits in the
// representable "abbbbbb" form); returns false otherwise, emitting nothing.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign, exponent and the top 4 mantissa bits into the imm8 field.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
925
926
// Try to emit VMOV.F64 dd, #d_imm using the VFP 8-bit modified-immediate
// encoding (double-precision analogue of vmovs above: low 48 mantissa bits
// must be zero and the exponent bits in the representable form). Returns
// true on success; false if the constant cannot be encoded.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign, exponent and the top 4 mantissa bits into the imm8 field.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
940
941
// VFP register moves and arithmetic. Single-precision variants go through
// EmitVFPsss, double-precision through EmitVFPddd; the bit masks select the
// concrete opcode within the VFP data-processing space. Two-operand
// instructions fill the unused first source slot with S0/D0.

void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);  // vmov.f32
}


void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);  // vmov.f64
}


void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);  // vadd.f32
}


void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);  // vadd.f64
}


void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);  // vsub.f32
}


void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);  // vsub.f64
}


void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);  // vmul.f32
}


void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);  // vmul.f64
}


void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);  // vmla.f32 (multiply-accumulate)
}


void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);  // vmla.f64 (multiply-accumulate)
}


void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);  // vmls.f32 (multiply-subtract)
}


void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);  // vmls.f64 (multiply-subtract)
}


void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);  // vdiv.f32
}


void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);  // vdiv.f64
}


void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);  // vabs.f32
}


void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);  // vabs.f64
}


void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);  // vneg.f32
}


void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);  // vneg.f64
}


void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);  // vsqrt.f32
}

void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);  // vsqrt.f64
}
1051
1052
// VFP conversions between single/double precision and signed/unsigned
// 32-bit integers held in VFP registers. EmitVFPsd/EmitVFPds are used when
// source and destination registers are of different widths.

void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);  // f64 -> f32
}


void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);  // f32 -> f64
}


void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);  // f32 -> s32
}


void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);  // f64 -> s32
}


void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);  // s32 -> f32
}


void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);  // s32 -> f64
}


void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);  // f32 -> u32
}


void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);  // f64 -> u32
}


void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);  // u32 -> f32
}


void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);  // u32 -> f64
}
1101
1102
// VFP compares. The *z variants compare against +0.0 (the operand register
// field is unused and filled with S0/D0).

void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);  // vcmp.f32
}


void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);  // vcmp.f64
}


void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);  // vcmp.f32 sd, #0.0
}


void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);  // vcmp.f64 dd, #0.0
}
1121
// Conditional branch to `label`.
// NOTE(review): the DCHECK presumably guards against emitting a branch while
// an IT-block condition is pending (next_condition_ != AL) — confirm against
// the IT-block handling elsewhere in this class.
void Thumb2Assembler::b(Label* label, Condition cond) {
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1126
1127
// Branch with link (call) to `label`.
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);  // link = true.
}
1132
1133
// Branch with link and exchange to `label`; always unconditional (AL).
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);  // link = true, exchange = true.
}
1137
1138
// Mark an exception handler entry point: emits a TST pc, #0 marker
// instruction, then a branch to `label` that is jumped over by an
// unconditional branch, so it is never executed in the normal flow.
// NOTE(review): presumably the unwinder recognizes this pattern; confirm.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1146
1147
1148void Thumb2Assembler::Emit32(int32_t value) {
1149 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1150 buffer_.Emit<int16_t>(value >> 16);
1151 buffer_.Emit<int16_t>(value & 0xffff);
1152}
1153
1154
// Emit a single 16-bit instruction halfword into the assembler buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1159
1160
// Returns true if the given data-processing instruction cannot be encoded
// as a 16-bit Thumb1 instruction and must use a 32-bit Thumb2 encoding.
// The decision considers: forced 32-bit mode, SP-relative ADD/SUB immediate
// special cases, high-register operands, shifted operands, immediate ranges,
// and the Thumb1 rule that 16-bit instructions set flags iff outside an IT
// block (i.e. iff cond == AL). The order of checks is significant.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only CMP, non-flag-setting MOV and in-place non-flag-setting ADD have
  // 16-bit forms that accept high registers (R8-R15).
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
    case ORN:
      // TEQ and ORN have no 16-bit encoding at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register-operand opcodes must be in-place (rd == rn).
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      // Only RSB with a zero immediate (i.e. NEG) has a 16-bit form.
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
          return true;  // Cannot match "setflags".
        }
        if (!IsUint<3>(so.GetImmediate()) && !IsUint<3>(-so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else if (opcode != CMP && ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
        return true;  // Cannot match "setflags" for ADD, SUB or MOV.
      } else {
        // For ADD and SUB allow also negative 8-bit immediate as we will emit the opposite opcode.
        if (!IsUint<8>(so.GetImmediate()) &&
            (opcode == MOV || opcode == CMP || !IsUint<8>(-so.GetImmediate()))) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1322
1323
// Emits a 32-bit Thumb2 data-processing instruction. `cond` is unused:
// conditional execution in Thumb2 comes from a preceding IT block, not from
// the encoding itself. TST/TEQ/CMP/CMN reuse the AND/EOR/SUB/ADD opcodes
// with Rd forced to PC (0b1111); MOV/MVN/ORN encode Rn as PC.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "no mapping".
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the fatal check below.
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    case ORN: thumb_opcode = 3U /* 0b0011 */; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    // ADD/SUB with a plain 12-bit immediate use the T4 (ADDW/SUBW) or
    // flag-setting T3 encoding instead of a modified immediate.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12)) &&
        /* Prefer T3 encoding to T4. */ !ShifterOperandCanAlwaysHold(so.GetImmediate())) {
      if (set_cc != kCcSet) {
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted).
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1409
1410
// Emits a 16-bit (Thumb1) data-processing instruction. ADD/SUB are complex
// enough to be delegated to Emit16BitAddSub. A shifted MOV is converted to
// a 16-bit shift instruction; for the remaining opcodes only two registers
// can be encoded, so rn/rd are adjusted accordingly. The final halfword is
// assembled from dp_opcode / thumb_opcode / rd / rn / immediate fields at
// per-encoding shift positions.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "no mapping".
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1599
1600
1601// ADD and SUB are complex enough to warrant their own emitter.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001602void Thumb2Assembler::Emit16BitAddSub(Condition cond,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001603 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001604 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001605 Register rn,
1606 Register rd,
1607 const ShifterOperand& so) {
1608 uint8_t dp_opcode = 0;
1609 uint8_t opcode_shift = 6;
1610 uint8_t rd_shift = 0;
1611 uint8_t rn_shift = 3;
1612 uint8_t immediate_shift = 0;
1613 bool use_immediate = false;
Vladimir Markof5c09c32015-12-17 12:08:08 +00001614 uint32_t immediate = 0; // Should be at most 10 bits but keep the full immediate for CHECKs.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001615 uint8_t thumb_opcode;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001616
1617 if (so.IsImmediate()) {
1618 use_immediate = true;
1619 immediate = so.GetImmediate();
Vladimir Markof5c09c32015-12-17 12:08:08 +00001620 if (!IsUint<10>(immediate)) {
1621 // Flip ADD/SUB.
1622 opcode = (opcode == ADD) ? SUB : ADD;
1623 immediate = -immediate;
1624 DCHECK(IsUint<10>(immediate)); // More stringent checks below.
1625 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001626 }
1627
1628 switch (opcode) {
1629 case ADD:
1630 if (so.IsRegister()) {
1631 Register rm = so.GetRegister();
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001632 if (rn == rd && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001633 // Can use T2 encoding (allows 4 bit registers)
Andreas Gampec8ccf682014-09-29 20:07:43 -07001634 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001635 opcode_shift = 10;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001636 thumb_opcode = 1U /* 0b0001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001637 // Make Rn also contain the top bit of rd.
1638 rn = static_cast<Register>(static_cast<uint32_t>(rm) |
Andreas Gampec8ccf682014-09-29 20:07:43 -07001639 (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
1640 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001641 } else {
1642 // T1.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001643 DCHECK(!IsHighRegister(rd));
1644 DCHECK(!IsHighRegister(rn));
1645 DCHECK(!IsHighRegister(rm));
1646 // Sets condition codes if and only if outside IT block,
1647 // check that it complies with set_cc.
1648 DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001649 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001650 thumb_opcode = 12U /* 0b01100 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001651 immediate = static_cast<uint32_t>(so.GetRegister());
1652 use_immediate = true;
1653 immediate_shift = 6;
1654 }
1655 } else {
1656 // Immediate.
1657 if (rd == SP && rn == SP) {
1658 // ADD sp, sp, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001659 dp_opcode = 2U /* 0b10 */;
1660 thumb_opcode = 3U /* 0b11 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001661 opcode_shift = 12;
Vladimir Markof5c09c32015-12-17 12:08:08 +00001662 CHECK(IsUint<9>(immediate));
Roland Levillain14d90572015-07-16 10:52:26 +01001663 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001664
1665 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1666 rn = R0;
1667 rd = R0;
1668 rd_shift = 0;
1669 rn_shift = 0;
1670 immediate >>= 2;
1671 } else if (rd != SP && rn == SP) {
1672 // ADD rd, SP, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001673 dp_opcode = 2U /* 0b10 */;
1674 thumb_opcode = 5U /* 0b101 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001675 opcode_shift = 11;
Vladimir Markof5c09c32015-12-17 12:08:08 +00001676 CHECK(IsUint<10>(immediate));
Roland Levillain14d90572015-07-16 10:52:26 +01001677 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001678
1679 // Remove rn from instruction.
1680 rn = R0;
1681 rn_shift = 0;
1682 rd_shift = 8;
1683 immediate >>= 2;
1684 } else if (rn != rd) {
1685 // Must use T1.
Vladimir Markof5c09c32015-12-17 12:08:08 +00001686 CHECK(IsUint<3>(immediate));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001687 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001688 thumb_opcode = 14U /* 0b01110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001689 immediate_shift = 6;
1690 } else {
1691 // T2 encoding.
Vladimir Markof5c09c32015-12-17 12:08:08 +00001692 CHECK(IsUint<8>(immediate));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001693 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001694 thumb_opcode = 6U /* 0b110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001695 rd_shift = 8;
1696 rn_shift = 8;
1697 }
1698 }
1699 break;
1700
1701 case SUB:
1702 if (so.IsRegister()) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001703 // T1.
1704 Register rm = so.GetRegister();
1705 DCHECK(!IsHighRegister(rd));
1706 DCHECK(!IsHighRegister(rn));
1707 DCHECK(!IsHighRegister(rm));
1708 // Sets condition codes if and only if outside IT block,
1709 // check that it complies with set_cc.
1710 DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
1711 opcode_shift = 9;
1712 thumb_opcode = 13U /* 0b01101 */;
1713 immediate = static_cast<uint32_t>(rm);
1714 use_immediate = true;
1715 immediate_shift = 6;
1716 } else {
1717 if (rd == SP && rn == SP) {
1718 // SUB sp, sp, #imm
1719 dp_opcode = 2U /* 0b10 */;
1720 thumb_opcode = 0x61 /* 0b1100001 */;
1721 opcode_shift = 7;
Vladimir Markof5c09c32015-12-17 12:08:08 +00001722 CHECK(IsUint<9>(immediate));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001723 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001724
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001725 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1726 rn = R0;
1727 rd = R0;
1728 rd_shift = 0;
1729 rn_shift = 0;
1730 immediate >>= 2;
1731 } else if (rn != rd) {
1732 // Must use T1.
Vladimir Markof5c09c32015-12-17 12:08:08 +00001733 CHECK(IsUint<3>(immediate));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001734 opcode_shift = 9;
1735 thumb_opcode = 15U /* 0b01111 */;
1736 immediate_shift = 6;
1737 } else {
1738 // T2 encoding.
Vladimir Markof5c09c32015-12-17 12:08:08 +00001739 CHECK(IsUint<8>(immediate));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001740 opcode_shift = 11;
1741 thumb_opcode = 7U /* 0b111 */;
1742 rd_shift = 8;
1743 rn_shift = 8;
1744 }
1745 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001746 break;
1747 default:
1748 LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001749 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001750 }
1751
1752 int16_t encoding = dp_opcode << 14 |
1753 (thumb_opcode << opcode_shift) |
1754 rd << rd_shift |
1755 rn << rn_shift |
1756 (use_immediate ? (immediate << immediate_shift) : 0);
1757
1758 Emit16(encoding);
1759}
1760
1761
1762void Thumb2Assembler::EmitDataProcessing(Condition cond,
1763 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001764 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001765 Register rn,
1766 Register rd,
1767 const ShifterOperand& so) {
1768 CHECK_NE(rd, kNoRegister);
1769 CheckCondition(cond);
1770
1771 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1772 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1773 } else {
1774 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1775 }
1776}
1777
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001778void Thumb2Assembler::EmitShift(Register rd,
1779 Register rm,
1780 Shift shift,
1781 uint8_t amount,
1782 Condition cond,
1783 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001784 CHECK_LT(amount, (1 << 5));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001785 if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
1786 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001787 uint16_t opcode = 0;
1788 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001789 case LSL: opcode = 0U /* 0b00 */; break;
1790 case LSR: opcode = 1U /* 0b01 */; break;
1791 case ASR: opcode = 2U /* 0b10 */; break;
1792 case ROR: opcode = 3U /* 0b11 */; break;
1793 case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001794 default:
1795 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001796 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001797 }
1798 // 32 bit.
1799 int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001800 0xf << 16 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001801 uint32_t imm3 = amount >> 2;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001802 uint32_t imm2 = amount & 3U /* 0b11 */;
Dave Allison45fdb932014-06-25 12:37:10 -07001803 encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
1804 static_cast<int16_t>(rd) << 8 | opcode << 4;
1805 Emit32(encoding);
1806 } else {
1807 // 16 bit shift
1808 uint16_t opcode = 0;
1809 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001810 case LSL: opcode = 0U /* 0b00 */; break;
1811 case LSR: opcode = 1U /* 0b01 */; break;
1812 case ASR: opcode = 2U /* 0b10 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001813 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001814 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1815 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001816 }
1817 int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
1818 static_cast<int16_t>(rd);
1819 Emit16(encoding);
1820 }
1821}
1822
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001823void Thumb2Assembler::EmitShift(Register rd,
1824 Register rn,
1825 Shift shift,
1826 Register rm,
1827 Condition cond,
1828 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001829 CHECK_NE(shift, RRX);
1830 bool must_be_32bit = false;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001831 if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
1832 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001833 must_be_32bit = true;
1834 }
1835
1836 if (must_be_32bit) {
1837 uint16_t opcode = 0;
1838 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001839 case LSL: opcode = 0U /* 0b00 */; break;
1840 case LSR: opcode = 1U /* 0b01 */; break;
1841 case ASR: opcode = 2U /* 0b10 */; break;
1842 case ROR: opcode = 3U /* 0b11 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001843 default:
1844 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001845 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001846 }
1847 // 32 bit.
1848 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001849 0xf << 12 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001850 encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
1851 static_cast<int16_t>(rd) << 8 | opcode << 21;
1852 Emit32(encoding);
1853 } else {
1854 uint16_t opcode = 0;
1855 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001856 case LSL: opcode = 2U /* 0b0010 */; break;
1857 case LSR: opcode = 3U /* 0b0011 */; break;
1858 case ASR: opcode = 4U /* 0b0100 */; break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001859 case ROR: opcode = 7U /* 0b0111 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001860 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001861 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1862 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001863 }
1864 int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
1865 static_cast<int16_t>(rd);
1866 Emit16(encoding);
1867 }
1868}
1869
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001870inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1871 switch (size) {
1872 case kBranch16Bit:
1873 return 2u;
1874 case kBranch32Bit:
1875 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001876
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001877 case kCbxz16Bit:
1878 return 2u;
1879 case kCbxz32Bit:
1880 return 4u;
1881 case kCbxz48Bit:
1882 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001883
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001884 case kLiteral1KiB:
1885 return 2u;
1886 case kLiteral4KiB:
1887 return 4u;
1888 case kLiteral64KiB:
1889 return 8u;
1890 case kLiteral1MiB:
1891 return 10u;
1892 case kLiteralFar:
1893 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001894
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07001895 case kLiteralAddr1KiB:
1896 return 2u;
1897 case kLiteralAddr4KiB:
1898 return 4u;
1899 case kLiteralAddr64KiB:
1900 return 6u;
1901 case kLiteralAddrFar:
1902 return 10u;
1903
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001904 case kLongOrFPLiteral1KiB:
1905 return 4u;
1906 case kLongOrFPLiteral256KiB:
1907 return 10u;
1908 case kLongOrFPLiteralFar:
1909 return 14u;
1910 }
1911 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1912 UNREACHABLE();
1913}
1914
// Size in bytes of the originally emitted encoding (before any size adjustment).
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1918
// Size in bytes of the current encoding (possibly enlarged by AdjustSizeIfNeeded()).
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1922
1923inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1924 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001925 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001926 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1927 return current_code_size & 2;
1928}
1929
// Compute the PC-relative offset to encode for this fixup, given the current
// total code size. Starts from target_ - location_, applies the out-of-line
// adjustment_, subtracts the standard Thumb2 PC bias of 4, and then applies
// per-encoding corrections (extra preceding instructions, literal pool
// padding, and PC rounding for literal loads). The DCHECKs guard against
// int32_t overflow at each step.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward branch: the adjustment pushes the target further away.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward branch: the adjustment pushes the target further back.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      // Plain branches need no extra correction.
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
    case kLiteralAddr1KiB:
    case kLiteralAddr4KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
    case kLiteralAddr64KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
    case kLiteralAddrFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1994
1995inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1996 DCHECK_NE(target_, kUnresolved);
1997 Size old_size = size_;
1998 size_ = new_size;
1999 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
2000 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
2001 if (target_ > location_) {
2002 adjustment_ += adjustment;
2003 }
2004 return adjustment;
2005}
2006
// If the current encoding cannot reach the target at the given code size,
// repeatedly upgrade to the next larger encoding until one fits. The case
// fall-throughs are intentional: after each IncreaseSize() the next case
// re-tests the (now larger) offset. Returns the number of bytes added.
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // Conditional B has a 9-bit range in 16-bit form, unconditional 12-bit.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLiteralAddr1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr64KiB:
      if (IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddrFar);
      FALLTHROUGH_INTENDED;
    case kLiteralAddrFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
2108
// Write the final machine code for this fixup into the buffer at location_,
// using the encoding variant selected by the (already finalized) size_.
// Multi-instruction variants emit helper sequences (CMP, MOVW/MOVT/MOV,
// ADD rX, PC) followed by the branch or load; all stores are 16-bit halfwords,
// with 32-bit encodings stored high halfword first.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Convert B to BL by setting the link bit.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Convert B to BLX.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CBZ/CBNZ out of range: emit CMP Rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // CBZ/CBNZ far out of range: CMP Rn, #0 followed by a 32-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT the full offset into rn; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLiteralAddr1KiB: {
      DCHECK(type_ == kLoadLiteralAddr);
      int16_t encoding = AdrEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteralAddr4KiB: {
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t encoding = AdrEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteralAddr64KiB: {
      // MOVW rn, #offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      break;
    }
    case kLiteralAddrFar: {
      // MOVW+MOVT the full offset into rn; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // MOV IP, #(offset & ~0x3ff); ADD IP, PC; wide/FP load at [IP, #(offset & 0x3ff)].
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW+MOVT the full offset into IP; ADD IP, PC; wide/FP load at [IP, #0].
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2287
Dave Allison65fcc2c2014-04-28 13:45:27 -07002288uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002289 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002290 uint32_t location = buffer_.Size();
2291
2292 // This is always unresolved as it must be a forward branch.
2293 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002294 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002295}
2296
2297
2298// NOTE: this only support immediate offsets, not [rx,ry].
2299// TODO: support [rx,ry] instructions.
// Emits a single load/store (LDR/STR and the byte/half/signed variants),
// choosing a 16-bit Thumb encoding when the registers, offset and addressing
// mode allow it, and falling back to a 32-bit Thumb2 encoding otherwise.
// |load| selects load vs. store, |byte|/|half| select the access size
// (neither set means word), |is_signed| selects sign-extending loads.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  // 16-bit encodings only address low registers for rd.
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // SP and PC bases have dedicated 16-bit forms; any other high base needs 32 bits.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  // Sign-extension, negative offsets and pre/post-indexing have no 16-bit form.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      // 32-bit Thumb2 immediate-offset encoding; size is selected by B21/B22.
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      // NOTE(review): a byte/half access with rn == SP would not fit the 3-bit
      // Rn field used below; presumably callers never request that - verify.
      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative.  The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal form with a sign bit (U) and a 12-bit magnitude.
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        // 32-bit register-offset form; size selected by B21/B22 as above.
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2461
2462
// Emits an LDM/STM (load/store multiple). Uses the dedicated 16-bit PUSH/POP
// encoding when the operation is the SP push/pop idiom, a generic 16-bit
// LDM/STM when possible, and the 32-bit Thumb2 encoding otherwise.
// Only IA and DB address modes (with or without writeback) are encodable.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // PUSH is STMDB SP!, POP is LDMIA SP!.  The 16-bit forms allow low registers
  // plus PC (for POP) or LR (for PUSH) only.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // Any high register in the list forces the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    // W (writeback) is bit 21 of the 32-bit encoding.
    int32_t encoding = B31 | B30 | B29 | B27 |
                    (op << 23) |
                    (load ? B20 : 0) |
                    base << 16 |
                    regs |
                    (w_bit << 21);
    Emit32(encoding);
  } else {
    // 16-bit LDMIA/STMIA with implicit writeback; low registers only.
    int16_t encoding = B15 | B14 |
                    (load ? B11 : 0) |
                    base << 8 |
                    regs;
    Emit16(encoding);
  }
}
2531
// Emits a branch (B, B<cond>, BL or BLX) as a fixup plus a placeholder.
// Backward branches to bound labels are resolved immediately; forward
// branches are threaded onto the label's singly-linked chain.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  // If branches cannot be relocated later, conservatively emit the 32-bit form.
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      use32bit = true;  // BL/BLX have no 16-bit encoding.
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  if (use32bit) {
    Emit16(0);  // Second halfword of the 32-bit placeholder.
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2570
2571
Artem Serovc257da72016-02-02 13:49:43 +00002572void Thumb2Assembler::Emit32Miscellaneous(uint8_t op1,
2573 uint8_t op2,
2574 uint32_t rest_encoding) {
2575 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B23 |
2576 op1 << 20 |
2577 0xf << 12 |
2578 B7 |
2579 op2 << 4 |
2580 rest_encoding;
2581 Emit32(encoding);
2582}
2583
2584
2585void Thumb2Assembler::Emit16Miscellaneous(uint32_t rest_encoding) {
2586 int16_t encoding = B15 | B13 | B12 |
2587 rest_encoding;
2588 Emit16(encoding);
2589}
2590
// CLZ rd, rm - count leading zeros.  Always the 32-bit encoding; note that
// rm is encoded twice (bits [19:16] and [3:0]) as the encoding requires.
void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  int32_t encoding =
      static_cast<uint32_t>(rm) << 16 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rm);
  Emit32Miscellaneous(0b11, 0b00, encoding);
}
2603
2604
// MOVW rd, #imm16 - move a 16-bit immediate into rd, zeroing the top half.
void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  // Always 32 bits, encoding T3. (Other encondings are called MOV, not MOVW.)
  // The immediate is split into the i:imm4:imm3:imm8 fields of the encoding.
  uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
  uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
  uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
  uint32_t imm8 = imm16 & 0xff;
  int32_t encoding = B31 | B30 | B29 | B28 |
                  B25 | B22 |
                  static_cast<uint32_t>(rd) << 8 |
                  i << 26 |
                  imm4 << 16 |
                  imm3 << 12 |
                  imm8;
  Emit32(encoding);
}
2621
2622
// MOVT rd, #imm16 - move a 16-bit immediate into the top half of rd,
// leaving the bottom half unchanged.  Differs from movw() only by B23.
void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  // Always 32 bits.
  // The immediate is split into the i:imm4:imm3:imm8 fields of the encoding.
  uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
  uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
  uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
  uint32_t imm8 = imm16 & 0xff;
  int32_t encoding = B31 | B30 | B29 | B28 |
                  B25 | B23 | B22 |
                  static_cast<uint32_t>(rd) << 8 |
                  i << 26 |
                  imm4 << 16 |
                  imm3 << 12 |
                  imm8;
  Emit32(encoding);
}
2639
2640
// RBIT rd, rm - reverse the bit order of a 32-bit word.  Always the 32-bit
// encoding; rm is encoded twice (bits [19:16] and [3:0]) as required.
void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);
  int32_t encoding =
      static_cast<uint32_t>(rm) << 16 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rm);

  Emit32Miscellaneous(0b01, 0b10, encoding);
}
2656
2657
// Shared emitter for the byte-reverse instructions (REV/REV16/REVSH); |op|
// selects the variant (0b00 = REV, 0b01 = REV16, 0b11 = REVSH) and is used
// unchanged by both the 16-bit and the 32-bit encodings.
void Thumb2Assembler::EmitReverseBytes(Register rd, Register rm,
                                       uint32_t op) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);

  if (!IsHighRegister(rd) && !IsHighRegister(rm) && !force_32bit_) {
    // 16-bit encoding: op goes into bits [7:6].
    uint16_t t1_op = B11 | B9 | (op << 6);
    int16_t encoding = t1_op |
        static_cast<uint16_t>(rm) << 3 |
        static_cast<uint16_t>(rd);
    Emit16Miscellaneous(encoding);
  } else {
    // 32-bit encoding: rm is encoded twice (bits [19:16] and [3:0]).
    int32_t encoding =
        static_cast<uint32_t>(rm) << 16 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rm);
    Emit32Miscellaneous(0b01, op, encoding);
  }
}
2681
2682
2683void Thumb2Assembler::rev(Register rd, Register rm, Condition cond) {
2684 CheckCondition(cond);
2685 EmitReverseBytes(rd, rm, 0b00);
2686}
2687
2688
2689void Thumb2Assembler::rev16(Register rd, Register rm, Condition cond) {
2690 CheckCondition(cond);
2691 EmitReverseBytes(rd, rm, 0b01);
2692}
2693
2694
2695void Thumb2Assembler::revsh(Register rd, Register rm, Condition cond) {
2696 CheckCondition(cond);
2697 EmitReverseBytes(rd, rm, 0b11);
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002698}
2699
2700
Dave Allison65fcc2c2014-04-28 13:45:27 -07002701void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2702 CHECK_NE(rn, kNoRegister);
2703 CHECK_NE(rt, kNoRegister);
2704 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002705 CHECK_LT(imm, (1u << 10));
2706
2707 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2708 static_cast<uint32_t>(rn) << 16 |
2709 static_cast<uint32_t>(rt) << 12 |
2710 0xf << 8 |
2711 imm >> 2;
2712 Emit32(encoding);
2713}
2714
2715
2716void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
2717 ldrex(rt, rn, 0, cond);
2718}
2719
2720
2721void Thumb2Assembler::strex(Register rd,
2722 Register rt,
2723 Register rn,
2724 uint16_t imm,
2725 Condition cond) {
2726 CHECK_NE(rn, kNoRegister);
2727 CHECK_NE(rd, kNoRegister);
2728 CHECK_NE(rt, kNoRegister);
2729 CheckCondition(cond);
2730 CHECK_LT(imm, (1u << 10));
2731
2732 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2733 static_cast<uint32_t>(rn) << 16 |
2734 static_cast<uint32_t>(rt) << 12 |
2735 static_cast<uint32_t>(rd) << 8 |
2736 imm >> 2;
2737 Emit32(encoding);
2738}
2739
2740
// LDREXD rt, rt2, [rn] - load-exclusive of a doubleword into two registers.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);  // The two destination registers must be distinct.
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
                     static_cast<uint32_t>(rn) << 16 |
                     static_cast<uint32_t>(rt) << 12 |
                     static_cast<uint32_t>(rt2) << 8 |
                     B6 | B5 | B4 | B3 | B2 | B1 | B0;  // Fixed 0b01111111 low bits.
  Emit32(encoding);
}
2755
2756
Dave Allison65fcc2c2014-04-28 13:45:27 -07002757void Thumb2Assembler::strex(Register rd,
2758 Register rt,
2759 Register rn,
2760 Condition cond) {
2761 strex(rd, rt, rn, 0, cond);
2762}
2763
2764
// STREXD rd, rt, rt2, [rn] - store-exclusive of a doubleword from two
// registers; rd receives the success status and must not alias the sources.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CHECK_NE(rd, rt);   // Status register must differ from the data registers.
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
                     static_cast<uint32_t>(rn) << 16 |
                     static_cast<uint32_t>(rt) << 12 |
                     static_cast<uint32_t>(rt2) << 8 |
                     B6 | B5 | B4 |
                     static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2783
2784
// CLREX - clear the local processor's exclusive-access monitor.
// All register-like fields are the fixed 0b1111 values of the encoding.
void Thumb2Assembler::clrex(Condition cond) {
  CheckCondition(cond);
  int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
                     B21 | B20 |
                     0xf << 16 |
                     B15 |
                     0xf << 8 |
                     B5 |
                     0xf;
  Emit32(encoding);
}
2796
2797
2798void Thumb2Assembler::nop(Condition cond) {
2799 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002800 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002801 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002802 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002803}
2804
2805
// VMOV sn, rt - move an ARM core register into a single-precision register.
// The S register index is split into its top four bits (Vn) and low bit (N).
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2819
2820
// VMOV rt, sn - move a single-precision register into an ARM core register.
// Same encoding as vmovsr() with the direction bit (B20) set.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B20 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2834
2835
// VMOV sm, sm+1, rt, rt2 - move two core registers into a pair of
// consecutive single-precision registers (hence sm must not be S31).
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // sm+1 is also transferred.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2855
2856
// VMOV rt, rt2, sm, sm+1 - move a pair of consecutive single-precision
// registers into two distinct core registers (direction bit B20 set).
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // sm+1 is also transferred.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Destinations must be distinct.
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2877
2878
// VMOV dm, rt, rt2 - move two core registers into a double-precision
// register.  The D register index is split into its top bit (M) and low
// four bits (Vm).
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2897
2898
// VMOV rt, rt2, dm - move a double-precision register into two distinct
// core registers (direction bit B20 set).
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Destinations must be distinct.
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2918
2919
2920void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2921 const Address& addr = static_cast<const Address&>(ad);
2922 CHECK_NE(sd, kNoSRegister);
2923 CheckCondition(cond);
2924 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2925 B27 | B26 | B24 | B20 |
2926 ((static_cast<int32_t>(sd) & 1)*B22) |
2927 ((static_cast<int32_t>(sd) >> 1)*B12) |
2928 B11 | B9 | addr.vencoding();
2929 Emit32(encoding);
2930}
2931
2932
2933void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2934 const Address& addr = static_cast<const Address&>(ad);
2935 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2936 CHECK_NE(sd, kNoSRegister);
2937 CheckCondition(cond);
2938 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2939 B27 | B26 | B24 |
2940 ((static_cast<int32_t>(sd) & 1)*B22) |
2941 ((static_cast<int32_t>(sd) >> 1)*B12) |
2942 B11 | B9 | addr.vencoding();
2943 Emit32(encoding);
2944}
2945
2946
2947void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2948 const Address& addr = static_cast<const Address&>(ad);
2949 CHECK_NE(dd, kNoDRegister);
2950 CheckCondition(cond);
2951 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2952 B27 | B26 | B24 | B20 |
2953 ((static_cast<int32_t>(dd) >> 4)*B22) |
2954 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2955 B11 | B9 | B8 | addr.vencoding();
2956 Emit32(encoding);
2957}
2958
2959
2960void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2961 const Address& addr = static_cast<const Address&>(ad);
2962 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2963 CHECK_NE(dd, kNoDRegister);
2964 CheckCondition(cond);
2965 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2966 B27 | B26 | B24 |
2967 ((static_cast<int32_t>(dd) >> 4)*B22) |
2968 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2969 B11 | B9 | B8 | addr.vencoding();
2970 Emit32(encoding);
2971}
2972
2973
2974void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
2975 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
2976}
2977
2978
2979void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
2980 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
2981}
2982
2983
2984void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
2985 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
2986}
2987
2988
2989void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
2990 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
2991}
2992
2993
// Shared emitter for VPUSH/VPOP.  |reg| is the raw index of the first S or D
// register, |nregs| the count, |push| selects push vs. pop and |dbl| selects
// double- vs. single-precision.
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // The register count field holds nregs doubled for the D-register form;
  // push and pop differ in the B24 vs. B23|B20 opcode bits.
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     14U /* 0b1110 */ << 28 |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit32(encoding);
}
3018
3019
// Shared emitter for three-operand single-precision VFP instructions.
// Each S register index is split into its top four bits (Vd/Vn/Vm fields)
// and its low bit (the D/N/M bits of the encoding).
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
3036
3037
// Shared emitter for three-operand double-precision VFP instructions.
// Each D register index is split into its low four bits (Vd/Vn/Vm fields)
// and its top bit (the D/N/M bits of the encoding).
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3054
3055
// Shared emitter for VFP instructions with a single-precision destination
// and a double-precision source (e.g. conversions).
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3069
3070
// Shared emitter for VFP instructions with a double-precision destination
// and a single-precision source (e.g. conversions).
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
3084
3085
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  // Copies the FPSCR condition flags into the APSR (Rt field = PC selects
  // the APSR_nzcv destination in this encoding).
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit32(encoding);
}
3095
3096
3097void Thumb2Assembler::svc(uint32_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08003098 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003099 int16_t encoding = B15 | B14 | B12 |
3100 B11 | B10 | B9 | B8 |
3101 imm8;
3102 Emit16(encoding);
3103}
3104
3105
3106void Thumb2Assembler::bkpt(uint16_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08003107 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003108 int16_t encoding = B15 | B13 | B12 |
3109 B11 | B10 | B9 |
3110 imm8;
3111 Emit16(encoding);
3112}
3113
3114// Convert the given IT state to a mask bit given bit 0 of the first
3115// condition and a shift position.
3116static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
3117 switch (s) {
3118 case kItOmitted: return 1 << shift;
3119 case kItThen: return firstcond0 << shift;
3120 case kItElse: return !firstcond0 << shift;
3121 }
3122 return 0;
3123}
3124
3125
3126// Set the IT condition in the given position for the given state. This is used
3127// to check that conditional instructions match the preceding IT statement.
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
// An omitted slot records AL; "else" records the inverse condition, obtained
// by flipping bit 0 of the condition code.
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;
    case kItThen: it_conditions_[index] = cond; break;
    case kItElse:
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
3137
3138
// Emits an IT (If-Then) instruction and records the per-slot conditions so
// that the following conditional instructions can be checked against it.
// |i1|..|i3| describe the optional second to fourth slots of the block.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);  // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;  // Bit 0 of the first condition.

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // Slot 0 always executes under |firstcond|; build the 4-bit mask for the
  // remaining slots, stopping at the first omitted one.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        // All four slots used; terminate the mask with the final 1 bit.
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
                      B11 | B10 | B9 | B8 |
                      firstcond << 4 |
                      mask;
  Emit16(encoding);
}
3173
3174
3175void Thumb2Assembler::cbz(Register rn, Label* label) {
3176 CheckCondition(AL);
3177 if (label->IsBound()) {
3178 LOG(FATAL) << "cbz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00003179 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003180 } else if (IsHighRegister(rn)) {
3181 LOG(FATAL) << "cbz can only be used with low registers";
3182 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003183 } else {
3184 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
3185 label->LinkTo(branchid);
3186 }
3187}
3188
3189
3190void Thumb2Assembler::cbnz(Register rn, Label* label) {
3191 CheckCondition(AL);
3192 if (label->IsBound()) {
3193 LOG(FATAL) << "cbnz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00003194 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003195 } else if (IsHighRegister(rn)) {
3196 LOG(FATAL) << "cbnz can only be used with low registers";
3197 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003198 } else {
3199 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
3200 label->LinkTo(branchid);
3201 }
3202}
3203
3204
// BLX (register): branch with link and exchange to the address in rm.
// 16-bit encoding 0100 0111 1 Rm(4) 000; rm occupies bits 6:3.
void Thumb2Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
3211
3212
// BX (register): branch and exchange to the address in rm.
// 16-bit encoding 0100 0111 0 Rm(4) 000; rm occupies bits 6:3.
void Thumb2Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
3219
3220
3221void Thumb2Assembler::Push(Register rd, Condition cond) {
3222 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
3223}
3224
3225
3226void Thumb2Assembler::Pop(Register rd, Condition cond) {
3227 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
3228}
3229
3230
// Push a set of registers: store-multiple, decrement-before, with SP writeback.
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
3234
3235
// Pop a set of registers: load-multiple, increment-after, with SP writeback.
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
3239
3240
3241void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
3242 if (cond != AL || rd != rm) {
3243 mov(rd, ShifterOperand(rm), cond);
3244 }
3245}
3246
3247
Dave Allison65fcc2c2014-04-28 13:45:27 -07003248void Thumb2Assembler::Bind(Label* label) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00003249 BindLabel(label, buffer_.Size());
Dave Allison65fcc2c2014-04-28 13:45:27 -07003250}
3251
3252
// Logical shift left by immediate: rd = rm << shift_imm (0..31).
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
}
3259
3260
// Logical shift right by immediate: rd = rm >> shift_imm (1..32).
void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
}
3268
3269
// Arithmetic shift right by immediate: rd = rm >> shift_imm (1..32),
// preserving the sign bit.
void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
}
3277
3278
// Rotate right by immediate: rd = rm rotated right by shift_imm (1..31).
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
}
3285
3286
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003287void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003288 CheckCondition(cond);
Vladimir Markof9d741e2015-11-20 15:08:11 +00003289 EmitShift(rd, rm, RRX, 0, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003290}
3291
3292
// Logical shift left by register: rd = rm << rn.
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, cond, set_cc);
}
3298
3299
// Logical shift right by register: rd = rm >> rn.
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, cond, set_cc);
}
3305
3306
// Arithmetic shift right by register: rd = rm >> rn, preserving the sign bit.
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, cond, set_cc);
}
3312
3313
// Rotate right by register: rd = rm rotated right by rn.
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, cond, set_cc);
}
3319
3320
// Re-encode the 32-bit branch instruction `inst` with the given byte `offset`.
// Bit 12 of the instruction selects between a 25-bit offset form and a 21-bit
// offset form (the latter used for conditional branches).
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Offsets are encoded in halfword units.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1/J2 are I1/I2 XNOR'ed with the sign bit.
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    // Here J1/J2 are taken directly from the offset.
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
3358
3359
// Inverse of EncodeBranchOffset(): extract the byte offset encoded in a
// 32-bit branch instruction. Bit 12 again selects the 25-bit vs 21-bit form.
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // Undo the J1/J2 XNOR applied during encoding.
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  // Compensate for the PC-read bias removed during encoding.
  imm32 += 4;
  return imm32;
}
3386
Vladimir Markocf93a5c2015-06-16 11:33:24 +00003387uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
3388 // We can reconstruct the adjustment by going through all the fixups from the beginning
3389 // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
3390 // with increasing old_position, we can use the data from last AdjustedPosition() to
3391 // continue where we left off and the whole loop should be O(m+n) where m is the number
3392 // of positions to adjust and n is the number of fixups.
3393 if (old_position < last_old_position_) {
3394 last_position_adjustment_ = 0u;
3395 last_old_position_ = 0u;
3396 last_fixup_id_ = 0u;
3397 }
3398 while (last_fixup_id_ != fixups_.size()) {
3399 Fixup* fixup = GetFixup(last_fixup_id_);
3400 if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
3401 break;
3402 }
3403 if (fixup->GetSize() != fixup->GetOriginalSize()) {
3404 last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
3405 }
3406 ++last_fixup_id_;
3407 }
3408 last_old_position_ = old_position;
3409 return old_position + last_position_adjustment_;
3410}
3411
// Allocate a new 4- or 8-byte literal holding `data`; ownership stays with
// the assembler. NOTE(review): returning &literals_.back() assumes the
// literals_ container does not relocate existing elements on growth — verify
// against the container type declared in the header.
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  return &literals_.back();
}
3417
// Load a 4-byte literal into core register rt. Emits a placeholder literal
// load that is resolved later by the fixup machinery; until then the payload
// halfword carries the label's link chain.
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  // High registers (and forced 32-bit mode) need the 32-bit load encoding,
  // which also has a larger reach (4KiB vs 1KiB).
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // Store the label's previous link as the placeholder and chain the label
  // to this fixup.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Second halfword placeholder of the 32-bit instruction.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3432
// Load an 8-byte literal into the core register pair rt:rt2 (via a wide
// literal load placeholder, resolved later by the fixup machinery).
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // Store the label's previous link as the placeholder and chain the label.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Second halfword placeholder of the 32-bit instruction.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3444
// Load a 4-byte literal into VFP single-precision register sd (placeholder
// resolved later by the fixup machinery).
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // Store the label's previous link as the placeholder and chain the label.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Second halfword placeholder of the 32-bit instruction.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3455
// Load an 8-byte literal into VFP double-precision register dd (placeholder
// resolved later by the fixup machinery).
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // Store the label's previous link as the placeholder and chain the label.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Second halfword placeholder of the 32-bit instruction.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003466
Dave Allison65fcc2c2014-04-28 13:45:27 -07003467
3468void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
Vladimir Marko449b1092015-09-08 12:16:45 +01003469 Condition cond, SetCc set_cc) {
3470 if (value == 0 && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003471 if (rd != rn) {
3472 mov(rd, ShifterOperand(rn), cond);
3473 }
3474 return;
3475 }
3476 // We prefer to select the shorter code sequence rather than selecting add for
3477 // positive values and sub for negatives ones, which would slightly improve
3478 // the readability of generated code for some constants.
3479 ShifterOperand shifter_op;
Vladimir Markof5c09c32015-12-17 12:08:08 +00003480 if (ShifterOperandCanHold(rd, rn, ADD, value, set_cc, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003481 add(rd, rn, shifter_op, cond, set_cc);
Vladimir Markof5c09c32015-12-17 12:08:08 +00003482 } else if (ShifterOperandCanHold(rd, rn, SUB, -value, set_cc, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003483 sub(rd, rn, shifter_op, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003484 } else {
3485 CHECK(rn != IP);
Vladimir Markof5c09c32015-12-17 12:08:08 +00003486 // If rd != rn, use rd as temp. This alows 16-bit ADD/SUB in more situations than using IP.
3487 Register temp = (rd != rn) ? rd : IP;
Vladimir Markoac6ac102015-12-17 12:14:00 +00003488 if (ShifterOperandCanHold(temp, kNoRegister, MVN, ~value, kCcKeep, &shifter_op)) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003489 mvn(temp, shifter_op, cond, kCcKeep);
3490 add(rd, rn, ShifterOperand(temp), cond, set_cc);
Vladimir Markoac6ac102015-12-17 12:14:00 +00003491 } else if (ShifterOperandCanHold(temp, kNoRegister, MVN, ~(-value), kCcKeep, &shifter_op)) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003492 mvn(temp, shifter_op, cond, kCcKeep);
3493 sub(rd, rn, ShifterOperand(temp), cond, set_cc);
3494 } else if (High16Bits(-value) == 0) {
3495 movw(temp, Low16Bits(-value), cond);
3496 sub(rd, rn, ShifterOperand(temp), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003497 } else {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003498 movw(temp, Low16Bits(value), cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003499 uint16_t value_high = High16Bits(value);
3500 if (value_high != 0) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003501 movt(temp, value_high, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003502 }
Vladimir Markof5c09c32015-12-17 12:08:08 +00003503 add(rd, rn, ShifterOperand(temp), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003504 }
3505 }
3506}
3507
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003508void Thumb2Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
Vladimir Markoac6ac102015-12-17 12:14:00 +00003509 // We prefer to select the shorter code sequence rather than using plain cmp and cmn
3510 // which would slightly improve the readability of generated code for some constants.
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003511 ShifterOperand shifter_op;
Vladimir Markof5c09c32015-12-17 12:08:08 +00003512 if (ShifterOperandCanHold(kNoRegister, rn, CMP, value, kCcSet, &shifter_op)) {
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003513 cmp(rn, shifter_op, cond);
Vladimir Markoac6ac102015-12-17 12:14:00 +00003514 } else if (ShifterOperandCanHold(kNoRegister, rn, CMN, -value, kCcSet, &shifter_op)) {
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003515 cmn(rn, shifter_op, cond);
3516 } else {
3517 CHECK(rn != IP);
Vladimir Markoac6ac102015-12-17 12:14:00 +00003518 if (ShifterOperandCanHold(IP, kNoRegister, MVN, ~value, kCcKeep, &shifter_op)) {
3519 mvn(IP, shifter_op, cond, kCcKeep);
3520 cmp(rn, ShifterOperand(IP), cond);
3521 } else if (ShifterOperandCanHold(IP, kNoRegister, MVN, ~(-value), kCcKeep, &shifter_op)) {
3522 mvn(IP, shifter_op, cond, kCcKeep);
3523 cmn(rn, ShifterOperand(IP), cond);
3524 } else if (High16Bits(-value) == 0) {
3525 movw(IP, Low16Bits(-value), cond);
3526 cmn(rn, ShifterOperand(IP), cond);
3527 } else {
3528 movw(IP, Low16Bits(value), cond);
3529 uint16_t value_high = High16Bits(value);
3530 if (value_high != 0) {
3531 movt(IP, value_high, cond);
3532 }
3533 cmp(rn, ShifterOperand(IP), cond);
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003534 }
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003535 }
3536}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003537
Dave Allison65fcc2c2014-04-28 13:45:27 -07003538void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3539 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003540 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003541 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003542 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003543 mvn(rd, shifter_op, cond);
3544 } else {
3545 movw(rd, Low16Bits(value), cond);
3546 uint16_t value_high = High16Bits(value);
3547 if (value_high != 0) {
3548 movt(rd, value_high, cond);
3549 }
3550 }
3551}
3552
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003553int32_t Thumb2Assembler::GetAllowedLoadOffsetBits(LoadOperandType type) {
3554 switch (type) {
3555 case kLoadSignedByte:
3556 case kLoadSignedHalfword:
3557 case kLoadUnsignedHalfword:
3558 case kLoadUnsignedByte:
3559 case kLoadWord:
3560 // We can encode imm12 offset.
3561 return 0xfffu;
3562 case kLoadSWord:
3563 case kLoadDWord:
3564 case kLoadWordPair:
3565 // We can encode imm8:'00' offset.
3566 return 0xff << 2;
3567 default:
3568 LOG(FATAL) << "UNREACHABLE";
3569 UNREACHABLE();
3570 }
3571}
3572
3573int32_t Thumb2Assembler::GetAllowedStoreOffsetBits(StoreOperandType type) {
3574 switch (type) {
3575 case kStoreHalfword:
3576 case kStoreByte:
3577 case kStoreWord:
3578 // We can encode imm12 offset.
3579 return 0xfff;
3580 case kStoreSWord:
3581 case kStoreDWord:
3582 case kStoreWordPair:
3583 // We can encode imm8:'00' offset.
3584 return 0xff << 2;
3585 default:
3586 LOG(FATAL) << "UNREACHABLE";
3587 UNREACHABLE();
3588 }
3589}
3590
3591bool Thumb2Assembler::CanSplitLoadStoreOffset(int32_t allowed_offset_bits,
3592 int32_t offset,
3593 /*out*/ int32_t* add_to_base,
3594 /*out*/ int32_t* offset_for_load_store) {
3595 int32_t other_bits = offset & ~allowed_offset_bits;
3596 if (ShifterOperandCanAlwaysHold(other_bits) || ShifterOperandCanAlwaysHold(-other_bits)) {
3597 *add_to_base = offset & ~allowed_offset_bits;
3598 *offset_for_load_store = offset & allowed_offset_bits;
3599 return true;
3600 }
3601 return false;
3602}
3603
// Fold the unencodable part of `offset` into `temp` (temp = base + excess)
// and return the residual offset to use with `temp` as the new base. When
// the offset cannot be split, materializes the whole offset in temp instead
// and returns 0.
int32_t Thumb2Assembler::AdjustLoadStoreOffset(int32_t allowed_offset_bits,
                                               Register temp,
                                               Register base,
                                               int32_t offset,
                                               Condition cond) {
  DCHECK_NE(offset & ~allowed_offset_bits, 0);
  int32_t add_to_base, offset_for_load;
  if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
    AddConstant(temp, base, add_to_base, cond, kCcKeep);
    return offset_for_load;
  } else {
    // Unsplittable: build the full offset and add the base to it.
    LoadImmediate(temp, offset, cond);
    add(temp, temp, ShifterOperand(base), cond, kCcKeep);
    return 0;
  }
}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003620
Dave Allison65fcc2c2014-04-28 13:45:27 -07003621// Implementation note: this method must emit at most one instruction when
3622// Address::CanHoldLoadOffsetThumb.
// Load `reg` (or the pair reg,reg+1 for kLoadWordPair) from [base + offset],
// rewriting the base/offset through `reg` itself when the offset does not fit
// the instruction's immediate field.
void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
                                     Register reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    // Inlined AdjustLoadStoreOffset() allows us to pull a few more tricks.
    int32_t allowed_offset_bits = GetAllowedLoadOffsetBits(type);
    DCHECK_NE(offset & ~allowed_offset_bits, 0);
    int32_t add_to_base, offset_for_load;
    if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
      // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
      AddConstant(reg, base, add_to_base, cond, kCcKeep);
      base = reg;
      offset = offset_for_load;
    } else {
      // Avoid clobbering base before it is consumed: use IP only if the
      // destination register aliases the base.
      Register temp = (reg == base) ? IP : reg;
      LoadImmediate(temp, offset, cond);
      // TODO: Implement indexed load (not available for LDRD) and use it here to avoid the ADD.
      // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
      add(reg, reg, ShifterOperand((reg == base) ? IP : base), cond, kCcKeep);
      base = reg;
      offset = 0;
    }
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
3674
Dave Allison65fcc2c2014-04-28 13:45:27 -07003675// Implementation note: this method must emit at most one instruction when
3676// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
// Load single-precision VFP register `reg` from [base + offset], spilling
// the unencodable part of the offset into IP when necessary.
void Thumb2Assembler::LoadSFromOffset(SRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
    CHECK_NE(base, IP);
    // Fold the excess offset into IP and load relative to it.
    offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadSWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
3689
3690
3691// Implementation note: this method must emit at most one instruction when
3692// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
// Load double-precision VFP register `reg` from [base + offset], spilling
// the unencodable part of the offset into IP when necessary.
void Thumb2Assembler::LoadDFromOffset(DRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
    CHECK_NE(base, IP);
    // Fold the excess offset into IP and load relative to it.
    offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadDWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
3705
3706
3707// Implementation note: this method must emit at most one instruction when
3708// Address::CanHoldStoreOffsetThumb.
// Store `reg` (or the pair reg,reg+1 for kStoreWordPair) to [base + offset].
// When the offset does not fit, a temporary base is built in IP — or, if IP
// would collide with the value being stored, in R5/R6 which are spilled to
// the stack around the store.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push moved SP; compensate in the offset.
        offset += kRegisterSize;
      }
    }
    // TODO: Implement indexed store (not available for STRD), inline AdjustLoadStoreOffset()
    // and in the "unsplittable" path get rid of the "add" by using the store indexed instead.
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(type), tmp_reg, base, offset, cond);
    base = tmp_reg;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore a spilled secondary temporary (IP never needs restoring).
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3761
3762
3763// Implementation note: this method must emit at most one instruction when
3764// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
// Store single-precision VFP register `reg` to [base + offset], spilling
// the unencodable part of the offset into IP when necessary.
void Thumb2Assembler::StoreSToOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
    CHECK_NE(base, IP);
    // Fold the excess offset into IP and store relative to it.
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreSWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
3777
3778
3779// Implementation note: this method must emit at most one instruction when
3780// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
// Store double-precision VFP register `reg` to [base + offset], spilling
// the unencodable part of the offset into IP when necessary.
void Thumb2Assembler::StoreDToOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
    CHECK_NE(base, IP);
    // Fold the excess offset into IP and store relative to it.
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreDWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
3793
3794
// Emit a full memory barrier. The scratch register is not needed by the
// Thumb-2 DMB implementation; it is only checked against the expected value.
void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
3799
3800
// Emit a DMB (data memory barrier) with the given option OR-ed into the
// low bits of the encoding.
void Thumb2Assembler::dmb(DmbOptions flavor) {
  int32_t encoding = 0xf3bf8f50;  // dmb in T1 encoding.
  Emit32(encoding | flavor);
}
3805
3806
3807void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003808 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003809 cbz(r, label);
3810 } else {
3811 cmp(r, ShifterOperand(0));
3812 b(label, EQ);
3813 }
3814}
3815
3816
Dave Allison65fcc2c2014-04-28 13:45:27 -07003817void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003818 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003819 cbnz(r, label);
3820 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003821 cmp(r, ShifterOperand(0));
3822 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003823 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003824}
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003825
// Create a jump table for the given labels and emit a placeholder load of
// its address into base_reg; the address is resolved later by the fixup
// machinery. The returned table remains owned by the assembler.
JumpTable* Thumb2Assembler::CreateJumpTable(std::vector<Label*>&& labels, Register base_reg) {
  jump_tables_.emplace_back(std::move(labels));
  JumpTable* table = &jump_tables_.back();
  DCHECK(!table->GetLabel()->IsBound());

  // High registers (and forced 32-bit mode) need the 32-bit encoding, which
  // also has larger reach (4KiB vs 1KiB).
  bool use32bit = IsForced32Bit() || IsHighRegister(base_reg);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteralAddr4KiB : Fixup::kLiteralAddr1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadLiteralAddress(location, base_reg, size));
  // Store the label's previous link as the placeholder and chain the label.
  Emit16(static_cast<uint16_t>(table->GetLabel()->position_));
  table->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Second halfword placeholder of the 32-bit instruction.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());

  return table;
}
3844
// Emit the jump-table dispatch: a 16-bit ADD PC, PC, displacement_reg. The
// anchor label is bound here so table offsets can be computed relative to
// this point after fixup adjustment.
void Thumb2Assembler::EmitJumpTableDispatch(JumpTable* jump_table, Register displacement_reg) {
  CHECK(!IsForced32Bit()) << "Forced 32-bit dispatch not implemented yet";
  // 32-bit ADD doesn't support PC as an input, so we need a two-instruction sequence:
  //   SUB ip, ip, #0
  //   ADD pc, ip, reg
  // TODO: Implement.

  // The anchor's position needs to be fixed up before we can compute offsets - so make it a tracked
  // label.
  BindTrackedLabel(jump_table->GetAnchorLabel());

  add(PC, PC, ShifterOperand(displacement_reg));
}
3858
Dave Allison65fcc2c2014-04-28 13:45:27 -07003859} // namespace arm
3860} // namespace art