blob: 2c73fb80954649150e88b41dc24e6472197635f3 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // Count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward branch: every fixup located between this one and the target depends on it.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward branch: every preceding fixup located at or after the target depends on it.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;
  }
  // Create and fill in the fixup_dependents_.
  assembler->fixup_dependents_.resize(number_of_dependents);
  FixupId* dependents = assembler->fixup_dependents_.data();
  // Second pass: identical iteration order to the counting pass above; write each
  // dependent slot while decrementing dependents_start_ down to its final value.
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
// Bind `label` to `bound_pc`, resolving every Fixup linked through it. The chain of
// linked fixups is threaded through the 16-bit placeholders in buffer_; each placeholder
// is reset to 0 so it can later serve as a marker in AdjustFixups().
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
94
Andreas Gampe7cffc3b2015-10-19 21:31:53 -070095uint32_t Thumb2Assembler::BindLiterals() {
Vladimir Markocf93a5c2015-06-16 11:33:24 +000096 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
97 uint32_t code_size = buffer_.Size();
98 for (Literal& lit : literals_) {
99 Label* label = lit.GetLabel();
100 BindLabel(label, code_size);
101 code_size += lit.GetSize();
102 }
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700103 return code_size;
104}
105
106void Thumb2Assembler::BindJumpTables(uint32_t code_size) {
107 for (JumpTable& table : jump_tables_) {
108 Label* label = table.GetLabel();
109 BindLabel(label, code_size);
110 code_size += table.GetSize();
111 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000112}
113
// If `fixup` must grow at the current code size, apply the growth, propagate the
// adjustment to all dependent fixups and queue any dependent not already queued.
// The 16-bit placeholder in buffer_ at each fixup's location doubles as the
// "already queued" marker: 0 = not queued, non-zero = queued.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);  // Mark as queued.
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
129
// Grow fixups to their final sizes, iterating to a fixed point, and return the
// adjusted total code size. Also rebinds literal/jump-table labels if alignment
// padding shifts them.
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass over all fixups; any that grow enqueue their dependents.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Unmark before recalculation.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && (!literals_.empty() || !jump_tables_.empty())) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
    for (JumpTable& table : jump_tables_) {
      Label* label = table.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
196
// Expand the buffer to `adjusted_code_size` and write out all fixups at their
// final sizes, shifting the non-fixup code between them into place.
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // Both cursors must meet at the front once all data has been relocated.
  CHECK_EQ(src_end, dest_end);
}
227
// Emit the literal pool data after the code, padded to 4-byte alignment.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // 16-bit zero pad to reach 4-byte alignment.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      // Each literal must land exactly where its label was bound.
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
247
// Emit all jump tables after the code/literals, padded to 4-byte alignment.
// Each table entry is a 32-bit offset of the target label relative to the
// table's anchor position + 4.
void Thumb2Assembler::EmitJumpTables() {
  if (!jump_tables_.empty()) {
    // Jump tables require 4 byte alignment. (We don't support byte and half-word jump tables.)
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);
    }
    for (JumpTable& table : jump_tables_) {
      // Bulk ensure capacity, as this may be large.
      size_t orig_size = buffer_.Size();
      size_t required_capacity = orig_size + table.GetSize();
      if (required_capacity > buffer_.Capacity()) {
        buffer_.ExtendCapacity(required_capacity);
      }
#ifndef NDEBUG
      // Tell the buffer's debug checks that capacity was ensured manually above.
      buffer_.has_ensured_capacity_ = true;
#endif

      DCHECK_EQ(static_cast<size_t>(table.GetLabel()->Position()), buffer_.Size());
      int32_t anchor_position = table.GetAnchorLabel()->Position() + 4;

      for (Label* target : table.GetData()) {
        // Ensure that the label was tracked, so that it will have the right position.
        DCHECK(std::find(tracked_labels_.begin(), tracked_labels_.end(), target) !=
               tracked_labels_.end());

        int32_t offset = target->Position() - anchor_position;
        buffer_.Emit<int32_t>(offset);
      }

#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = false;
#endif
      size_t new_size = buffer_.Size();
      DCHECK_LE(new_size - orig_size, table.GetSize());
    }
  }
}
287
// Rewrite the CFI opcode stream, replacing each delayed advance-PC record with an
// advance to its final, post-fixup-adjustment program counter.
void Thumb2Assembler::PatchCFI() {
  if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
    return;
  }

  typedef DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC DelayedAdvancePC;
  const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
  const std::vector<uint8_t>& old_stream = data.first;
  const std::vector<DelayedAdvancePC>& advances = data.second;

  // Refill our data buffer with patched opcodes.
  cfi().ReserveCFIStream(old_stream.size() + advances.size() + 16);
  size_t stream_pos = 0;
  for (const DelayedAdvancePC& advance : advances) {
    DCHECK_GE(advance.stream_pos, stream_pos);
    // Copy old data up to the point where advance was issued.
    cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
    stream_pos = advance.stream_pos;
    // Insert the advance command with its final offset.
    size_t final_pc = GetAdjustedPosition(advance.pc);
    cfi().AdvancePC(final_pc);
  }
  // Copy the final segment if any.
  cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
}
313
// Encode a 16-bit Thumb branch: conditional B<cond> with an 8-bit immediate, or
// unconditional B (cond == AL) with an 11-bit immediate. `offset` is halfword-aligned.
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}
326
// Encode a 32-bit Thumb2 branch: conditional B<cond>.W (20-bit offset) or
// unconditional B.W (cond == AL, 24-bit offset). The J1/J2 bits differ between
// the two forms: raw for conditional, XORed with !S for unconditional.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22)^ s ^ 1) & 1;   // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
349
// Encode a 16-bit CBZ (cond == EQ) or CBNZ (cond == NE): compare `rn` with zero
// and branch forward by `offset` (unsigned, halfword-aligned).
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 9.
}
359
360inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
361 DCHECK(!IsHighRegister(rn));
362 DCHECK(IsUint<8>(value));
363 return B13 | B11 | (rn << 8) | value;
364}
365
// Encode a 16-bit two-register ADD: add rdn, rm. `rdn` may be a high register;
// its bit 3 is relocated to bit 7 of the encoding (across the 4-bit rm field).
inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
  // The high bit of rn is moved across 4-bit rm.
  return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
      (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
}
371
// Encode a 32-bit MOVW: movw rd, #value, loading a 16-bit immediate split into
// the imm4:i:imm3:imm8 fields.
inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
  DCHECK(IsUint<16>(value));
  return B31 | B30 | B29 | B28 | B25 | B22 |
      (static_cast<int32_t>(rd) << 8) |
      ((value & 0xf000) << (16 - 12)) |   // Move imm4 from bits 12-15 to bits 16-19.
      ((value & 0x0800) << (26 - 11)) |   // Move i from bit 11 to bit 26.
      ((value & 0x0700) << (12 - 8)) |    // Move imm3 from bits 8-10 to bits 12-14.
      (value & 0xff);                     // Keep imm8 in bits 0-7.
}
381
382inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
383 DCHECK_EQ(value & 0xffff, 0);
384 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
385 return movw_encoding | B25 | B23;
386}
387
// Encode a 32-bit MOV with a Thumb2 modified immediate: mov rd, #value.
// `value` must be encodable (checked against kInvalidModifiedImmediate).
inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
  uint32_t mod_imm = ModifiedImmediate(value);
  DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
  return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
      (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
}
394
// Encode a 16-bit PC-relative load literal: ldr rt, [pc, #offset].
// `offset` is word-aligned, unsigned, and encoded as imm8 (offset / 4).
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}
401
// Encode a 32-bit PC-relative load literal: ldr.w rt, [pc, #offset].
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}
406
// Encode a 32-bit LDRD: ldrd rt, rt2, [rn, #offset]. Offset-addressing form
// (P=1, U=1, W=0); `offset` is word-aligned and encoded as imm8 (offset / 4).
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}
415
// Encode a 32-bit VLDR (single precision): vldr sd, [rn, #offset].
// `offset` is word-aligned, unsigned (U=1) and encoded as imm8 (offset / 4).
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |   // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |   // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}
426
// Encode a 32-bit VLDR (double precision): vldr dd, [rn, #offset].
// `offset` is word-aligned, unsigned (U=1) and encoded as imm8 (offset / 4).
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |   // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |   // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);
}
437
// Encode a 16-bit LDR (immediate): ldr rt, [rn, #offset], with low registers only.
// `offset` is word-aligned and encoded as imm5 (offset / 4).
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
447
// Select the 32-bit load encoding matching this fixup's literal type
// (LDRD / VLDR single / VLDR double), loading from [rbase, #offset].
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
461
// Encode a 32-bit LDR with a 12-bit unsigned immediate offset: ldr.w rt, [rn, #offset].
inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
}
466
// Encode a 16-bit ADR: adr rd, #offset, with a low register and a word-aligned
// offset encoded as imm8 (offset / 4).
inline int16_t Thumb2Assembler::AdrEncoding16(Register rd, int32_t offset) {
  DCHECK(IsUint<10>(offset));
  DCHECK(IsAligned<4>(offset));
  DCHECK(!IsHighRegister(rd));
  return B15 | B13 | (rd << 8) | (offset >> 2);
}
473
// Encode a 32-bit ADR (add to PC): adr.w rd, #offset, with a 12-bit immediate
// split into the i:imm3:imm8 fields.
inline int32_t Thumb2Assembler::AdrEncoding32(Register rd, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  // Bit 26: offset[11]
  // Bits 14-12: offset[10-8]
  // Bits 7-0: offset[7-0]
  int32_t immediate_mask =
      ((offset & (1 << 11)) << (26 - 11)) |
      ((offset & (7 << 8)) << (12 - 8)) |
      (offset & 0xFF);
  return B31 | B30 | B29 | B28 | B25 | B19 | B18 | B17 | B16 | (rd << 8) | immediate_mask;
}
485
// Final assembly: bind literals and jump tables past the code, grow fixups to
// their final sizes, then emit everything and patch the CFI stream. The pass
// order matters — binding must precede adjustment, adjustment must precede
// any emission.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  uint32_t size_after_literals = BindLiterals();
  BindJumpTables(size_after_literals);
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
  FinalizeTrackedLabels();
  EmitJumpTables();
  PatchCFI();
}
497
// Whether `immediate` can be represented as a Thumb2 modified immediate.
bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
  return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
}
501
Nicolas Geoffray3d1e7882015-02-03 13:59:52 +0000502bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
503 Register rn ATTRIBUTE_UNUSED,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000504 Opcode opcode,
505 uint32_t immediate,
Vladimir Markof5c09c32015-12-17 12:08:08 +0000506 SetCc set_cc,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000507 ShifterOperand* shifter_op) {
508 shifter_op->type_ = ShifterOperand::kImmediate;
509 shifter_op->immed_ = immediate;
510 shifter_op->is_shift_ = false;
511 shifter_op->is_rotate_ = false;
512 switch (opcode) {
513 case ADD:
514 case SUB:
Vladimir Markof5c09c32015-12-17 12:08:08 +0000515 // Less than (or equal to) 12 bits can be done if we don't need to set condition codes.
516 if (immediate < (1 << 12) && set_cc != kCcSet) {
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000517 return true;
518 }
519 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
520
521 case MOV:
522 // TODO: Support less than or equal to 12bits.
523 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
Vladimir Markod2b4ca22015-09-14 15:13:26 +0100524
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000525 case MVN:
526 default:
527 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
528 }
529}
530
// Bitwise AND: rd = rn & so; optionally sets condition codes.
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}
535
536
// Bitwise exclusive OR: rd = rn ^ so; optionally sets condition codes.
void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}
541
542
// Subtract: rd = rn - so; optionally sets condition codes.
void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}
547
548
// Reverse subtract: rd = so - rn; optionally sets condition codes.
void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}
553
554
// Add: rd = rn + so; optionally sets condition codes.
void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}
559
560
// Add with carry; optionally sets condition codes.
void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}
565
566
// Subtract with carry; optionally sets condition codes.
void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}
571
572
// Reverse subtract with carry; optionally sets condition codes.
void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}
577
578
// TST: sets condition codes from rn & so; R0 is a placeholder destination.
void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}
583
584
// TEQ: sets condition codes from rn ^ so; R0 is a placeholder destination.
void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}
589
590
// CMP: compares rn with so, always setting condition codes; R0 is a placeholder.
void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}
594
595
// CMN: compare negative, always setting condition codes; R0 is a placeholder.
void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}
599
600
// Bitwise OR: rd = rn | so; optionally sets condition codes.
void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}
605
606
// Bitwise OR NOT: rd = rn | ~so; optionally sets condition codes.
void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
}
611
612
// Move: rd = so; optionally sets condition codes. R0 is a placeholder source.
void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}
617
618
// Bit clear: rd = rn & ~so; optionally sets condition codes.
void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}
623
624
// Move NOT: rd = ~so; optionally sets condition codes. R0 is a placeholder source.
void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
629
630
// MUL: rd = rn * rm. Uses the 16-bit encoding when rd == rm, both registers are
// low, and 32-bit encodings are not forced; otherwise the 32-bit encoding.
void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
    // 16 bit.
    int16_t encoding = B14 | B9 | B8 | B6 |
        rn << 3 | rd;
    Emit16(encoding);
  } else {
    // 32 bit.
    uint32_t op1 = 0U /* 0b000 */;
    uint32_t op2 = 0U /* 0b00 */;
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
        op1 << 20 |
        B15 | B14 | B13 | B12 |
        op2 << 4 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rn) << 16 |
        static_cast<uint32_t>(rm);

    Emit32(encoding);
  }
}
654
655
// MLA: rd = ra + rn * rm (multiply-accumulate, 32-bit encoding only).
void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                          Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000 */;
  uint32_t op2 = 0U /* 0b00 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(ra) << 12 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
672
673
674void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
675 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700676 CheckCondition(cond);
677
Andreas Gampec8ccf682014-09-29 20:07:43 -0700678 uint32_t op1 = 0U /* 0b000 */;
679 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700680 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
681 op1 << 20 |
682 op2 << 4 |
683 static_cast<uint32_t>(rd) << 8 |
684 static_cast<uint32_t>(ra) << 12 |
685 static_cast<uint32_t>(rn) << 16 |
686 static_cast<uint32_t>(rm);
687
688 Emit32(encoding);
689}
690
691
// SMULL: rd_hi:rd_lo = rn * rm (signed 64-bit multiply).
void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
708
709
// UMULL: rd_hi:rd_lo = rn * rm (unsigned 64-bit multiply).
void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 2U /* 0b010; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
726
727
// SDIV: rd = rn / rm (signed division).
void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
743
744
// UDIV - unsigned integer divide: rd = rn / rm.
// Differs from sdiv only by the extra B21 in the fixed opcode bits.
void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |  // Bits 12-15 are fixed to 0b1111 in this encoding.
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
760
761
// SBFX - signed bit field extract: extract `width` bits starting at `lsb`
// from rn, sign-extend into rd. Requires lsb <= 31 and 1 <= width <= 32.
void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;  // Encoded as width - 1 per the instruction format.
  uint32_t imm2 = lsb & (B1 | B0);  // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 20U /* 0b10100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
781
782
// UBFX - unsigned bit field extract: extract `width` bits starting at `lsb`
// from rn, zero-extend into rd. Requires lsb <= 31 and 1 <= width <= 32.
void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;  // Encoded as width - 1 per the instruction format.
  uint32_t imm2 = lsb & (B1 | B0);  // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 28U /* 0b11100 */;  // Only the opcode differs from sbfx (20U).
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
802
803
Dave Allison65fcc2c2014-04-28 13:45:27 -0700804void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
805 EmitLoadStore(cond, true, false, false, false, rd, ad);
806}
807
808
809void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
810 EmitLoadStore(cond, false, false, false, false, rd, ad);
811}
812
813
814void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
815 EmitLoadStore(cond, true, true, false, false, rd, ad);
816}
817
818
819void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
820 EmitLoadStore(cond, false, true, false, false, rd, ad);
821}
822
823
824void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
825 EmitLoadStore(cond, true, false, true, false, rd, ad);
826}
827
828
829void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
830 EmitLoadStore(cond, false, false, true, false, rd, ad);
831}
832
833
834void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
835 EmitLoadStore(cond, true, true, false, true, rd, ad);
836}
837
838
839void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
840 EmitLoadStore(cond, true, false, true, true, rd, ad);
841}
842
843
844void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
Roland Levillain4af147e2015-04-07 13:54:49 +0100845 ldrd(rd, Register(rd + 1), ad, cond);
846}
847
848
// LDRD - load a doubleword from `ad` into the register pair {rd, rd2}.
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
859
860
861void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
Roland Levillain4af147e2015-04-07 13:54:49 +0100862 strd(rd, Register(rd + 1), ad, cond);
863}
864
865
// STRD - store the register pair {rd, rd2} as a doubleword to `ad`.
// Same encoding as ldrd above minus the load bit (B20).
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
876
877
// LDM - load multiple registers in `regs` from memory at `base`.
// A single-register list is rewritten as a plain LDR because Thumb LDM
// does not support one-register lists.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the fallback emits a post-indexed LDR (load at base, then
    // base += kRegisterSize), i.e. pop-style semantics, despite the DB_W check
    // above. Presumably callers only use DB_W for pop-like sequences -- confirm
    // against call sites before relying on literal LDMDB semantics here.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
894
895
// STM - store multiple registers in `regs` to memory at `base`.
// A single-register list is rewritten as a plain STR because Thumb STM
// does not support one-register lists.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): IA maps to a pre-indexed store at base - kRegisterSize with
    // writeback, IA_W to a plain offset store at base - kRegisterSize. This
    // mapping looks inverted relative to the ARM IA naming -- verify against
    // the call sites that rely on the single-register path.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
913
914
// VMOV.F32 sd, #imm - tries to emit a single-precision immediate move.
// Returns true on success; returns false (emitting nothing) when the float
// cannot be represented as a VFP 8-bit encoded immediate, i.e. when the low
// 19 mantissa bits are non-zero or the exponent field is out of range.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign, a single exponent bit, and the top mantissa bits into imm8.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
928
929
// VMOV.F64 dd, #imm - tries to emit a double-precision immediate move.
// Returns true on success; returns false (emitting nothing) when the double
// cannot be represented as a VFP 8-bit encoded immediate, i.e. when the low
// 48 mantissa bits are non-zero or the exponent field is out of range.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign, a single exponent bit, and the top mantissa bits into imm8.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
943
944
// VMOV.F32 sd, sm - register-to-register copy.
void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}
948
949
// VMOV.F64 dd, dm - register-to-register copy.
void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}
953
954
// VADD.F32 sd = sn + sm.
void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}
959
960
// VADD.F64 dd = dn + dm.
void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}
965
966
// VSUB.F32 sd = sn - sm.
void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}
971
972
// VSUB.F64 dd = dn - dm.
void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}
977
978
// VMUL.F32 sd = sn * sm.
void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}
983
984
// VMUL.F64 dd = dn * dm.
void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}
989
990
// VMLA.F32 - single-precision multiply-accumulate (no extra opcode bits set).
void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}
995
996
// VMLA.F64 - double-precision multiply-accumulate (no extra opcode bits set).
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}
1001
1002
// VMLS.F32 - single-precision multiply-subtract.
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}
1007
1008
// VMLS.F64 - double-precision multiply-subtract.
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
1013
1014
// VDIV.F32 sd = sn / sm.
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}
1019
1020
// VDIV.F64 dd = dn / dm.
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
1025
1026
// VABS.F32 sd = |sm|.
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}
1030
1031
// VABS.F64 dd = |dm|.
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}
1035
1036
// VNEG.F32 sd = -sm.
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}
1040
1041
// VNEG.F64 dd = -dm.
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}
1045
1046
// VSQRT.F32 sd = sqrt(sm).
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}
1050
// VSQRT.F64 dd = sqrt(dm).
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
1054
1055
// VCVT.F32.F64 - convert double dm to single-precision sd.
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}
1059
1060
// VCVT.F64.F32 - convert single sm to double-precision dd.
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}
1064
1065
// VCVT.S32.F32 - convert single sm to a signed 32-bit integer in sd.
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}
1069
1070
// VCVT.S32.F64 - convert double dm to a signed 32-bit integer in sd.
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}
1074
1075
// VCVT.F32.S32 - convert the signed 32-bit integer in sm to single-precision sd.
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}
1079
1080
// VCVT.F64.S32 - convert the signed 32-bit integer in sm to double-precision dd.
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}
1084
1085
// VCVT.U32.F32 - convert single sm to an unsigned 32-bit integer in sd.
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}
1089
1090
// VCVT.U32.F64 - convert double dm to an unsigned 32-bit integer in sd.
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}
1094
1095
// VCVT.F32.U32 - convert the unsigned 32-bit integer in sm to single-precision sd.
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}
1099
1100
// VCVT.F64.U32 - convert the unsigned 32-bit integer in sm to double-precision dd.
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
1104
1105
// VCMP.F32 - compare sd with sm (result goes to the VFP status flags).
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}
1109
1110
// VCMP.F64 - compare dd with dm (result goes to the VFP status flags).
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}
1114
1115
// VCMP.F32 sd, #0.0 - compare sd against zero.
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}
1119
1120
// VCMP.F64 dd, #0.0 - compare dd against zero.
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
1124
// B - conditional branch to `label` (no link, no state exchange).
void Thumb2Assembler::b(Label* label, Condition cond) {
  // Presumably guards against emitting a branch inside an open IT block
  // (next_condition_ tracks the pending condition) -- confirm.
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1129
1130
// BL - branch with link to `label` (third argument of EmitBranch is the link flag).
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
1135
1136
// BLX - branch with link and instruction-set exchange; always unconditional.
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
1140
1141
// Marks an exception handler by emitting a TST pc, #0 marker instruction
// followed by a branch to `label` that is jumped over at runtime (the b(&l)
// skips it), so the handler branch is present in the stream but never taken.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1149
1150
// Emits a 32-bit Thumb2 instruction as two 16-bit halfwords,
// most-significant halfword first.
void Thumb2Assembler::Emit32(int32_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value >> 16);
  buffer_.Emit<int16_t>(value & 0xffff);
}
1156
1157
// Emits a single 16-bit Thumb instruction.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1162
1163
// Decides whether the described data-processing instruction requires a 32-bit
// Thumb2 encoding (returns true) or can use a 16-bit Thumb1 encoding
// (returns false). The decision depends on the opcode, whether flags are set
// (and whether we are inside an IT block, i.e. cond != AL), high-register use,
// and the form/range of the shifter operand.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only these forms have 16-bit encodings that accept high registers.
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
    case ORN:
      // TEQ and ORN have no 16-bit encoding at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register opcodes need rd == rn in the 16-bit form.
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      // Only RSB with a zero immediate (NEG) exists in 16 bits.
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
          return true;  // Cannot match "setflags".
        }
        if (!IsUint<3>(so.GetImmediate()) && !IsUint<3>(-so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else if (opcode != CMP && ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
        return true;  // Cannot match "setflags" for ADD, SUB or MOV.
      } else {
        // For ADD and SUB allow also negative 8-bit immediate as we will emit the oposite opcode.
        if (!IsUint<8>(so.GetImmediate()) &&
            (opcode == MOV || opcode == CMP || !IsUint<8>(-so.GetImmediate()))) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1325
1326
// Emits a 32-bit Thumb2 data-processing instruction. The opcode is mapped to
// the 4-bit Thumb2 opcode field; TST/TEQ/CMP/CMN reuse AND/EOR/SUB/ADD
// encodings with rd forced to PC, and MOV/MVN force rn to PC. Three encoding
// families are supported: 12-bit plain immediate (ADD/SUB T3/T4), modified
// immediate, and (possibly shifted) register.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "no mapping found".
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the fatal check below.
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    case ORN: thumb_opcode = 3U /* 0b0011 */; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12)) &&
        /* Prefer T3 encoding to T4. */ !ShifterOperandCanAlwaysHold(so.GetImmediate())) {
      if (set_cc != kCcSet) {
        // Flag-preserving ADDW/SUBW use different opcode values.
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8 fields.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1412
1413
// Emits a 16-bit Thumb1 data-processing instruction. ADD/SUB are delegated to
// Emit16BitAddSub. Shifted MOVs become 16-bit shift instructions; other
// opcodes are packed into the two-register Thumb1 formats, with special
// high-register forms for CMP and flag-preserving MOV.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "no mapping found".
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the final 16-bit encoding from the fields chosen above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1602
1603
// ADD and SUB are complex enough to warrant their own emitter.
//
// Selects among the 16-bit Thumb ADD/SUB encodings (T1/T2 register forms,
// SP-relative immediate forms, 3-bit and 8-bit immediate forms) based on the
// operand kind, the registers involved and the requested flag-setting
// behavior, then emits the chosen 16-bit instruction.
// An out-of-range immediate (more than 10 bits) is negated and the opcode
// flipped ADD<->SUB; per-encoding CHECKs below enforce the tighter limits.
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  // Fields of the 16-bit instruction; defaults match the common T1 layout.
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 10 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
    if (!IsUint<10>(immediate)) {
      // Flip ADD/SUB.
      opcode = (opcode == ADD) ? SUB : ADD;
      immediate = -immediate;
      DCHECK(IsUint<10>(immediate));  // More stringent checks below.
    }
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
                                     (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1: three low registers; rm is packed into the immediate field.
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          CHECK(IsUint<9>(immediate));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // Immediate is encoded as a multiple of 4.
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          CHECK(IsUint<10>(immediate));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          immediate >>= 2;  // Immediate is encoded as a multiple of 4.
        } else if (rn != rd) {
          // Must use T1: 3-bit immediate form.
          CHECK(IsUint<3>(immediate));
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding: rd == rn, 8-bit immediate.
          CHECK(IsUint<8>(immediate));
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1: three low registers; rm is packed into the immediate field.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          CHECK(IsUint<9>(immediate));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // Immediate is encoded as a multiple of 4.
        } else if (rn != rd) {
          // Must use T1: 3-bit immediate form.
          CHECK(IsUint<3>(immediate));
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding: rd == rn, 8-bit immediate.
          CHECK(IsUint<8>(immediate));
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the final 16-bit instruction from the fields selected above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1763
1764
1765void Thumb2Assembler::EmitDataProcessing(Condition cond,
1766 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001767 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001768 Register rn,
1769 Register rd,
1770 const ShifterOperand& so) {
1771 CHECK_NE(rd, kNoRegister);
1772 CheckCondition(cond);
1773
1774 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1775 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1776 } else {
1777 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1778 }
1779}
1780
// Emits a shift of rm by the constant `amount` into rd.
// The 32-bit encoding is required for high registers, for ROR/RRX (which have
// no 16-bit immediate form), or when the flag-setting behavior cannot be
// expressed by the 16-bit encoding (16-bit shifts set flags iff outside an
// IT block). RRX is encoded as ROR with a zero shift amount.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rm,
                                Shift shift,
                                uint8_t amount,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_LT(amount, (1 << 5));  // Shift amount is a 5-bit field.
  if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (set_cc == kCcSet ? B20 : 0);
    // The 5-bit amount is split into imm3 (bits 14:12) and imm2 (bits 7:6).
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1825
// Emits a shift of rn by the amount held in register rm, into rd.
// RRX has no register-shift form. The 16-bit encoding requires rd == rn,
// all low registers, and flag behavior expressible by the narrow form
// (16-bit shifts set flags iff outside an IT block); anything else falls
// back to the 32-bit encoding.
void Thumb2Assembler::EmitShift(Register rd,
                                Register rn,
                                Shift shift,
                                Register rm,
                                Condition cond,
                                SetCc set_cc) {
  CHECK_NE(shift, RRX);
  bool must_be_32bit = false;
  if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
      ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
        0xf << 12 | (set_cc == kCcSet ? B20 : 0);
    encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 21;
    Emit32(encoding);
  } else {
    // 16-bit register-shift: note the opcode values differ from the 32-bit
    // form (these are the data-processing opcodes of the T1 encoding).
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 2U /* 0b0010 */; break;
      case LSR: opcode = 3U /* 0b0011 */; break;
      case ASR: opcode = 4U /* 0b0100 */; break;
      case ROR: opcode = 7U /* 0b0111 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1872
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001873inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1874 switch (size) {
1875 case kBranch16Bit:
1876 return 2u;
1877 case kBranch32Bit:
1878 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001879
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001880 case kCbxz16Bit:
1881 return 2u;
1882 case kCbxz32Bit:
1883 return 4u;
1884 case kCbxz48Bit:
1885 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001886
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001887 case kLiteral1KiB:
1888 return 2u;
1889 case kLiteral4KiB:
1890 return 4u;
1891 case kLiteral64KiB:
1892 return 8u;
1893 case kLiteral1MiB:
1894 return 10u;
1895 case kLiteralFar:
1896 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001897
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07001898 case kLiteralAddr1KiB:
1899 return 2u;
1900 case kLiteralAddr4KiB:
1901 return 4u;
1902 case kLiteralAddr64KiB:
1903 return 6u;
1904 case kLiteralAddrFar:
1905 return 10u;
1906
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001907 case kLongOrFPLiteral1KiB:
1908 return 4u;
1909 case kLongOrFPLiteral256KiB:
1910 return 10u;
1911 case kLongOrFPLiteralFar:
1912 return 14u;
1913 }
1914 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1915 UNREACHABLE();
1916}
1917
// Returns the byte size this fixup had when first emitted, before any
// size upgrades were applied by AdjustSizeIfNeeded().
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1921
// Returns the current byte size of this fixup (after any size upgrades).
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1925
1926inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1927 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001928 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001929 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1930 return current_code_size & 2;
1931}
1932
// Computes the offset operand for this fixup's instruction: the displacement
// from the instruction's PC value to the target, given the current total code
// size. Accounts for the Thumb2 PC bias of 4, the accumulated adjustment_
// from fixups that changed size between location_ and target_, any extra
// instructions preceding the PC use in multi-instruction sequences, literal
// pool padding, and PC rounding for literal loads.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  // The adjustment_ widens the gap for forward references and is already on
  // the correct side for backward ones, so apply it in the sign's direction.
  if (target_ > location_) {
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
    case kLiteralAddr1KiB:
    case kLiteralAddr4KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
    case kLiteralAddr64KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
    case kLiteralAddrFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1997
// Upgrades this fixup to the (strictly larger) encoding `new_size` and
// returns the number of extra code bytes this requires. For a forward
// reference (target after location) the growth happens before the target,
// so it is accumulated into adjustment_ for GetOffset() to compensate.
inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
  DCHECK_NE(target_, kUnresolved);
  Size old_size = size_;
  size_ = new_size;
  DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
  size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
  if (target_ > location_) {
    adjustment_ += adjustment;
  }
  return adjustment;
}
2009
// Checks whether the current encoding can still reach the target and, if
// not, repeatedly upgrades to the next larger encoding — the deliberate case
// fallthroughs re-test each candidate size in order. Returns the total code
// size increase (0 if the encoding was already sufficient).
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // Conditional 16-bit branches take an 8-bit signed offset (9-bit range
      // in bytes), unconditional ones an 11-bit signed offset (12-bit range).
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLiteralAddr1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr64KiB:
      if (IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddrFar);
      FALLTHROUGH_INTENDED;
    case kLiteralAddrFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
2111
// Writes the final instruction bytes for this fixup into `buffer` at
// location_, using the encoding class selected by AdjustSizeIfNeeded().
// All stores are 16-bit halfwords; 32-bit encodings are stored high
// halfword first, as required for Thumb2 instruction memory layout.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Turn B into BL by setting the link bit.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Turn B into BLX.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      // CBZ/CBNZ in a single 16-bit instruction.
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // Expanded to CMP Rn, #0 followed by a 16-bit conditional branch.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // Expanded to CMP Rn, #0 followed by a 32-bit conditional branch.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      // 16-bit PC-relative load literal.
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW + MOVT to build the full offset, then ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLiteralAddr1KiB: {
      // 16-bit ADR.
      DCHECK(type_ == kLoadLiteralAddr);
      int16_t encoding = AdrEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteralAddr4KiB: {
      // 32-bit ADR.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t encoding = AdrEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteralAddr64KiB: {
      // MOVW rn, #offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      break;
    }
    case kLiteralAddrFar: {
      // MOVW + MOVT to build the full offset, then ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      break;
    }

    case kLongOrFPLiteral1KiB: {
      // Single PC-relative wide/FP load.
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // IP is used as the scratch base register for the far wide/FP loads.
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW + MOVT into IP, then ADD IP, PC and load from [IP].
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2290
Dave Allison65fcc2c2014-04-28 13:45:27 -07002291uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002292 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002293 uint32_t location = buffer_.Size();
2294
2295 // This is always unresolved as it must be a forward branch.
2296 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002297 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002298}
2299
2300
// NOTE: this only supports immediate offsets, not [rx,ry].
// TODO: support [rx,ry] instructions.
// NOTE(review): the non-immediate branch below does appear to handle register
// offsets via encodingThumb() — confirm whether this note is stale.
//
// Shared emitter for LDR/STR and their byte/halfword/signed variants.
// Selects between the 16-bit Thumb encodings and the 32-bit Thumb2 encodings
// based on the registers used, the addressing mode, and the offset's
// range and alignment.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  // 16-bit encodings can only address low registers.
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // SP and PC bases have dedicated 16-bit forms; other high bases need 32 bits.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  // Signed loads, negative offsets and pre/post-index modes have no 16-bit form.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal access; negative offsets use the "down" form (U bit clear).
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2464
2465
// Emits LDM/STM for the given block-address mode, using the 16-bit PUSH/POP
// encoding when possible. Only the IA and DB variants exist in Thumb; DA and
// IB modes are rejected with a fatal error.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // PUSH = STMDB SP!, POP = LDMIA SP!; the 16-bit forms allow low registers
  // plus LR (push) or PC (pop) only.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // Any other high register in the list forces the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                    (op << 23) |
                    (load ? B20 : 0) |
                    base << 16 |
                    regs |
                    (w_bit << 21);
    Emit32(encoding);
  } else {
    // 16-bit LDMIA!/STMIA! with a low base register and low register list.
    int16_t encoding = B15 | B14 |
                    (load ? B11 : 0) |
                    base << 8 |
                    regs;
    Emit16(encoding);
  }
}
2534
// Emits a branch to |label|, reserving space through a Fixup that is resolved
// (and possibly expanded to 32 bits) later. |link| selects a branch-with-link
// (BL/BLX), |x| selects the exchange form (BLX).
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  // Emit the second halfword placeholder of a 32-bit branch.
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2573
2574
Artem Serovc257da72016-02-02 13:49:43 +00002575void Thumb2Assembler::Emit32Miscellaneous(uint8_t op1,
2576 uint8_t op2,
2577 uint32_t rest_encoding) {
2578 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B23 |
2579 op1 << 20 |
2580 0xf << 12 |
2581 B7 |
2582 op2 << 4 |
2583 rest_encoding;
2584 Emit32(encoding);
2585}
2586
2587
2588void Thumb2Assembler::Emit16Miscellaneous(uint32_t rest_encoding) {
2589 int16_t encoding = B15 | B13 | B12 |
2590 rest_encoding;
2591 Emit16(encoding);
2592}
2593
Dave Allison65fcc2c2014-04-28 13:45:27 -07002594void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
2595 CHECK_NE(rd, kNoRegister);
2596 CHECK_NE(rm, kNoRegister);
2597 CheckCondition(cond);
2598 CHECK_NE(rd, PC);
2599 CHECK_NE(rm, PC);
Artem Serovc257da72016-02-02 13:49:43 +00002600 int32_t encoding =
Dave Allison65fcc2c2014-04-28 13:45:27 -07002601 static_cast<uint32_t>(rm) << 16 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002602 static_cast<uint32_t>(rd) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002603 static_cast<uint32_t>(rm);
Artem Serovc257da72016-02-02 13:49:43 +00002604 Emit32Miscellaneous(0b11, 0b00, encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002605}
2606
2607
2608void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
2609 CheckCondition(cond);
Vladimir Markob4536b72015-11-24 13:45:23 +00002610 // Always 32 bits, encoding T3. (Other encondings are called MOV, not MOVW.)
2611 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2612 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2613 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
2614 uint32_t imm8 = imm16 & 0xff;
2615 int32_t encoding = B31 | B30 | B29 | B28 |
2616 B25 | B22 |
2617 static_cast<uint32_t>(rd) << 8 |
2618 i << 26 |
2619 imm4 << 16 |
2620 imm3 << 12 |
2621 imm8;
2622 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002623}
2624
2625
2626void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
2627 CheckCondition(cond);
2628 // Always 32 bits.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002629 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2630 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2631 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002632 uint32_t imm8 = imm16 & 0xff;
2633 int32_t encoding = B31 | B30 | B29 | B28 |
2634 B25 | B23 | B22 |
2635 static_cast<uint32_t>(rd) << 8 |
2636 i << 26 |
2637 imm4 << 16 |
2638 imm3 << 12 |
2639 imm8;
2640 Emit32(encoding);
2641}
2642
2643
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002644void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
2645 CHECK_NE(rd, kNoRegister);
2646 CHECK_NE(rm, kNoRegister);
2647 CheckCondition(cond);
2648 CHECK_NE(rd, PC);
2649 CHECK_NE(rm, PC);
2650 CHECK_NE(rd, SP);
2651 CHECK_NE(rm, SP);
Artem Serovc257da72016-02-02 13:49:43 +00002652 int32_t encoding =
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002653 static_cast<uint32_t>(rm) << 16 |
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002654 static_cast<uint32_t>(rd) << 8 |
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002655 static_cast<uint32_t>(rm);
Artem Serovc257da72016-02-02 13:49:43 +00002656
2657 Emit32Miscellaneous(0b01, 0b10, encoding);
2658}
2659
2660
// Shared emitter for the byte-reverse instructions REV/REV16/REVSH.
// |op| selects the variant (see the rev/rev16/revsh callers: 0b00 = REV,
// 0b01 = REV16, 0b11 = REVSH). Uses the 16-bit encoding when both registers
// are low and 32-bit is not forced, otherwise the 32-bit miscellaneous form.
void Thumb2Assembler::EmitReverseBytes(Register rd, Register rm,
                                       uint32_t op) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);

  if (!IsHighRegister(rd) && !IsHighRegister(rm) && !force_32bit_) {
    // 16-bit encoding: the variant is selected by bits 6-7.
    uint16_t t1_op = B11 | B9 | (op << 6);
    int16_t encoding = t1_op |
        static_cast<uint16_t>(rm) << 3 |
        static_cast<uint16_t>(rd);
    Emit16Miscellaneous(encoding);
  } else {
    // 32-bit encoding: Rm appears in both the Rn and Rm positions.
    int32_t encoding =
        static_cast<uint32_t>(rm) << 16 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rm);
    Emit32Miscellaneous(0b01, op, encoding);
  }
}
2684
2685
// REV: reverse the byte order of the whole word in rm into rd.
void Thumb2Assembler::rev(Register rd, Register rm, Condition cond) {
  CheckCondition(cond);
  EmitReverseBytes(rd, rm, 0b00);
}
2690
2691
// REV16: reverse the byte order within each halfword of rm into rd.
void Thumb2Assembler::rev16(Register rd, Register rm, Condition cond) {
  CheckCondition(cond);
  EmitReverseBytes(rd, rm, 0b01);
}
2696
2697
// REVSH: reverse the bytes of the low halfword of rm and sign-extend into rd.
void Thumb2Assembler::revsh(Register rd, Register rm, Condition cond) {
  CheckCondition(cond);
  EmitReverseBytes(rd, rm, 0b11);
}
2702
2703
Dave Allison65fcc2c2014-04-28 13:45:27 -07002704void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2705 CHECK_NE(rn, kNoRegister);
2706 CHECK_NE(rt, kNoRegister);
2707 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002708 CHECK_LT(imm, (1u << 10));
2709
2710 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2711 static_cast<uint32_t>(rn) << 16 |
2712 static_cast<uint32_t>(rt) << 12 |
2713 0xf << 8 |
2714 imm >> 2;
2715 Emit32(encoding);
2716}
2717
2718
// LDREX rt, [rn]: load-exclusive with a zero offset.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2722
2723
2724void Thumb2Assembler::strex(Register rd,
2725 Register rt,
2726 Register rn,
2727 uint16_t imm,
2728 Condition cond) {
2729 CHECK_NE(rn, kNoRegister);
2730 CHECK_NE(rd, kNoRegister);
2731 CHECK_NE(rt, kNoRegister);
2732 CheckCondition(cond);
2733 CHECK_LT(imm, (1u << 10));
2734
2735 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2736 static_cast<uint32_t>(rn) << 16 |
2737 static_cast<uint32_t>(rt) << 12 |
2738 static_cast<uint32_t>(rd) << 8 |
2739 imm >> 2;
2740 Emit32(encoding);
2741}
2742
2743
// LDREXD rt, rt2, [rn]: load-exclusive doubleword into two distinct
// core registers.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 | B3 | B2 | B1 | B0;
  Emit32(encoding);
}
2758
2759
// STREX rd, rt, [rn]: store-exclusive with a zero offset.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2766
2767
// STREXD rd, rt, rt2, [rn]: store-exclusive doubleword; rd receives the
// success/failure status and must differ from both source registers.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CHECK_NE(rd, rt);
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 |
      static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2786
2787
// CLREX: clear the local processor's exclusive-access monitor.
void Thumb2Assembler::clrex(Condition cond) {
  CheckCondition(cond);
  int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
      B21 | B20 |
      0xf << 16 |
      B15 |
      0xf << 8 |
      B5 |
      0xf;
  Emit32(encoding);
}
2799
2800
2801void Thumb2Assembler::nop(Condition cond) {
2802 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002803 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002804 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002805 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002806}
2807
2808
// VMOV sn, rt: move a core register into a single-precision VFP register.
// The S register number is split into a 4-bit field and its low bit.
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2822
2823
// VMOV rt, sn: move a single-precision VFP register into a core register.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // Same as vmovsr but with the to-core direction bit (B20) set.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B20 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2837
2838
// VMOV sm, sm+1, rt, rt2: move two core registers into a consecutive pair of
// S registers (sm must not be S31 so that sm+1 exists).
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2858
2859
// VMOV rt, rt2, sm, sm+1: move a consecutive pair of S registers into two
// distinct core registers (sm must not be S31 so that sm+1 exists).
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  // Same as vmovsrr but with the to-core direction bit (B20) set.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2880
2881
// VMOV dm, rt, rt2: move two core registers into a double-precision register.
// The D register number is split into its low 4 bits and a high bit.
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2900
2901
// VMOV rt, rt2, dm: move a double-precision register into two distinct core
// registers.
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  // Same as vmovdrr but with the to-core direction bit (B20) set.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2921
2922
2923void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2924 const Address& addr = static_cast<const Address&>(ad);
2925 CHECK_NE(sd, kNoSRegister);
2926 CheckCondition(cond);
2927 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2928 B27 | B26 | B24 | B20 |
2929 ((static_cast<int32_t>(sd) & 1)*B22) |
2930 ((static_cast<int32_t>(sd) >> 1)*B12) |
2931 B11 | B9 | addr.vencoding();
2932 Emit32(encoding);
2933}
2934
2935
2936void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2937 const Address& addr = static_cast<const Address&>(ad);
2938 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2939 CHECK_NE(sd, kNoSRegister);
2940 CheckCondition(cond);
2941 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2942 B27 | B26 | B24 |
2943 ((static_cast<int32_t>(sd) & 1)*B22) |
2944 ((static_cast<int32_t>(sd) >> 1)*B12) |
2945 B11 | B9 | addr.vencoding();
2946 Emit32(encoding);
2947}
2948
2949
2950void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2951 const Address& addr = static_cast<const Address&>(ad);
2952 CHECK_NE(dd, kNoDRegister);
2953 CheckCondition(cond);
2954 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2955 B27 | B26 | B24 | B20 |
2956 ((static_cast<int32_t>(dd) >> 4)*B22) |
2957 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2958 B11 | B9 | B8 | addr.vencoding();
2959 Emit32(encoding);
2960}
2961
2962
2963void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2964 const Address& addr = static_cast<const Address&>(ad);
2965 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2966 CHECK_NE(dd, kNoDRegister);
2967 CheckCondition(cond);
2968 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2969 B27 | B26 | B24 |
2970 ((static_cast<int32_t>(dd) >> 4)*B22) |
2971 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2972 B11 | B9 | B8 | addr.vencoding();
2973 Emit32(encoding);
2974}
2975
2976
// VPUSH of |nregs| consecutive S registers starting at |reg|.
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2980
2981
// VPUSH of |nregs| consecutive D registers starting at |reg|.
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2985
2986
// VPOP of |nregs| consecutive S registers starting at |reg|.
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2990
2991
// VPOP of |nregs| consecutive D registers starting at |reg|.
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2995
2996
// Shared emitter for VPUSH/VPOP. |reg| is the raw number of the first
// register, |nregs| the count, |push| selects push vs pop and |dbl| selects
// D vs S registers.
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // The register count field is in words, so doubled for D registers.
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                    B11 | B9 |
        (dbl ? B8 : 0) |
        (push ? B24 : (B23 | B20)) |
        14U /* 0b1110 */ << 28 |
        nregs << (dbl ? 1 : 0) |
        D << 22 |
        Vd << 12;
  Emit32(encoding);
}
3021
3022
// Shared emitter for three-operand single-precision VFP instructions.
// |opcode| supplies the operation-specific bits; each S register number is
// split into a 4-bit field and a separate low bit.
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
3039
3040
// Shared emitter for three-operand double-precision VFP instructions.
// |opcode| supplies the operation-specific bits; each D register number is
// split into its low 4 bits and a separate high bit.
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3057
3058
// Shared emitter for VFP instructions with a single-precision destination
// and a double-precision source (e.g. conversions).
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3072
3073
// Shared emitter for VFP instructions with a double-precision destination
// and a single-precision source (e.g. conversions).
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
3087
3088
// VMRS APSR_nzcv, FPSCR: copy the floating-point status flags into the
// APSR condition flags (the PC destination encoding selects APSR_nzcv).
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
      (static_cast<int32_t>(PC)*B12) |
      B11 | B9 | B4;
  Emit32(encoding);
}
3098
3099
3100void Thumb2Assembler::svc(uint32_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08003101 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003102 int16_t encoding = B15 | B14 | B12 |
3103 B11 | B10 | B9 | B8 |
3104 imm8;
3105 Emit16(encoding);
3106}
3107
3108
3109void Thumb2Assembler::bkpt(uint16_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08003110 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003111 int16_t encoding = B15 | B13 | B12 |
3112 B11 | B10 | B9 |
3113 imm8;
3114 Emit16(encoding);
3115}
3116
3117// Convert the given IT state to a mask bit given bit 0 of the first
3118// condition and a shift position.
3119static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
3120 switch (s) {
3121 case kItOmitted: return 1 << shift;
3122 case kItThen: return firstcond0 << shift;
3123 case kItElse: return !firstcond0 << shift;
3124 }
3125 return 0;
3126}
3127
3128
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;  // Slot outside the IT block: unconditional.
    case kItThen: it_conditions_[index] = cond; break;   // Slot must use the IT condition itself.
    case kItElse:
      // The inverse condition is the condition with its low bit flipped.
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
3140
3141
// IT (If-Then): makes up to four following instructions conditional.
// Builds the firstcond:mask encoding from the slot states and records the
// per-slot conditions so that CheckCondition() can verify the instructions
// emitted inside the block.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);       // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // The first slot is always "then"; each later slot contributes one mask
  // bit, and a trailing 1 bit terminates the mask.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8 |
      firstcond << 4 |
      mask;
  Emit16(encoding);
}
3176
3177
// CBZ rn, label: compare rn with zero and branch if it is zero.
// The encoding only supports forward branches and low registers.
void Thumb2Assembler::cbz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link the new fixup into the label's chain of unresolved branches.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
    label->LinkTo(branchid);
  }
}
3191
3192
3193void Thumb2Assembler::cbnz(Register rn, Label* label) {
3194 CheckCondition(AL);
3195 if (label->IsBound()) {
3196 LOG(FATAL) << "cbnz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00003197 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003198 } else if (IsHighRegister(rn)) {
3199 LOG(FATAL) << "cbnz can only be used with low registers";
3200 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003201 } else {
3202 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
3203 label->LinkTo(branchid);
3204 }
3205}
3206
3207
3208void Thumb2Assembler::blx(Register rm, Condition cond) {
3209 CHECK_NE(rm, kNoRegister);
3210 CheckCondition(cond);
3211 int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
3212 Emit16(encoding);
3213}
3214
3215
3216void Thumb2Assembler::bx(Register rm, Condition cond) {
3217 CHECK_NE(rm, kNoRegister);
3218 CheckCondition(cond);
3219 int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
3220 Emit16(encoding);
3221}
3222
3223
3224void Thumb2Assembler::Push(Register rd, Condition cond) {
3225 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
3226}
3227
3228
3229void Thumb2Assembler::Pop(Register rd, Condition cond) {
3230 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
3231}
3232
3233
3234void Thumb2Assembler::PushList(RegList regs, Condition cond) {
3235 stm(DB_W, SP, regs, cond);
3236}
3237
3238
3239void Thumb2Assembler::PopList(RegList regs, Condition cond) {
3240 ldm(IA_W, SP, regs, cond);
3241}
3242
3243
3244void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
3245 if (cond != AL || rd != rm) {
3246 mov(rd, ShifterOperand(rm), cond);
3247 }
3248}
3249
3250
Dave Allison65fcc2c2014-04-28 13:45:27 -07003251void Thumb2Assembler::Bind(Label* label) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00003252 BindLabel(label, buffer_.Size());
Dave Allison65fcc2c2014-04-28 13:45:27 -07003253}
3254
3255
3256void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003257 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003258 CHECK_LE(shift_imm, 31u);
Dave Allison45fdb932014-06-25 12:37:10 -07003259 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003260 EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003261}
3262
3263
3264void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003265 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003266 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003267 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003268 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003269 EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003270}
3271
3272
3273void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003274 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003275 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003276 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003277 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003278 EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003279}
3280
3281
3282void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003283 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003284 CHECK(1u <= shift_imm && shift_imm <= 31u);
Dave Allison45fdb932014-06-25 12:37:10 -07003285 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003286 EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003287}
3288
3289
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003290void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003291 CheckCondition(cond);
Vladimir Markof9d741e2015-11-20 15:08:11 +00003292 EmitShift(rd, rm, RRX, 0, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003293}
3294
3295
3296void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003297 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003298 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003299 EmitShift(rd, rm, LSL, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003300}
3301
3302
3303void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003304 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003305 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003306 EmitShift(rd, rm, LSR, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003307}
3308
3309
3310void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003311 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003312 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003313 EmitShift(rd, rm, ASR, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003314}
3315
3316
3317void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003318 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003319 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003320 EmitShift(rd, rm, ROR, rn, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003321}
3322
3323
// Patch a 32-bit Thumb2 branch instruction `inst` with the given byte offset
// and return the updated encoding. Bit 12 of the instruction selects between
// the two offset layouts: with B12 set the instruction carries a 25-bit offset
// (imm10:imm11 with J1/J2 derived from the sign bit), otherwise a 21-bit
// offset (imm6:imm11 with J1/J2 taken directly from the offset).
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Branch targets are halfword-aligned; drop the low bit.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1/J2 are the complements of (I1/I2 XOR sign bit).
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    // In this layout J1/J2 come straight from offset bits 19 and 17.
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
3361
3362
// Inverse of EncodeBranchOffset(): extract the signed byte offset from a
// 32-bit Thumb2 branch instruction. Bit 12 again selects between the 25-bit
// (imm10:imm11) and 21-bit (imm6:imm11) offset layouts.
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // Recover I1/I2 from J1/J2 and the sign bit (inverse of the encoding).
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  imm32 += 4;  // Undo the PC-read adjustment applied when encoding.
  return imm32;
}
3389
// Translate a pre-fixup-expansion buffer position into its final position,
// accounting for all fixups before it that grew beyond their original size.
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Queries went backwards; restart the incremental scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      // This fixup (and all later ones) lies at or after the query position.
      break;
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      // Fixup was expanded; every later position shifts by the growth.
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3414
3415Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
3416 DCHECK(size == 4u || size == 8u) << size;
3417 literals_.emplace_back(size, data);
3418 return &literals_.back();
3419}
3420
3421void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
3422 DCHECK_EQ(literal->GetSize(), 4u);
3423 DCHECK(!literal->GetLabel()->IsBound());
3424 bool use32bit = IsForced32Bit() || IsHighRegister(rt);
3425 uint32_t location = buffer_.Size();
3426 Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
3427 FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
3428 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3429 literal->GetLabel()->LinkTo(fixup_id);
3430 if (use32bit) {
3431 Emit16(0);
3432 }
3433 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3434}
3435
3436void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
3437 DCHECK_EQ(literal->GetSize(), 8u);
3438 DCHECK(!literal->GetLabel()->IsBound());
3439 uint32_t location = buffer_.Size();
3440 FixupId fixup_id =
3441 AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
3442 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3443 literal->GetLabel()->LinkTo(fixup_id);
3444 Emit16(0);
3445 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3446}
3447
3448void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
3449 DCHECK_EQ(literal->GetSize(), 4u);
3450 DCHECK(!literal->GetLabel()->IsBound());
3451 uint32_t location = buffer_.Size();
3452 FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
3453 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3454 literal->GetLabel()->LinkTo(fixup_id);
3455 Emit16(0);
3456 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3457}
3458
3459void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
3460 DCHECK_EQ(literal->GetSize(), 8u);
3461 DCHECK(!literal->GetLabel()->IsBound());
3462 uint32_t location = buffer_.Size();
3463 FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
3464 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3465 literal->GetLabel()->LinkTo(fixup_id);
3466 Emit16(0);
3467 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3468}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003469
Dave Allison65fcc2c2014-04-28 13:45:27 -07003470
// Add an arbitrary 32-bit constant to rn, writing the result to rd. Chooses
// the shortest sequence: direct ADD/SUB immediate, MVN + ADD/SUB, or a full
// MOVW(+MOVT) materialization followed by a register ADD/SUB.
void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                  Condition cond, SetCc set_cc) {
  // Adding 0 without needing flags degenerates to a move (or nothing).
  if (value == 0 && set_cc != kCcSet) {
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, set_cc, &shifter_op)) {
    add(rd, rn, shifter_op, cond, set_cc);
  } else if (ShifterOperandCanHold(rd, rn, SUB, -value, set_cc, &shifter_op)) {
    sub(rd, rn, shifter_op, cond, set_cc);
  } else {
    CHECK(rn != IP);  // IP may be needed as the scratch register below.
    // If rd != rn, use rd as temp. This allows 16-bit ADD/SUB in more situations than using IP.
    Register temp = (rd != rn) ? rd : IP;
    if (ShifterOperandCanHold(temp, kNoRegister, MVN, ~value, kCcKeep, &shifter_op)) {
      // MVN can materialize `value` in one instruction.
      mvn(temp, shifter_op, cond, kCcKeep);
      add(rd, rn, ShifterOperand(temp), cond, set_cc);
    } else if (ShifterOperandCanHold(temp, kNoRegister, MVN, ~(-value), kCcKeep, &shifter_op)) {
      // MVN can materialize `-value`; subtract it instead.
      mvn(temp, shifter_op, cond, kCcKeep);
      sub(rd, rn, ShifterOperand(temp), cond, set_cc);
    } else if (High16Bits(-value) == 0) {
      // -value fits in 16 bits: one MOVW, then SUB.
      movw(temp, Low16Bits(-value), cond);
      sub(rd, rn, ShifterOperand(temp), cond, set_cc);
    } else {
      // General case: build the full constant with MOVW(+MOVT), then ADD.
      movw(temp, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(temp, value_high, cond);
      }
      add(rd, rn, ShifterOperand(temp), cond, set_cc);
    }
  }
}
3510
// Compare rn against an arbitrary 32-bit constant, using IP as a scratch
// register when the constant cannot be encoded directly in CMP/CMN/MVN.
void Thumb2Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
  // We prefer to select the shorter code sequence rather than using plain cmp and cmn
  // which would slightly improve the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(kNoRegister, rn, CMP, value, kCcSet, &shifter_op)) {
    cmp(rn, shifter_op, cond);
  } else if (ShifterOperandCanHold(kNoRegister, rn, CMN, -value, kCcSet, &shifter_op)) {
    // CMN with the negated value sets the same flags as CMP with `value`.
    cmn(rn, shifter_op, cond);
  } else {
    CHECK(rn != IP);  // IP is used as the scratch register below.
    if (ShifterOperandCanHold(IP, kNoRegister, MVN, ~value, kCcKeep, &shifter_op)) {
      // MVN can materialize `value` in one instruction.
      mvn(IP, shifter_op, cond, kCcKeep);
      cmp(rn, ShifterOperand(IP), cond);
    } else if (ShifterOperandCanHold(IP, kNoRegister, MVN, ~(-value), kCcKeep, &shifter_op)) {
      // MVN can materialize `-value`; compare via CMN.
      mvn(IP, shifter_op, cond, kCcKeep);
      cmn(rn, ShifterOperand(IP), cond);
    } else if (High16Bits(-value) == 0) {
      // -value fits in 16 bits: one MOVW, then CMN.
      movw(IP, Low16Bits(-value), cond);
      cmn(rn, ShifterOperand(IP), cond);
    } else {
      // General case: build the full constant with MOVW(+MOVT), then CMP.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      cmp(rn, ShifterOperand(IP), cond);
    }
  }
}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003540
Dave Allison65fcc2c2014-04-28 13:45:27 -07003541void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3542 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003543 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003544 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003545 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003546 mvn(rd, shifter_op, cond);
3547 } else {
3548 movw(rd, Low16Bits(value), cond);
3549 uint16_t value_high = High16Bits(value);
3550 if (value_high != 0) {
3551 movt(rd, value_high, cond);
3552 }
3553 }
3554}
3555
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003556int32_t Thumb2Assembler::GetAllowedLoadOffsetBits(LoadOperandType type) {
3557 switch (type) {
3558 case kLoadSignedByte:
3559 case kLoadSignedHalfword:
3560 case kLoadUnsignedHalfword:
3561 case kLoadUnsignedByte:
3562 case kLoadWord:
3563 // We can encode imm12 offset.
3564 return 0xfffu;
3565 case kLoadSWord:
3566 case kLoadDWord:
3567 case kLoadWordPair:
3568 // We can encode imm8:'00' offset.
3569 return 0xff << 2;
3570 default:
3571 LOG(FATAL) << "UNREACHABLE";
3572 UNREACHABLE();
3573 }
3574}
3575
3576int32_t Thumb2Assembler::GetAllowedStoreOffsetBits(StoreOperandType type) {
3577 switch (type) {
3578 case kStoreHalfword:
3579 case kStoreByte:
3580 case kStoreWord:
3581 // We can encode imm12 offset.
3582 return 0xfff;
3583 case kStoreSWord:
3584 case kStoreDWord:
3585 case kStoreWordPair:
3586 // We can encode imm8:'00' offset.
3587 return 0xff << 2;
3588 default:
3589 LOG(FATAL) << "UNREACHABLE";
3590 UNREACHABLE();
3591 }
3592}
3593
3594bool Thumb2Assembler::CanSplitLoadStoreOffset(int32_t allowed_offset_bits,
3595 int32_t offset,
3596 /*out*/ int32_t* add_to_base,
3597 /*out*/ int32_t* offset_for_load_store) {
3598 int32_t other_bits = offset & ~allowed_offset_bits;
3599 if (ShifterOperandCanAlwaysHold(other_bits) || ShifterOperandCanAlwaysHold(-other_bits)) {
3600 *add_to_base = offset & ~allowed_offset_bits;
3601 *offset_for_load_store = offset & allowed_offset_bits;
3602 return true;
3603 }
3604 return false;
3605}
3606
3607int32_t Thumb2Assembler::AdjustLoadStoreOffset(int32_t allowed_offset_bits,
3608 Register temp,
3609 Register base,
3610 int32_t offset,
3611 Condition cond) {
3612 DCHECK_NE(offset & ~allowed_offset_bits, 0);
3613 int32_t add_to_base, offset_for_load;
3614 if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
3615 AddConstant(temp, base, add_to_base, cond, kCcKeep);
3616 return offset_for_load;
3617 } else {
3618 LoadImmediate(temp, offset, cond);
3619 add(temp, temp, ShifterOperand(base), cond, kCcKeep);
3620 return 0;
3621 }
3622}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003623
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb.
void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
                                     Register reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    // Inlined AdjustLoadStoreOffset() allows us to pull a few more tricks.
    int32_t allowed_offset_bits = GetAllowedLoadOffsetBits(type);
    DCHECK_NE(offset & ~allowed_offset_bits, 0);
    int32_t add_to_base, offset_for_load;
    if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
      // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
      // (Safe because the load overwrites reg anyway.)
      AddConstant(reg, base, add_to_base, cond, kCcKeep);
      base = reg;
      offset = offset_for_load;
    } else {
      // Avoid clobbering base before it is used: if reg aliases base, build
      // the offset in IP instead.
      Register temp = (reg == base) ? IP : reg;
      LoadImmediate(temp, offset, cond);
      // TODO: Implement indexed load (not available for LDRD) and use it here to avoid the ADD.
      // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
      add(reg, reg, ShifterOperand((reg == base) ? IP : base), cond, kCcKeep);
      base = reg;
      offset = 0;
    }
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
3677
Dave Allison65fcc2c2014-04-28 13:45:27 -07003678// Implementation note: this method must emit at most one instruction when
3679// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3680void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3681 Register base,
3682 int32_t offset,
3683 Condition cond) {
3684 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3685 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003686 offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadSWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003687 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003688 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003689 DCHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003690 vldrs(reg, Address(base, offset), cond);
3691}
3692
3693
3694// Implementation note: this method must emit at most one instruction when
3695// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3696void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3697 Register base,
3698 int32_t offset,
3699 Condition cond) {
3700 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3701 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003702 offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadDWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003703 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003704 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003705 DCHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003706 vldrd(reg, Address(base, offset), cond);
3707}
3708
3709
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;  // Scratch for offset adjustment, if needed.
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push moved SP down by one slot; compensate the offset.
        offset += kRegisterSize;
      }
    }
    // TODO: Implement indexed store (not available for STRD), inline AdjustLoadStoreOffset()
    // and in the "unsplittable" path get rid of the "add" by using the store indexed instead.
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(type), tmp_reg, base, offset, cond);
    base = tmp_reg;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the callee-saved scratch register if one was pushed above.
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3764
3765
3766// Implementation note: this method must emit at most one instruction when
3767// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3768void Thumb2Assembler::StoreSToOffset(SRegister reg,
3769 Register base,
3770 int32_t offset,
3771 Condition cond) {
3772 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3773 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003774 offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreSWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003775 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003776 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003777 DCHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003778 vstrs(reg, Address(base, offset), cond);
3779}
3780
3781
3782// Implementation note: this method must emit at most one instruction when
3783// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3784void Thumb2Assembler::StoreDToOffset(DRegister reg,
3785 Register base,
3786 int32_t offset,
3787 Condition cond) {
3788 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3789 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003790 offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreDWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003791 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003792 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003793 DCHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003794 vstrd(reg, Address(base, offset), cond);
3795}
3796
3797
3798void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
3799 CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003800 dmb(SY);
3801}
3802
3803
3804void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003805 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3806 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003807}
3808
3809
3810void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003811 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003812 cbz(r, label);
3813 } else {
3814 cmp(r, ShifterOperand(0));
3815 b(label, EQ);
3816 }
3817}
3818
3819
Dave Allison65fcc2c2014-04-28 13:45:27 -07003820void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003821 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003822 cbnz(r, label);
3823 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003824 cmp(r, ShifterOperand(0));
3825 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003826 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003827}
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003828
3829JumpTable* Thumb2Assembler::CreateJumpTable(std::vector<Label*>&& labels, Register base_reg) {
3830 jump_tables_.emplace_back(std::move(labels));
3831 JumpTable* table = &jump_tables_.back();
3832 DCHECK(!table->GetLabel()->IsBound());
3833
3834 bool use32bit = IsForced32Bit() || IsHighRegister(base_reg);
3835 uint32_t location = buffer_.Size();
3836 Fixup::Size size = use32bit ? Fixup::kLiteralAddr4KiB : Fixup::kLiteralAddr1KiB;
3837 FixupId fixup_id = AddFixup(Fixup::LoadLiteralAddress(location, base_reg, size));
3838 Emit16(static_cast<uint16_t>(table->GetLabel()->position_));
3839 table->GetLabel()->LinkTo(fixup_id);
3840 if (use32bit) {
3841 Emit16(0);
3842 }
3843 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3844
3845 return table;
3846}
3847
3848void Thumb2Assembler::EmitJumpTableDispatch(JumpTable* jump_table, Register displacement_reg) {
3849 CHECK(!IsForced32Bit()) << "Forced 32-bit dispatch not implemented yet";
3850 // 32-bit ADD doesn't support PC as an input, so we need a two-instruction sequence:
3851 // SUB ip, ip, #0
3852 // ADD pc, ip, reg
3853 // TODO: Implement.
3854
3855 // The anchor's position needs to be fixed up before we can compute offsets - so make it a tracked
3856 // label.
3857 BindTrackedLabel(jump_table->GetAnchorLabel());
3858
3859 add(PC, PC, ShifterOperand(displacement_reg));
3860}
3861
Dave Allison65fcc2c2014-04-28 13:45:27 -07003862} // namespace arm
3863} // namespace art