blob: 7ad5b440e028c9177cab58e8e4688306c2792780 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
// Builds the reverse-dependency table for all Fixups of the given assembler.
// After this runs, each Fixup's dependents_start_/dependents_count_ describe a
// slice of assembler->fixup_dependents_ listing the Fixups that must be
// re-examined when this Fixup grows.
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)

  // Pass 1: count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward branch: every Fixup located between this one and its target
      // would shift if this one grows, so each of them depends on it.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward branch: every Fixup from the target up to (and including)
      // this one depends on it.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    // No branches cross any other Fixup; nothing to allocate.
    return;
  }
  // Pass 2: create and fill in the fixup_dependents_. The same iteration order
  // as pass 1 is used, decrementing each dependents_start_ back to the true
  // start index as entries are written.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
// Binds `label` to `bound_pc`, resolving every Fixup in the label's linked
// chain. While unbound, a label's position_ holds the id of the first Fixup in
// the chain and each Fixup's 16-bit placeholder in the code buffer holds the
// id of the next one; the placeholders are reset to 0 here (they are reused
// later as "queued" flags by AdjustFixups).
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
94
// Binds each literal's label to its position after the end of the code,
// laying the literals out sequentially. Returns the total size of code plus
// literals (without any alignment padding).
uint32_t Thumb2Assembler::BindLiterals() {
  // We don't add the padding here, that's done only after adjusting the Fixup sizes.
  uint32_t code_size = buffer_.Size();
  for (Literal& lit : literals_) {
    Label* label = lit.GetLabel();
    BindLabel(label, code_size);
    code_size += lit.GetSize();
  }
  return code_size;
}
105
106void Thumb2Assembler::BindJumpTables(uint32_t code_size) {
107 for (JumpTable& table : jump_tables_) {
108 Label* label = table.GetLabel();
109 BindLabel(label, code_size);
110 code_size += table.GetSize();
111 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000112}
113
// Checks whether `fixup` still fits in its current instruction size given the
// current (adjusted) code size; if it must grow, updates *current_code_size
// and enqueues all dependent Fixups for recalculation.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      // The 16-bit placeholder in the code buffer doubles as an "already
      // queued" flag, so each dependent is enqueued at most once.
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
129
// Iterates all Fixups to a fixed point, growing any 16-bit instruction whose
// target is out of range into its 32-bit form and propagating the resulting
// shifts to dependent Fixups. Returns the final code size (before literal
// alignment padding is actually emitted), and re-binds literal/jump-table
// labels to account for that padding.
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass: adjust every fixup once; growth enqueues dependents.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" flag.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && (!literals_.empty() || !jump_tables_.empty())) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding. The pool needs 4-byte alignment,
  // so if the code size ends at a 2 (mod 4) boundary a 2-byte pad is implied.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
    for (JumpTable& table : jump_tables_) {
      Label* label = table.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
196
// Expands the code buffer to the adjusted size, moving each stretch of
// non-fixup code to its final position and emitting each Fixup's final
// encoding. Processing runs back-to-front so that data is moved toward the
// (larger) end of the buffer without overwriting anything still needed.
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // The remaining prefix of the buffer needed no moving at all.
  CHECK_EQ(src_end, dest_end);
}
227
// Appends the literal pool to the code buffer, inserting a 2-byte pad first
// if needed to reach 4-byte alignment.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Alignment pad.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      // The label was pre-bound (and padding-adjusted) to exactly this spot.
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
247
// Appends all jump tables to the code buffer (after code and literals). Each
// table entry is the signed 32-bit offset of a target label relative to the
// table's anchor position + 4.
void Thumb2Assembler::EmitJumpTables() {
  if (!jump_tables_.empty()) {
    // Jump tables require 4 byte alignment. (We don't support byte and half-word jump tables.)
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Alignment pad.
    }
    for (JumpTable& table : jump_tables_) {
      // Bulk ensure capacity, as this may be large.
      size_t orig_size = buffer_.Size();
      buffer_.ExtendCapacity(orig_size + table.GetSize());
#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = true;
#endif

      DCHECK_EQ(static_cast<size_t>(table.GetLabel()->Position()), buffer_.Size());
      // Offsets are relative to the anchor PC, which reads as anchor + 4.
      int32_t anchor_position = table.GetAnchorLabel()->Position() + 4;

      for (Label* target : table.GetData()) {
        // Ensure that the label was tracked, so that it will have the right position.
        DCHECK(std::find(tracked_labels_.begin(), tracked_labels_.end(), target) !=
                   tracked_labels_.end());

        int32_t offset = target->Position() - anchor_position;
        buffer_.Emit<int32_t>(offset);
      }

#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = false;
#endif
      size_t new_size = buffer_.Size();
      DCHECK_LE(new_size - orig_size, table.GetSize());
    }
  }
}
284
// Rewrites the CFI stream, replacing each delayed advance-PC marker with an
// AdvancePC at the instruction's final (post-fixup-adjustment) position.
void Thumb2Assembler::PatchCFI() {
  if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
    return;  // Nothing was delayed; the stream is already final.
  }

  typedef DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC DelayedAdvancePC;
  const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
  const std::vector<uint8_t>& old_stream = data.first;
  const std::vector<DelayedAdvancePC>& advances = data.second;

  // Refill our data buffer with patched opcodes.
  // Reserve a little extra: each advance may encode slightly larger.
  cfi().ReserveCFIStream(old_stream.size() + advances.size() + 16);
  size_t stream_pos = 0;
  for (const DelayedAdvancePC& advance : advances) {
    DCHECK_GE(advance.stream_pos, stream_pos);
    // Copy old data up to the point where advance was issued.
    cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
    stream_pos = advance.stream_pos;
    // Insert the advance command with its final offset.
    size_t final_pc = GetAdjustedPosition(advance.pc);
    cfi().AdvancePC(final_pc);
  }
  // Copy the final segment if any.
  cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
}
310
// Encodes a 16-bit Thumb branch: conditional (imm8) when cond != AL,
// unconditional (imm11) otherwise. `offset` is the already-computed
// PC-relative byte offset.
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}

// Encodes a 32-bit Thumb branch: conditional (20-bit range) when cond != AL,
// unconditional (24-bit range) otherwise. The immediate is split across
// S, J1, J2, imm6/imm10 and imm11 fields.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    // For the unconditional form, J1/J2 are I1/I2 XORed with the inverted sign.
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22)^ s ^ 1) & 1;   // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
346
// Encodes a 16-bit CBZ/CBNZ: compare `rn` (low register only) against zero and
// branch by the unsigned byte `offset` (forward only). EQ selects CBZ, NE CBNZ.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 11
}
356
357inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
358 DCHECK(!IsHighRegister(rn));
359 DCHECK(IsUint<8>(value));
360 return B13 | B11 | (rn << 8) | value;
361}
362
363inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
364 // The high bit of rn is moved across 4-bit rm.
365 return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
366 (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
367}
368
369inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
370 DCHECK(IsUint<16>(value));
371 return B31 | B30 | B29 | B28 | B25 | B22 |
372 (static_cast<int32_t>(rd) << 8) |
373 ((value & 0xf000) << (16 - 12)) | // Move imm4 from bits 12-15 to bits 16-19.
374 ((value & 0x0800) << (26 - 11)) | // Move i from bit 11 to bit 26.
375 ((value & 0x0700) << (12 - 8)) | // Move imm3 from bits 8-10 to bits 12-14.
376 (value & 0xff); // Keep imm8 in bits 0-7.
377}
378
379inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
380 DCHECK_EQ(value & 0xffff, 0);
381 int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
382 return movw_encoding | B25 | B23;
383}
384
// Encodes a 32-bit MOV of a Thumb2 "modified immediate" into `rd`. The caller
// must have verified that `value` is encodable (see ModifiedImmediate).
inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
  uint32_t mod_imm = ModifiedImmediate(value);
  DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
  return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
      (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
}
391
// Encodes a 16-bit PC-relative LDR literal: low register `rt`, word-aligned
// unsigned `offset` up to 1020 bytes, stored as a word count in imm8.
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}

// Encodes a 32-bit PC-relative LDR literal via the generic Rt/Rn/imm12 form.
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}

// Encodes a 32-bit LDRD (load register pair) with a positive, word-aligned
// immediate offset (stored as a word count in imm8).
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}

// Encodes a 32-bit VLDR.32 (load single-precision register) with a positive,
// word-aligned immediate offset.
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |  // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |  // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}

// Encodes a 32-bit VLDR.64 (load double-precision register) with a positive,
// word-aligned immediate offset.
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |  // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |  // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);
}

// Encodes a 16-bit LDR with a base register and a small word-aligned immediate
// offset (imm5 as a word count); both registers must be low registers.
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
444
// Returns the 32-bit load encoding for a wide or floating-point literal Fixup,
// dispatching on the Fixup's type to the proper load instruction with the
// given base register and offset. Aborts on any other Fixup type.
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
458
459inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
460 DCHECK(IsUint<12>(offset));
461 return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
462}
463
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700464inline int16_t Thumb2Assembler::AdrEncoding16(Register rd, int32_t offset) {
465 DCHECK(IsUint<10>(offset));
466 DCHECK(IsAligned<4>(offset));
467 DCHECK(!IsHighRegister(rd));
468 return B15 | B13 | (rd << 8) | (offset >> 2);
469}
470
// Encodes a 32-bit ADR with a 12-bit byte offset, split into the i:imm3:imm8
// immediate fields.
inline int32_t Thumb2Assembler::AdrEncoding32(Register rd, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  // Bit 26: offset[11]
  // Bits 14-12: offset[10-8]
  // Bits 7-0: offset[7-0]
  int32_t immediate_mask =
      ((offset & (1 << 11)) << (26 - 11)) |
      ((offset & (7 << 8)) << (12 - 8)) |
      (offset & 0xFF);
  return B31 | B30 | B29 | B28 | B25 | B19 | B18 | B17 | B16 | (rd << 8) | immediate_mask;
}
482
// Finalizes the generated code. The steps are order-critical: literal and
// jump-table labels must be bound before fixup sizes are adjusted, fixups must
// be emitted before the literal pool and jump tables are appended, and CFI is
// patched last using the final instruction positions.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  uint32_t size_after_literals = BindLiterals();
  BindJumpTables(size_after_literals);
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
  FinalizeTrackedLabels();
  EmitJumpTables();
  PatchCFI();
}
494
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +0100495bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
496 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
497}
498
// Determines whether `immediate` can be encoded as the shifter operand of the
// given data-processing `opcode`, filling in *shifter_op as an immediate
// operand either way. ADD/SUB additionally accept any plain 12-bit immediate;
// all opcodes accept a Thumb2 modified immediate. rd/rn do not influence the
// answer here and are unused.
bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
                                            Register rn ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            uint32_t immediate,
                                            ShifterOperand* shifter_op) {
  shifter_op->type_ = ShifterOperand::kImmediate;
  shifter_op->immed_ = immediate;
  shifter_op->is_shift_ = false;
  shifter_op->is_rotate_ = false;
  switch (opcode) {
    case ADD:
    case SUB:
      // ADD/SUB have a 12-bit plain-immediate form in addition to the
      // modified-immediate form.
      if (immediate < (1 << 12)) {    // Less than (or equal to) 12 bits can always be done.
        return true;
      }
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MOV:
      // TODO: Support less than or equal to 12bits.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MVN:
    default:
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
  }
}
525
// The following are thin wrappers that forward each ARM data-processing
// instruction to EmitDataProcessing with its opcode. Note the comparison/test
// instructions (TST, TEQ, CMP, CMN) always set flags (kCcSet) and pass R0 as
// the unused destination; MOV and MVN pass R0 as the unused first operand.
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}


void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}


void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}


void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}


void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}


void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}


void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}


void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}


void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}


void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}


void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}


void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
}


void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}


void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}


void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
624
625
626void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700627 CheckCondition(cond);
628
Dave Allison65fcc2c2014-04-28 13:45:27 -0700629 if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
630 // 16 bit.
631 int16_t encoding = B14 | B9 | B8 | B6 |
632 rn << 3 | rd;
633 Emit16(encoding);
634 } else {
635 // 32 bit.
Andreas Gampec8ccf682014-09-29 20:07:43 -0700636 uint32_t op1 = 0U /* 0b000 */;
637 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700638 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
639 op1 << 20 |
640 B15 | B14 | B13 | B12 |
641 op2 << 4 |
642 static_cast<uint32_t>(rd) << 8 |
643 static_cast<uint32_t>(rn) << 16 |
644 static_cast<uint32_t>(rm);
645
646 Emit32(encoding);
647 }
648}
649
650
651void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
652 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700653 CheckCondition(cond);
654
Andreas Gampec8ccf682014-09-29 20:07:43 -0700655 uint32_t op1 = 0U /* 0b000 */;
656 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700657 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
658 op1 << 20 |
659 op2 << 4 |
660 static_cast<uint32_t>(rd) << 8 |
661 static_cast<uint32_t>(ra) << 12 |
662 static_cast<uint32_t>(rn) << 16 |
663 static_cast<uint32_t>(rm);
664
665 Emit32(encoding);
666}
667
668
669void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
670 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700671 CheckCondition(cond);
672
Andreas Gampec8ccf682014-09-29 20:07:43 -0700673 uint32_t op1 = 0U /* 0b000 */;
674 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700675 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
676 op1 << 20 |
677 op2 << 4 |
678 static_cast<uint32_t>(rd) << 8 |
679 static_cast<uint32_t>(ra) << 12 |
680 static_cast<uint32_t>(rn) << 16 |
681 static_cast<uint32_t>(rm);
682
683 Emit32(encoding);
684}
685
686
Zheng Xuc6667102015-05-15 16:08:45 +0800687void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
688 Register rm, Condition cond) {
689 CheckCondition(cond);
690
691 uint32_t op1 = 0U /* 0b000; */;
692 uint32_t op2 = 0U /* 0b0000 */;
693 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
694 op1 << 20 |
695 op2 << 4 |
696 static_cast<uint32_t>(rd_lo) << 12 |
697 static_cast<uint32_t>(rd_hi) << 8 |
698 static_cast<uint32_t>(rn) << 16 |
699 static_cast<uint32_t>(rm);
700
701 Emit32(encoding);
702}
703
704
Dave Allison65fcc2c2014-04-28 13:45:27 -0700705void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
706 Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700707 CheckCondition(cond);
708
Andreas Gampec8ccf682014-09-29 20:07:43 -0700709 uint32_t op1 = 2U /* 0b010; */;
710 uint32_t op2 = 0U /* 0b0000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700711 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
712 op1 << 20 |
713 op2 << 4 |
714 static_cast<uint32_t>(rd_lo) << 12 |
715 static_cast<uint32_t>(rd_hi) << 8 |
716 static_cast<uint32_t>(rn) << 16 |
717 static_cast<uint32_t>(rm);
718
719 Emit32(encoding);
720}
721
722
723void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700724 CheckCondition(cond);
725
Andreas Gampec8ccf682014-09-29 20:07:43 -0700726 uint32_t op1 = 1U /* 0b001 */;
727 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700728 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
729 op1 << 20 |
730 op2 << 4 |
731 0xf << 12 |
732 static_cast<uint32_t>(rd) << 8 |
733 static_cast<uint32_t>(rn) << 16 |
734 static_cast<uint32_t>(rm);
735
736 Emit32(encoding);
737}
738
739
740void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700741 CheckCondition(cond);
742
Andreas Gampec8ccf682014-09-29 20:07:43 -0700743 uint32_t op1 = 1U /* 0b001 */;
744 uint32_t op2 = 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700745 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
746 op1 << 20 |
747 op2 << 4 |
748 0xf << 12 |
749 static_cast<uint32_t>(rd) << 8 |
750 static_cast<uint32_t>(rn) << 16 |
751 static_cast<uint32_t>(rm);
752
753 Emit32(encoding);
754}
755
756
// SBFX (encoding T1): signed bitfield extract.
// rd := sign_extend(rn<lsb+width-1 : lsb>). lsb must be 0..31 and
// 1 <= width <= 32; the instruction encodes width - 1 and splits lsb
// into imm3 (bits 2-4) and imm2 (bits 0-1).
void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);  // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 20U /* 0b10100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
776
777
// UBFX (encoding T1): unsigned bitfield extract.
// rd := zero_extend(rn<lsb+width-1 : lsb>). Same operand constraints and
// field packing as sbfx(); only the op field (0b11100) differs.
void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);  // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 28U /* 0b11100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
797
798
// Load word: rd := [ad]. Delegates to EmitLoadStore(load=true).
void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}
802
803
// Store word: [ad] := rd. Delegates to EmitLoadStore(load=false).
void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}
807
808
// Load byte (zero-extended): byte=true, half=false, is_signed=false.
void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}
812
813
// Store byte: low 8 bits of rd to [ad].
void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}
817
818
// Load halfword (zero-extended): half=true.
void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}
822
823
// Store halfword: low 16 bits of rd to [ad].
void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}
827
828
// Load signed byte (sign-extended): byte=true, is_signed=true.
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}
832
833
// Load signed halfword (sign-extended): half=true, is_signed=true.
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
837
838
// Load doubleword into the consecutive pair {rd, rd+1}.
// NOTE(review): no check here that rd + 1 is a valid second register
// (e.g. rd != PC/LR) — callers are expected to pass a suitable pair.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}
842
843
// LDRD (encoding T1): rd := [ad], rd2 := [ad + 4].
// Unlike the other Thumb loads, the operand fields follow the ARM-style
// layout: Rt at bit 12, Rt2 at bit 8, with the address encoded by
// encodingThumbLdrdStrd().
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
854
855
// Store doubleword from the consecutive pair {rd, rd+1}.
// NOTE(review): as with ldrd(rd, ad), rd + 1 is formed without validation.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}
859
860
// STRD (encoding T1): [ad] := rd, [ad + 4] := rd2.
// Same ARM-like field layout as ldrd(); differs only by the cleared
// load bit (B20).
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
871
872
873void Thumb2Assembler::ldm(BlockAddressMode am,
874 Register base,
875 RegList regs,
876 Condition cond) {
Vladimir Markoe8469c12014-11-26 18:09:30 +0000877 CHECK_NE(regs, 0u); // Do not use ldm if there's nothing to load.
878 if (IsPowerOfTwo(regs)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -0700879 // Thumb doesn't support one reg in the list.
880 // Find the register number.
Vladimir Markoe8469c12014-11-26 18:09:30 +0000881 int reg = CTZ(static_cast<uint32_t>(regs));
Dave Allison65fcc2c2014-04-28 13:45:27 -0700882 CHECK_LT(reg, 16);
Dave Allison45fdb932014-06-25 12:37:10 -0700883 CHECK(am == DB_W); // Only writeback is supported.
Dave Allison65fcc2c2014-04-28 13:45:27 -0700884 ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
885 } else {
886 EmitMultiMemOp(cond, am, true, base, regs);
887 }
888}
889
890
891void Thumb2Assembler::stm(BlockAddressMode am,
892 Register base,
893 RegList regs,
894 Condition cond) {
Vladimir Markoe8469c12014-11-26 18:09:30 +0000895 CHECK_NE(regs, 0u); // Do not use stm if there's nothing to store.
896 if (IsPowerOfTwo(regs)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -0700897 // Thumb doesn't support one reg in the list.
898 // Find the register number.
Vladimir Markoe8469c12014-11-26 18:09:30 +0000899 int reg = CTZ(static_cast<uint32_t>(regs));
Dave Allison65fcc2c2014-04-28 13:45:27 -0700900 CHECK_LT(reg, 16);
Dave Allison45fdb932014-06-25 12:37:10 -0700901 CHECK(am == IA || am == IA_W);
902 Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700903 str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
904 } else {
905 EmitMultiMemOp(cond, am, false, base, regs);
906 }
907}
908
909
// VMOV.F32 sd, #imm — immediate form.
// Succeeds (returns true and emits the instruction) only when s_imm is
// representable as a VFP 8-bit modified immediate: the low 19 mantissa bits
// must be zero and the exponent field must be all-ones or all-zeros in its
// top bits (i.e. the value fits the abcdefgh pattern). Otherwise emits
// nothing and returns false so the caller can load the constant another way.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign, inverted-exponent bit and top mantissa bits into imm8.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
923
924
// VMOV.F64 dd, #imm — immediate form, double-precision analogue of
// vmovs(SRegister, float). Requires the low 48 mantissa bits to be zero and
// the exponent top bits to be all-ones or all-zeros; returns false without
// emitting anything when d_imm is not encodable as an 8-bit VFP immediate.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign, inverted-exponent bit and top mantissa bits into imm8.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
938
939
// VMOV.F32 sd, sm — single-precision register-to-register move.
void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}
943
944
// VMOV.F64 dd, dm — double-precision register-to-register move.
void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}
948
949
// VADD.F32: sd := sn + sm.
void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}
954
955
// VADD.F64: dd := dn + dm.
void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}
960
961
// VSUB.F32: sd := sn - sm.
void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}
966
967
// VSUB.F64: dd := dn - dm.
void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}
972
973
// VMUL.F32: sd := sn * sm.
void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}
978
979
// VMUL.F64: dd := dn * dm.
void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}
984
985
// VMLA.F32 (multiply-accumulate): sd := sd + sn * sm.
void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}
990
991
// VMLA.F64 (multiply-accumulate): dd := dd + dn * dm.
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}
996
997
// VMLS.F32 (multiply-subtract): sd := sd - sn * sm.
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}
1002
1003
// VMLS.F64 (multiply-subtract): dd := dd - dn * dm.
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
1008
1009
// VDIV.F32: sd := sn / sm.
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}
1014
1015
// VDIV.F64: dd := dn / dm.
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
1020
1021
// VABS.F32: sd := |sm|.
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}
1025
1026
// VABS.F64: dd := |dm|.
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}
1030
1031
// VNEG.F32: sd := -sm.
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}
1035
1036
// VNEG.F64: dd := -dm.
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}
1040
1041
// VSQRT.F32: sd := sqrt(sm).
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}
1045
// VSQRT.F64: dd := sqrt(dm).
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
1049
1050
// VCVT.F32.F64: narrow double dm to single sd.
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}
1054
1055
// VCVT.F64.F32: widen single sm to double dd.
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}
1059
1060
// VCVT.S32.F32: single sm to signed int, result in sd.
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}
1064
1065
// VCVT.S32.F64: double dm to signed int, result in sd.
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}
1069
1070
// VCVT.F32.S32: signed int in sm to single sd.
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}
1074
1075
// VCVT.F64.S32: signed int in sm to double dd.
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}
1079
1080
// VCVT.U32.F32: single sm to unsigned int, result in sd.
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}
1084
1085
// VCVT.U32.F64: double dm to unsigned int, result in sd.
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}
1089
1090
// VCVT.F32.U32: unsigned int in sm to single sd.
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}
1094
1095
// VCVT.F64.U32: unsigned int in sm to double dd.
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
1099
1100
// VCMP.F32 sd, sm — compare singles, setting the FPSCR flags.
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}
1104
1105
// VCMP.F64 dd, dm — compare doubles, setting the FPSCR flags.
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}
1109
1110
// VCMP.F32 sd, #0.0 — compare single against zero.
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}
1114
1115
// VCMP.F64 dd, #0.0 — compare double against zero.
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
1119
// Conditional branch to label. The DCHECK enforces that no IT block is
// currently open (next_condition_ must be back to AL before branching).
void Thumb2Assembler::b(Label* label, Condition cond) {
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1124
1125
// Branch with link to label (link=true, exchange=false).
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
1130
1131
// Branch with link and exchange to label; always unconditional (AL).
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
1135
1136
// Marks an exception handler by emitting a recognizable TST pc, #0 marker
// followed by a branch to `label` that is jumped over at runtime (the
// b(&l) skips it), so the embedded branch acts as metadata rather than
// executed code. NOTE(review): the exact consumer of this pattern is
// outside this file — confirm before changing the sequence.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1144
1145
1146void Thumb2Assembler::Emit32(int32_t value) {
1147 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1148 buffer_.Emit<int16_t>(value >> 16);
1149 buffer_.Emit<int16_t>(value & 0xffff);
1150}
1151
1152
// Appends a single 16-bit Thumb instruction to the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1157
1158
// Decides whether a data-processing instruction must use a 32-bit Thumb2
// encoding (returns true) or fits a 16-bit Thumb encoding (returns false).
// The checks mirror the Thumb 16-bit encoding constraints: register range
// (low vs high registers), immediate width, available shifts, and whether
// the 16-bit form's flag-setting behavior (flags set iff outside an IT
// block, i.e. cond == AL) matches the requested set_cc.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  // Global override: callers can force all instructions to 32-bit.
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only CMP, non-flag-setting MOV, and non-flag-setting ADD Rdn, Rm
  // have 16-bit forms that accept high registers.
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
    case ORN:
      // No 16-bit TEQ or ORN exists at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register data processing requires Rd == Rn in 16-bit form.
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      // 16-bit RSB (NEG) only supports a zero immediate.
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if (!IsUint<3>(so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (!IsUint<8>(so.GetImmediate())) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1313
1314
// Emits a 32-bit Thumb2 data-processing instruction.
// Maps the generic Opcode to the Thumb2 op field, then selects between:
//  - plain 12-bit immediate ADDW/SUBW (T4-style) when applicable,
//  - modified-immediate encoding (T3-style) via ModifiedImmediate(),
//  - register/shifted-register encoding.
// Comparison opcodes (TST/TEQ/CMP/CMN) reuse the ALU encodings with Rd = PC,
// and MOV/MVN use Rn = PC, per the Thumb2 encoding conventions.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "no mapping".
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the fatal check below.
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    case ORN: thumb_opcode = 3U /* 0b0011 */; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12)) &&
        /* Prefer T3 encoding to T4. */ !ShifterOperandCanAlwaysHold(so.GetImmediate())) {
      if (set_cc != kCcSet) {
        // Switch to the ADDW/SUBW opcode values for the plain-immediate form.
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8 fields.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1400
1401
// Emits a 16-bit Thumb data-processing instruction.
// ADD/SUB are routed to the dedicated Emit16BitAddSub(). Shifted MOVs are
// translated into the 16-bit shift-immediate opcodes (LSL/LSR/ASR). For
// register forms, rn/rd are adjusted to the two-register 16-bit layout,
// with special handling for CMP and MOV involving high registers.
// The final halfword is assembled from dp_opcode / thumb_opcode / rd / rn /
// optional immediate, each shifted into the position selected above.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "no mapping".
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1590
1591
// ADD and SUB are complex enough to warrant their own emitter.
// Selects among the 16-bit Thumb encodings for ADD/SUB (the T1/T2 forms and
// the SP-relative immediate forms) based on the operand kind (register vs.
// immediate), which registers are involved and the requested flag-setting
// behavior, then assembles and emits a single 16-bit instruction.
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  // Field positions default to the common register-form layout; each case
  // below overrides them as needed for its chosen encoding.
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 9 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          // (rd's bit 3 becomes the DN bit, placed just above the Rm field.)
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
                                     (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1: three low registers; rm is carried in the "immediate" field.
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm -- immediate is word-aligned and stored as imm >> 2.
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm -- immediate is word-aligned and stored as imm >> 2.
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          CHECK_LT(immediate, (1u << 10));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          immediate >>= 2;
        } else if (rn != rd) {
          // Must use T1 (3-bit immediate form) since source and destination differ.
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding: 8-bit immediate, rd == rn encoded once at bit 8.
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1: three low registers; rm is carried in the "immediate" field.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm -- immediate is word-aligned and stored as imm >> 2.
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;
        } else if (rn != rd) {
          // Must use T1 (3-bit immediate form) since source and destination differ.
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding: 8-bit immediate, rd == rn encoded once at bit 8.
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the 16-bit instruction from the fields selected above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1741
1742
1743void Thumb2Assembler::EmitDataProcessing(Condition cond,
1744 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001745 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001746 Register rn,
1747 Register rd,
1748 const ShifterOperand& so) {
1749 CHECK_NE(rd, kNoRegister);
1750 CheckCondition(cond);
1751
1752 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1753 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1754 } else {
1755 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1756 }
1757}
1758
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001759void Thumb2Assembler::EmitShift(Register rd,
1760 Register rm,
1761 Shift shift,
1762 uint8_t amount,
1763 Condition cond,
1764 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001765 CHECK_LT(amount, (1 << 5));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001766 if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
1767 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001768 uint16_t opcode = 0;
1769 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001770 case LSL: opcode = 0U /* 0b00 */; break;
1771 case LSR: opcode = 1U /* 0b01 */; break;
1772 case ASR: opcode = 2U /* 0b10 */; break;
1773 case ROR: opcode = 3U /* 0b11 */; break;
1774 case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001775 default:
1776 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001777 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001778 }
1779 // 32 bit.
1780 int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001781 0xf << 16 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001782 uint32_t imm3 = amount >> 2;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001783 uint32_t imm2 = amount & 3U /* 0b11 */;
Dave Allison45fdb932014-06-25 12:37:10 -07001784 encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
1785 static_cast<int16_t>(rd) << 8 | opcode << 4;
1786 Emit32(encoding);
1787 } else {
1788 // 16 bit shift
1789 uint16_t opcode = 0;
1790 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001791 case LSL: opcode = 0U /* 0b00 */; break;
1792 case LSR: opcode = 1U /* 0b01 */; break;
1793 case ASR: opcode = 2U /* 0b10 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001794 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001795 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1796 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001797 }
1798 int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
1799 static_cast<int16_t>(rd);
1800 Emit16(encoding);
1801 }
1802}
1803
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001804void Thumb2Assembler::EmitShift(Register rd,
1805 Register rn,
1806 Shift shift,
1807 Register rm,
1808 Condition cond,
1809 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001810 CHECK_NE(shift, RRX);
1811 bool must_be_32bit = false;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001812 if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
1813 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001814 must_be_32bit = true;
1815 }
1816
1817 if (must_be_32bit) {
1818 uint16_t opcode = 0;
1819 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001820 case LSL: opcode = 0U /* 0b00 */; break;
1821 case LSR: opcode = 1U /* 0b01 */; break;
1822 case ASR: opcode = 2U /* 0b10 */; break;
1823 case ROR: opcode = 3U /* 0b11 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001824 default:
1825 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001826 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001827 }
1828 // 32 bit.
1829 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001830 0xf << 12 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001831 encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
1832 static_cast<int16_t>(rd) << 8 | opcode << 21;
1833 Emit32(encoding);
1834 } else {
1835 uint16_t opcode = 0;
1836 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001837 case LSL: opcode = 2U /* 0b0010 */; break;
1838 case LSR: opcode = 3U /* 0b0011 */; break;
1839 case ASR: opcode = 4U /* 0b0100 */; break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001840 case ROR: opcode = 7U /* 0b0111 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001841 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001842 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1843 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001844 }
1845 int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
1846 static_cast<int16_t>(rd);
1847 Emit16(encoding);
1848 }
1849}
1850
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001851inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1852 switch (size) {
1853 case kBranch16Bit:
1854 return 2u;
1855 case kBranch32Bit:
1856 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001857
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001858 case kCbxz16Bit:
1859 return 2u;
1860 case kCbxz32Bit:
1861 return 4u;
1862 case kCbxz48Bit:
1863 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001864
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001865 case kLiteral1KiB:
1866 return 2u;
1867 case kLiteral4KiB:
1868 return 4u;
1869 case kLiteral64KiB:
1870 return 8u;
1871 case kLiteral1MiB:
1872 return 10u;
1873 case kLiteralFar:
1874 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001875
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07001876 case kLiteralAddr1KiB:
1877 return 2u;
1878 case kLiteralAddr4KiB:
1879 return 4u;
1880 case kLiteralAddr64KiB:
1881 return 6u;
1882 case kLiteralAddrFar:
1883 return 10u;
1884
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001885 case kLongOrFPLiteral1KiB:
1886 return 4u;
1887 case kLongOrFPLiteral256KiB:
1888 return 10u;
1889 case kLongOrFPLiteralFar:
1890 return 14u;
1891 }
1892 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1893 UNREACHABLE();
1894}
1895
// Returns the byte size of this fixup's originally chosen encoding,
// before any expansion recorded in size_.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1899
// Returns the byte size of this fixup's current (possibly expanded) encoding.
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1903
// Returns the padding (0 or 2 bytes) that must precede the literal pool
// to make it 4-byte aligned, given the current code size.
inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
  // The code size must be a multiple of 2.
  DCHECK_ALIGNED(current_code_size, 2);
  // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
  // (code_size & 2 is exactly that 2-byte padding when code_size % 4 == 2.)
  return current_code_size & 2;
}
1910
// Computes the offset operand to encode for this fixup: the distance from the
// instruction's PC-relative base to the target, corrected by adjustment_ and
// by per-encoding extras (instructions emitted before the PC use, literal
// pool padding, and the PC rounding performed by literal loads).
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  // All positions fit in int32_t, so the signed arithmetic below cannot
  // overflow (each step is guarded by a DCHECK).
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward reference: adjustment_ widens the gap.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward reference: adjustment_ widens the (negative) gap.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
    case kLiteralAddr1KiB:
    case kLiteralAddr4KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
    case kLiteralAddr64KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
    case kLiteralAddrFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1975
1976inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1977 DCHECK_NE(target_, kUnresolved);
1978 Size old_size = size_;
1979 size_ = new_size;
1980 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1981 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1982 if (target_ > location_) {
1983 adjustment_ += adjustment;
1984 }
1985 return adjustment;
1986}
1987
// Checks whether this fixup's current encoding can still reach its target at
// the given code size and, if not, expands it to the next larger encoding.
// The deliberate case fall-throughs allow multiple expansion steps in one
// call. Returns the number of bytes by which the code grew (0 if unchanged).
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // Conditional B has a 9-bit offset (signed), unconditional 12-bit.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLiteralAddr1KiB:
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddr64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteralAddr64KiB:
      if (IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralAddrFar);
      FALLTHROUGH_INTENDED;
    case kLiteralAddrFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
2089
// Writes the final instruction bytes for this fixup into the buffer at
// location_. The sequence emitted depends on the encoding selected by size
// adjustment: a single 16/32-bit instruction, or an expanded multi-instruction
// sequence (CMP+B for cbz/cbnz, MOV/MOVW/MOVT + ADD rX, PC + load for far
// literals). All 32-bit encodings are stored as two 16-bit halfwords,
// high halfword first.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Turn B into BL by setting the link bit.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Turn B into BLX by flipping the relevant opcode bits.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // Expanded CB(N)Z: 16-bit CMP Rn, #0 followed by a 16-bit conditional B.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // Expanded CB(N)Z: 16-bit CMP Rn, #0 followed by a 32-bit conditional B.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT the full offset into rn; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLiteralAddr1KiB: {
      DCHECK(type_ == kLoadLiteralAddr);
      int16_t encoding = AdrEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteralAddr4KiB: {
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t encoding = AdrEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteralAddr64KiB: {
      // MOVW rn, #offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      break;
    }
    case kLiteralAddrFar: {
      // MOVW+MOVT the full offset into rn; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // MOV IP, #(offset & ~0x3ff); ADD IP, PC; wide/FP load at #(offset & 0x3ff).
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW+MOVT the full offset into IP; ADD IP, PC; wide/FP load at #0.
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2268
Dave Allison65fcc2c2014-04-28 13:45:27 -07002269uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002270 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002271 uint32_t location = buffer_.Size();
2272
2273 // This is always unresolved as it must be a forward branch.
2274 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002275 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002276}
2277
2278
2279// NOTE: this only support immediate offsets, not [rx,ry].
2280// TODO: support [rx,ry] instructions.
2281void Thumb2Assembler::EmitLoadStore(Condition cond,
2282 bool load,
2283 bool byte,
2284 bool half,
2285 bool is_signed,
2286 Register rd,
2287 const Address& ad) {
2288 CHECK_NE(rd, kNoRegister);
2289 CheckCondition(cond);
2290 bool must_be_32bit = force_32bit_;
2291 if (IsHighRegister(rd)) {
2292 must_be_32bit = true;
2293 }
2294
2295 Register rn = ad.GetRegister();
Dave Allison45fdb932014-06-25 12:37:10 -07002296 if (IsHighRegister(rn) && rn != SP && rn != PC) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002297 must_be_32bit = true;
2298 }
2299
2300 if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
2301 must_be_32bit = true;
2302 }
2303
Dave Allison45fdb932014-06-25 12:37:10 -07002304 if (ad.IsImmediate()) {
2305 // Immediate offset
2306 int32_t offset = ad.GetOffset();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002307
Dave Allison45fdb932014-06-25 12:37:10 -07002308 // The 16 bit SP relative instruction can only have a 10 bit offset.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002309 if (rn == SP && offset >= (1 << 10)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002310 must_be_32bit = true;
2311 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002312
2313 if (byte) {
Dave Allison45fdb932014-06-25 12:37:10 -07002314 // 5 bit offset, no shift.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002315 if (offset >= (1 << 5)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002316 must_be_32bit = true;
2317 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002318 } else if (half) {
Dave Allison45fdb932014-06-25 12:37:10 -07002319 // 6 bit offset, shifted by 1.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002320 if (offset >= (1 << 6)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002321 must_be_32bit = true;
2322 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002323 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002324 // 7 bit offset, shifted by 2.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002325 if (offset >= (1 << 7)) {
Dave Allison45fdb932014-06-25 12:37:10 -07002326 must_be_32bit = true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002327 }
2328 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002329
Dave Allison45fdb932014-06-25 12:37:10 -07002330 if (must_be_32bit) {
2331 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2332 (load ? B20 : 0) |
2333 (is_signed ? B24 : 0) |
2334 static_cast<uint32_t>(rd) << 12 |
2335 ad.encodingThumb(true) |
2336 (byte ? 0 : half ? B21 : B22);
2337 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002338 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002339 // 16 bit thumb1.
2340 uint8_t opA = 0;
2341 bool sp_relative = false;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002342
2343 if (byte) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002344 opA = 7U /* 0b0111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002345 } else if (half) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002346 opA = 8U /* 0b1000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002347 } else {
Dave Allison45fdb932014-06-25 12:37:10 -07002348 if (rn == SP) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002349 opA = 9U /* 0b1001 */;
Dave Allison45fdb932014-06-25 12:37:10 -07002350 sp_relative = true;
2351 } else {
Andreas Gampec8ccf682014-09-29 20:07:43 -07002352 opA = 6U /* 0b0110 */;
Dave Allison45fdb932014-06-25 12:37:10 -07002353 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002354 }
Dave Allison45fdb932014-06-25 12:37:10 -07002355 int16_t encoding = opA << 12 |
2356 (load ? B11 : 0);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002357
Dave Allison45fdb932014-06-25 12:37:10 -07002358 CHECK_GE(offset, 0);
2359 if (sp_relative) {
2360 // SP relative, 10 bit offset.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002361 CHECK_LT(offset, (1 << 10));
Roland Levillain14d90572015-07-16 10:52:26 +01002362 CHECK_ALIGNED(offset, 4);
Dave Allison45fdb932014-06-25 12:37:10 -07002363 encoding |= rd << 8 | offset >> 2;
2364 } else {
2365 // No SP relative. The offset is shifted right depending on
2366 // the size of the load/store.
2367 encoding |= static_cast<uint32_t>(rd);
2368
2369 if (byte) {
2370 // 5 bit offset, no shift.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002371 CHECK_LT(offset, (1 << 5));
Dave Allison45fdb932014-06-25 12:37:10 -07002372 } else if (half) {
2373 // 6 bit offset, shifted by 1.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002374 CHECK_LT(offset, (1 << 6));
Roland Levillain14d90572015-07-16 10:52:26 +01002375 CHECK_ALIGNED(offset, 2);
Dave Allison45fdb932014-06-25 12:37:10 -07002376 offset >>= 1;
2377 } else {
2378 // 7 bit offset, shifted by 2.
Dave Allison0bb9ade2014-06-26 17:57:36 -07002379 CHECK_LT(offset, (1 << 7));
Roland Levillain14d90572015-07-16 10:52:26 +01002380 CHECK_ALIGNED(offset, 4);
Dave Allison45fdb932014-06-25 12:37:10 -07002381 offset >>= 2;
2382 }
2383 encoding |= rn << 3 | offset << 6;
2384 }
2385
2386 Emit16(encoding);
2387 }
2388 } else {
2389 // Register shift.
2390 if (ad.GetRegister() == PC) {
2391 // PC relative literal encoding.
2392 int32_t offset = ad.GetOffset();
Dave Allison0bb9ade2014-06-26 17:57:36 -07002393 if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
Dave Allison45fdb932014-06-25 12:37:10 -07002394 int32_t up = B23;
2395 if (offset < 0) {
2396 offset = -offset;
2397 up = 0;
2398 }
2399 CHECK_LT(offset, (1 << 12));
2400 int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
2401 offset | up |
2402 static_cast<uint32_t>(rd) << 12;
2403 Emit32(encoding);
2404 } else {
2405 // 16 bit literal load.
2406 CHECK_GE(offset, 0);
2407 CHECK_LT(offset, (1 << 10));
2408 int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
2409 Emit16(encoding);
2410 }
2411 } else {
2412 if (ad.GetShiftCount() != 0) {
2413 // If there is a shift count this must be 32 bit.
2414 must_be_32bit = true;
2415 } else if (IsHighRegister(ad.GetRegisterOffset())) {
2416 must_be_32bit = true;
2417 }
2418
2419 if (must_be_32bit) {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002420 int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
Dave Allison45fdb932014-06-25 12:37:10 -07002421 ad.encodingThumb(true);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002422 if (half) {
2423 encoding |= B21;
2424 } else if (!byte) {
2425 encoding |= B22;
2426 }
Dave Allison45fdb932014-06-25 12:37:10 -07002427 Emit32(encoding);
2428 } else {
2429 // 16 bit register offset.
2430 int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
2431 ad.encodingThumb(false);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01002432 if (byte) {
2433 encoding |= B10;
2434 } else if (half) {
2435 encoding |= B9;
2436 }
Dave Allison45fdb932014-06-25 12:37:10 -07002437 Emit16(encoding);
2438 }
2439 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07002440 }
2441}
2442
2443
2444void Thumb2Assembler::EmitMultiMemOp(Condition cond,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002445 BlockAddressMode bam,
Dave Allison65fcc2c2014-04-28 13:45:27 -07002446 bool load,
2447 Register base,
2448 RegList regs) {
2449 CHECK_NE(base, kNoRegister);
2450 CheckCondition(cond);
2451 bool must_be_32bit = force_32bit_;
2452
Vladimir Markoe8469c12014-11-26 18:09:30 +00002453 if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
2454 (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
2455 // Use 16-bit PUSH/POP.
2456 int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
2457 ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
2458 Emit16(encoding);
2459 return;
2460 }
2461
Dave Allison65fcc2c2014-04-28 13:45:27 -07002462 if ((regs & 0xff00) != 0) {
2463 must_be_32bit = true;
2464 }
2465
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002466 bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002467 // 16 bit always uses writeback.
2468 if (!w_bit) {
2469 must_be_32bit = true;
2470 }
2471
2472 if (must_be_32bit) {
2473 uint32_t op = 0;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002474 switch (bam) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07002475 case IA:
2476 case IA_W:
Andreas Gampec8ccf682014-09-29 20:07:43 -07002477 op = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002478 break;
2479 case DB:
2480 case DB_W:
Andreas Gampec8ccf682014-09-29 20:07:43 -07002481 op = 2U /* 0b10 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002482 break;
2483 case DA:
2484 case IB:
2485 case DA_W:
2486 case IB_W:
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002487 LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
Vladimir Markoe8469c12014-11-26 18:09:30 +00002488 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002489 }
2490 if (load) {
2491 // Cannot have SP in the list.
2492 CHECK_EQ((regs & (1 << SP)), 0);
2493 } else {
2494 // Cannot have PC or SP in the list.
2495 CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
2496 }
2497 int32_t encoding = B31 | B30 | B29 | B27 |
2498 (op << 23) |
2499 (load ? B20 : 0) |
2500 base << 16 |
2501 regs |
2502 (w_bit << 21);
2503 Emit32(encoding);
2504 } else {
2505 int16_t encoding = B15 | B14 |
2506 (load ? B11 : 0) |
2507 base << 8 |
2508 regs;
2509 Emit16(encoding);
2510 }
2511}
2512
// Emit a branch (B, B<cond>, BL or BLX) to `label`, recording a Fixup so the
// final offset and encoding size can be resolved later. Bound labels become
// backward branches resolved immediately; unbound labels are chained through
// the emitted placeholder halfwords.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      // Branch-with-link has no 16-bit encoding.
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  if (use32bit) {
    // Second halfword placeholder for the 32-bit encoding.
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2551
2552
// CLZ rd, rm: count leading zeros of rm into rd. Always a 32-bit encoding;
// SP is permitted but PC is not.
void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  // Note: rm is encoded twice (bits 19-16 and 3-0) per the CLZ encoding.
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B21 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2568
2569
// MOVW rd, #imm16: load a 16-bit immediate into rd.
// A 16-bit encoding is used only for low registers with an 8-bit immediate.
void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)|| imm16 >= 256u) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    // Use encoding T3.
    // The immediate is scattered across the imm4:i:imm3:imm8 fields.
    uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
    uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
    uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
    uint32_t imm8 = imm16 & 0xff;
    int32_t encoding = B31 | B30 | B29 | B28 |
        B25 | B22 |
        static_cast<uint32_t>(rd) << 8 |
        i << 26 |
        imm4 << 16 |
        imm3 << 12 |
        imm8;
    Emit32(encoding);
  } else {
    // 16-bit move-immediate. NOTE(review): this short form appears to be the
    // flag-setting MOVS encoding — confirm callers tolerate flag updates.
    int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
        imm16;
    Emit16(encoding);
  }
}
2597
2598
// MOVT rd, #imm16: load a 16-bit immediate into the top halfword of rd,
// leaving the bottom halfword unchanged.
void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CheckCondition(cond);
  // Always 32 bits.
  // The immediate is scattered across the imm4:i:imm3:imm8 fields.
  uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
  uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
  uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
  uint32_t imm8 = imm16 & 0xff;
  int32_t encoding = B31 | B30 | B29 | B28 |
      B25 | B23 | B22 |
      static_cast<uint32_t>(rd) << 8 |
      i << 26 |
      imm4 << 16 |
      imm3 << 12 |
      imm8;
  Emit32(encoding);
}
2615
2616
// RBIT rd, rm: reverse the bit order of rm into rd. Always a 32-bit
// encoding; neither SP nor PC may be used.
void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  CHECK_NE(rd, SP);
  CHECK_NE(rm, SP);
  // Note: rm is encoded twice (bits 19-16 and 3-0) per the RBIT encoding.
  int32_t encoding = B31 | B30 | B29 | B28 | B27 |
      B25 | B23 | B20 |
      static_cast<uint32_t>(rm) << 16 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      B7 | B5 |
      static_cast<uint32_t>(rm);
  Emit32(encoding);
}
2634
2635
// LDREX rt, [rn, #imm]: exclusive load. The offset is stored as imm >> 2 and
// must fit in 10 bits. NOTE(review): the low two bits of imm are silently
// dropped — presumably imm is required to be 4-byte aligned; confirm callers.
void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      0xf << 8 |
      imm >> 2;
  Emit32(encoding);
}
2649
2650
// LDREX rt, [rn]: exclusive load with a zero offset.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2654
2655
// STREX rd, rt, [rn, #imm]: exclusive store; rd receives the success status.
// The offset is stored as imm >> 2 and must fit in 10 bits. NOTE(review): the
// low two bits of imm are silently dropped — confirm callers pass aligned
// offsets.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            uint16_t imm,
                            Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CheckCondition(cond);
  CHECK_LT(imm, (1u << 10));

  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm >> 2;
  Emit32(encoding);
}
2674
2675
// LDREXD rt, rt2, [rn]: exclusive load of a doubleword into two distinct
// registers.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 | B3 | B2 | B1 | B0;
  Emit32(encoding);
}
2690
2691
// STREX rd, rt, [rn]: exclusive store with a zero offset.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2698
2699
// STREXD rd, rt, rt2, [rn]: exclusive store of a doubleword; rd receives the
// success status and must differ from both source registers.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);
  CHECK_NE(rd, rt);
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 |
      static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2718
2719
// CLREX: clear the exclusive-access monitor state.
void Thumb2Assembler::clrex(Condition cond) {
  CheckCondition(cond);
  int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
      B21 | B20 |
      0xf << 16 |
      B15 |
      0xf << 8 |
      B5 |
      0xf;
  Emit32(encoding);
}
2731
2732
// NOP: emit the 16-bit no-operation encoding.
void Thumb2Assembler::nop(Condition cond) {
  CheckCondition(cond);
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8;
  Emit16(static_cast<int16_t>(encoding));
}
2739
2740
// VMOV sn, rt: move an ARM core register into a single-precision VFP register.
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // sn is split: top bits go in 19-16, its low bit goes in bit 7.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2754
2755
// VMOV rt, sn: move a single-precision VFP register into an ARM core register.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // Same encoding as vmovsr but with the load bit (B20) set.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B20 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2769
2770
// VMOV sm, sm+1, rt, rt2: move two core registers into a pair of consecutive
// single-precision registers (sm must not be S31 so sm+1 exists).
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2790
2791
// VMOV rt, rt2, sm, sm+1: move a pair of consecutive single-precision
// registers into two distinct core registers.
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2812
2813
// VMOV dm, rt, rt2: move two core registers into a double-precision register.
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  // dm is split into a D bit (bit 5) and a 4-bit Vm field (bits 3-0).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2832
2833
// VMOV rt, rt2, dm: move a double-precision register into two distinct core
// registers.
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2853
2854
2855void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2856 const Address& addr = static_cast<const Address&>(ad);
2857 CHECK_NE(sd, kNoSRegister);
2858 CheckCondition(cond);
2859 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2860 B27 | B26 | B24 | B20 |
2861 ((static_cast<int32_t>(sd) & 1)*B22) |
2862 ((static_cast<int32_t>(sd) >> 1)*B12) |
2863 B11 | B9 | addr.vencoding();
2864 Emit32(encoding);
2865}
2866
2867
2868void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2869 const Address& addr = static_cast<const Address&>(ad);
2870 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2871 CHECK_NE(sd, kNoSRegister);
2872 CheckCondition(cond);
2873 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2874 B27 | B26 | B24 |
2875 ((static_cast<int32_t>(sd) & 1)*B22) |
2876 ((static_cast<int32_t>(sd) >> 1)*B12) |
2877 B11 | B9 | addr.vencoding();
2878 Emit32(encoding);
2879}
2880
2881
2882void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2883 const Address& addr = static_cast<const Address&>(ad);
2884 CHECK_NE(dd, kNoDRegister);
2885 CheckCondition(cond);
2886 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2887 B27 | B26 | B24 | B20 |
2888 ((static_cast<int32_t>(dd) >> 4)*B22) |
2889 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2890 B11 | B9 | B8 | addr.vencoding();
2891 Emit32(encoding);
2892}
2893
2894
2895void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2896 const Address& addr = static_cast<const Address&>(ad);
2897 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2898 CHECK_NE(dd, kNoDRegister);
2899 CheckCondition(cond);
2900 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2901 B27 | B26 | B24 |
2902 ((static_cast<int32_t>(dd) >> 4)*B22) |
2903 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2904 B11 | B9 | B8 | addr.vencoding();
2905 Emit32(encoding);
2906}
2907
2908
2909void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
2910 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
2911}
2912
2913
2914void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
2915 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
2916}
2917
2918
2919void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
2920 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
2921}
2922
2923
2924void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
2925 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
2926}
2927
2928
// Shared implementation of vpush/vpop. `reg` is the raw number of the first
// register, `nregs` the count, `push` selects VPUSH vs VPOP and `dbl` selects
// double (D) vs single (S) registers.
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // 14U << 28 places the AL condition in the top four bits; the register
  // count occupies the imm8 field (doubled for D registers).
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     14U /* 0b1110 */ << 28 |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit32(encoding);
}
2953
2954
// Emit a three-operand single-precision VFP instruction (sd = sn op sm);
// `opcode` supplies the operation-specific bits.
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  // Each S register is split into a 4-bit field plus a single low bit.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2971
2972
// Emit a three-operand double-precision VFP instruction (dd = dn op dm);
// `opcode` supplies the operation-specific bits.
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  // Each D register is split into a 4-bit field plus a single high bit.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2989
2990
// Emit a two-operand VFP instruction with a single-precision destination and
// a double-precision source; `opcode` supplies the operation-specific bits.
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3004
3005
// Emit a two-operand VFP instruction with a double-precision destination and
// a single-precision source; `opcode` supplies the operation-specific bits.
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
3019
3020
// Copy the VFP status flags into the ARM APSR so they can drive conditional
// execution (destination register field is PC per the VMRS APSR form).
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit32(encoding);
}
3030
3031
// SVC #imm8: supervisor call with an 8-bit immediate (16-bit encoding).
void Thumb2Assembler::svc(uint32_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B14 | B12 |
      B11 | B10 | B9 | B8 |
      imm8;
  Emit16(encoding);
}
3039
3040
// BKPT #imm8: software breakpoint with an 8-bit immediate (16-bit encoding).
void Thumb2Assembler::bkpt(uint16_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;
  int16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 |
      imm8;
  Emit16(encoding);
}
3048
3049// Convert the given IT state to a mask bit given bit 0 of the first
3050// condition and a shift position.
3051static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
3052 switch (s) {
3053 case kItOmitted: return 1 << shift;
3054 case kItThen: return firstcond0 << shift;
3055 case kItElse: return !firstcond0 << shift;
3056 }
3057 return 0;
3058}
3059
3060
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;   // Unused slot: any condition is OK.
    case kItThen: it_conditions_[index] = cond; break;    // Same condition as the IT.
    case kItElse:
      // ARM encodes the inverse of a condition by flipping its low bit.
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
3072
3073
// IT (if-then) block header. `firstcond` applies to the first instruction of
// the block; i1-i3 state whether up to three following instructions are Then
// (same condition), Else (inverse) or omitted. The expected conditions are
// recorded so following conditional instructions can be validated.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);       // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // The mask encodes the Then/Else pattern; the bit immediately below the
  // last state bit terminates the list.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        // Full four-instruction block: terminating bit in position 0.
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 | B8 |
      firstcond << 4 |
      mask;
  Emit16(encoding);
}
3108
3109
// CBZ rn, label: compare rn with zero and branch if zero. The encoding only
// supports low registers and forward branches.
void Thumb2Assembler::cbz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link this branch into the label's chain of unresolved branches.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
    label->LinkTo(branchid);
  }
}
3123
3124
// CBNZ rn, label: compare rn with zero and branch if non-zero. The encoding
// only supports low registers and forward branches.
void Thumb2Assembler::cbnz(Register rn, Label* label) {
  CheckCondition(AL);
  if (label->IsBound()) {
    LOG(FATAL) << "cbnz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbnz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Link this branch into the label's chain of unresolved branches.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
    label->LinkTo(branchid);
  }
}
3138
3139
// BLX rm: branch with link and exchange to the address in rm (16-bit encoding).
void Thumb2Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
3146
3147
// BX rm: branch and exchange to the address in rm (16-bit encoding).
void Thumb2Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
3154
3155
3156void Thumb2Assembler::Push(Register rd, Condition cond) {
3157 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
3158}
3159
3160
3161void Thumb2Assembler::Pop(Register rd, Condition cond) {
3162 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
3163}
3164
3165
// Push a register list: STMDB SP!, {regs}.
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
3169
3170
// Pop a register list: LDMIA SP!, {regs}.
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
3174
3175
3176void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
3177 if (cond != AL || rd != rm) {
3178 mov(rd, ShifterOperand(rm), cond);
3179 }
3180}
3181
3182
// Bind the label to the current position in the instruction buffer.
void Thumb2Assembler::Bind(Label* label) {
  BindLabel(label, buffer_.Size());
}
3186
3187
// LSL rd, rm, #shift_imm. The immediate must be in [0, 31].
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
}
3194
3195
3196void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003197 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003198 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003199 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003200 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003201 EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003202}
3203
3204
3205void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003206 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003207 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003208 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003209 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003210 EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003211}
3212
3213
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  // Rotate right by immediate: valid immediates are 1-31 (a rotate amount of
  // zero is the RRX encoding, emitted by Rrx() below).
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
}
3220
3221
void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
  // Rotate right with extend (a one-bit rotate through the carry flag),
  // encoded as a rotate with a zero shift amount.
  CheckCondition(cond);
  EmitShift(rd, rm, RRX, 0, cond, set_cc);
}
3226
3227
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  // Register-controlled logical shift left: rd = rm << rn.
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, cond, set_cc);
}
3233
3234
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  // Register-controlled logical (zero-filling) shift right: rd = rm >> rn.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, cond, set_cc);
}
3240
3241
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  // Register-controlled arithmetic (sign-filling) shift right: rd = rm >> rn.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, cond, set_cc);
}
3247
3248
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  // Register-controlled rotate right: rd = rm rotated right by rn.
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, cond, set_cc);
}
3254
3255
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // Patches the offset fields of the 32-bit Thumb2 branch instruction `inst`
  // with the byte offset `offset` and returns the rewritten instruction.
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Branch offsets are encoded in halfword units; bit 0 is implicit.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1 = NOT(I1 EOR S), J2 = NOT(I2 EOR S).
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
                      imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    // Conditional branch: 21-bit offset field (imm6 + imm11 + J1/J2 + sign).
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    // NOTE(review): j1/j2 are read from offset bits 19/17 here, whereas
    // DecodeBranchOffset() writes J2/J1 back into bits 19/18 of the decoded
    // immediate. The round-trip only matches while bits 17-19 all equal the
    // sign bit — verify behavior for conditional branches near the range limit.
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
                      imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
3293
3294
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  // Inverse of EncodeBranchOffset(): extracts the byte offset encoded in the
  // 32-bit Thumb2 branch instruction `instr`.
  int32_t imm32;
  if ((instr & B12) == B12) {
    // Wide branch: imm32 reassembled as S:I1:I2:imm10:imm11:'0'.
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // I1 = NOT(J1 EOR S), I2 = NOT(J2 EOR S).
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    // Conditional branch: imm32 reassembled as S:J2:J1:imm6:imm11:'0'.
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  imm32 += 4;  // Undo the PC-read bias subtracted by EncodeBranchOffset().
  return imm32;
}
3321
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // Translates `old_position` (a position in the code stream before fixups
  // were expanded) to the corresponding position after expansion.
  //
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Non-monotonic query: restart the scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      // This fixup grew; all later positions shift by the size difference.
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3346
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  // Creates a new literal pool entry; only 32-bit and 64-bit literals are
  // supported.
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  // NOTE(review): handing out a pointer into literals_ assumes the container
  // does not invalidate references when it grows — confirm its type.
  return &literals_.back();
}
3352
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  // Emits a PC-relative load of a 32-bit literal into core register `rt`.
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  // High registers and forced 32-bit mode need the 32-bit encoding, which
  // also has a larger literal range (4KiB vs 1KiB).
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // Emit a placeholder that records the previous link in the literal label's
  // fixup chain; the fixup pass patches in the real offset later.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3367
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  // Emits a PC-relative load of a 64-bit literal into the pair `rt`/`rt2`.
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // Placeholder recording the previous link in the label's fixup chain;
  // patched by the fixup pass.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3379
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  // Emits a PC-relative load of a 32-bit literal into FP register `sd`.
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // Placeholder recording the previous link in the label's fixup chain;
  // patched by the fixup pass.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3390
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  // Emits a PC-relative load of a 64-bit literal into FP register `dd`.
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // Placeholder recording the previous link in the label's fixup chain;
  // patched by the fixup pass.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003401
Dave Allison65fcc2c2014-04-28 13:45:27 -07003402
void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                  Condition cond, SetCc set_cc) {
  // Computes rd = rn + value. When no flags update is requested, adding zero
  // reduces to a register move (or to nothing at all when rd == rn).
  if (value == 0 && set_cc != kCcSet) {
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
    add(rd, rn, shifter_op, cond, set_cc);
  } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond, set_cc);
  } else {
    // Neither ADD nor SUB can encode the constant; materialize it in IP.
    CHECK(rn != IP);
    if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
      // MVN of ~value yields value: IP = value, then rd = rn + IP.
      mvn(IP, shifter_op, cond, kCcKeep);
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
      // MVN of ~(-value) yields -value: IP = -value, then rd = rn - IP.
      mvn(IP, shifter_op, cond, kCcKeep);
      sub(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else {
      // Fall back to building the full 32-bit constant with MOVW/MOVT.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    }
  }
}
3437
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003438void Thumb2Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
3439 // We prefer to select the shorter code sequence rather than selecting add for
3440 // positive values and sub for negatives ones, which would slightly improve
3441 // the readability of generated code for some constants.
3442 ShifterOperand shifter_op;
3443 if (ShifterOperandCanHold(kNoRegister, rn, CMP, value, &shifter_op)) {
3444 cmp(rn, shifter_op, cond);
3445 } else if (ShifterOperandCanHold(kNoRegister, rn, CMN, ~value, &shifter_op)) {
3446 cmn(rn, shifter_op, cond);
3447 } else {
3448 CHECK(rn != IP);
3449 movw(IP, Low16Bits(value), cond);
3450 uint16_t value_high = High16Bits(value);
3451 if (value_high != 0) {
3452 movt(IP, value_high, cond);
3453 }
3454 cmp(rn, ShifterOperand(IP), cond);
3455 }
3456}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003457
Dave Allison65fcc2c2014-04-28 13:45:27 -07003458void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3459 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003460 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003461 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003462 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003463 mvn(rd, shifter_op, cond);
3464 } else {
3465 movw(rd, Low16Bits(value), cond);
3466 uint16_t value_high = High16Bits(value);
3467 if (value_high != 0) {
3468 movt(rd, value_high, cond);
3469 }
3470 }
3471}
3472
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003473int32_t Thumb2Assembler::GetAllowedLoadOffsetBits(LoadOperandType type) {
3474 switch (type) {
3475 case kLoadSignedByte:
3476 case kLoadSignedHalfword:
3477 case kLoadUnsignedHalfword:
3478 case kLoadUnsignedByte:
3479 case kLoadWord:
3480 // We can encode imm12 offset.
3481 return 0xfffu;
3482 case kLoadSWord:
3483 case kLoadDWord:
3484 case kLoadWordPair:
3485 // We can encode imm8:'00' offset.
3486 return 0xff << 2;
3487 default:
3488 LOG(FATAL) << "UNREACHABLE";
3489 UNREACHABLE();
3490 }
3491}
3492
3493int32_t Thumb2Assembler::GetAllowedStoreOffsetBits(StoreOperandType type) {
3494 switch (type) {
3495 case kStoreHalfword:
3496 case kStoreByte:
3497 case kStoreWord:
3498 // We can encode imm12 offset.
3499 return 0xfff;
3500 case kStoreSWord:
3501 case kStoreDWord:
3502 case kStoreWordPair:
3503 // We can encode imm8:'00' offset.
3504 return 0xff << 2;
3505 default:
3506 LOG(FATAL) << "UNREACHABLE";
3507 UNREACHABLE();
3508 }
3509}
3510
3511bool Thumb2Assembler::CanSplitLoadStoreOffset(int32_t allowed_offset_bits,
3512 int32_t offset,
3513 /*out*/ int32_t* add_to_base,
3514 /*out*/ int32_t* offset_for_load_store) {
3515 int32_t other_bits = offset & ~allowed_offset_bits;
3516 if (ShifterOperandCanAlwaysHold(other_bits) || ShifterOperandCanAlwaysHold(-other_bits)) {
3517 *add_to_base = offset & ~allowed_offset_bits;
3518 *offset_for_load_store = offset & allowed_offset_bits;
3519 return true;
3520 }
3521 return false;
3522}
3523
int32_t Thumb2Assembler::AdjustLoadStoreOffset(int32_t allowed_offset_bits,
                                               Register temp,
                                               Register base,
                                               int32_t offset,
                                               Condition cond) {
  // Rewrites an unencodable [base + offset] address: emits code leaving
  // `temp` as an adjusted base and returns the remaining offset, which is
  // guaranteed to fit in `allowed_offset_bits`.
  DCHECK_NE(offset & ~allowed_offset_bits, 0);
  int32_t add_to_base, offset_for_load;
  if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
    // temp = base + (unencodable part); return the encodable remainder.
    AddConstant(temp, base, add_to_base, cond, kCcKeep);
    return offset_for_load;
  } else {
    // Fold the entire offset into the base: temp = base + offset.
    LoadImmediate(temp, offset, cond);
    add(temp, temp, ShifterOperand(base), cond, kCcKeep);
    return 0;
  }
}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003540
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb.
void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
                                     Register reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  // Loads `reg` from [base + offset], first rewriting the address into an
  // encodable form when the offset does not fit the instruction.
  if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    // Inlined AdjustLoadStoreOffset() allows us to pull a few more tricks.
    int32_t allowed_offset_bits = GetAllowedLoadOffsetBits(type);
    DCHECK_NE(offset & ~allowed_offset_bits, 0);
    int32_t add_to_base, offset_for_load;
    if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
      // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
      // Clobbering `reg` is safe here: it is the load's destination anyway.
      AddConstant(reg, base, add_to_base, cond, kCcKeep);
      base = reg;
      offset = offset_for_load;
    } else {
      // The offset cannot be split; compute the full address in `reg`
      // (going through IP when `reg` aliases `base`).
      Register temp = (reg == base) ? IP : reg;
      LoadImmediate(temp, offset, cond);
      // TODO: Implement indexed load (not available for LDRD) and use it here to avoid the ADD.
      // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
      add(reg, reg, ShifterOperand((reg == base) ? IP : base), cond, kCcKeep);
      base = reg;
      offset = 0;
    }
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
3594
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
void Thumb2Assembler::LoadSFromOffset(SRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  // Loads the single-precision register `reg` from [base + offset], folding
  // any unencodable part of the offset into IP first.
  if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
    CHECK_NE(base, IP);
    offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadSWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
3609
3610
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
void Thumb2Assembler::LoadDFromOffset(DRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  // Loads the double-precision register `reg` from [base + offset], folding
  // any unencodable part of the offset into IP first.
  if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
    CHECK_NE(base, IP);
    offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadDWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
3625
3626
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  // Stores `reg` to [base + offset]. Unlike loads, the source register must
  // be preserved, so an unencodable offset requires a separate temporary:
  // IP when free, otherwise R5/R6 saved and restored around the store.
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push moved SP down by one register; compensate the offset.
        offset += kRegisterSize;
      }
    }
    // TODO: Implement indexed store (not available for STRD), inline AdjustLoadStoreOffset()
    // and in the "unsplittable" path get rid of the "add" by using the store indexed instead.
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(type), tmp_reg, base, offset, cond);
    base = tmp_reg;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the callee-saved secondary temporary if one was used.
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3681
3682
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
void Thumb2Assembler::StoreSToOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  // Stores the single-precision register `reg` to [base + offset], folding
  // any unencodable part of the offset into IP first.
  if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
    CHECK_NE(base, IP);
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreSWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
3697
3698
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
void Thumb2Assembler::StoreDToOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  // Stores the double-precision register `reg` to [base + offset], folding
  // any unencodable part of the offset into IP first.
  if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
    CHECK_NE(base, IP);
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreDWord), IP, base, offset, cond);
    base = IP;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
3713
3714
void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
  // Emits a full-system data memory barrier. The scratch register is not
  // actually used; the check only enforces the caller contract that R12
  // is passed.
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
3719
3720
void Thumb2Assembler::dmb(DmbOptions flavor) {
  // Emits a DMB with the given barrier option OR-ed into the low bits of
  // the base encoding.
  int32_t encoding = 0xf3bf8f50;  // dmb in T1 encoding.
  Emit32(encoding | flavor);
}
3725
3726
3727void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003728 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003729 cbz(r, label);
3730 } else {
3731 cmp(r, ShifterOperand(0));
3732 b(label, EQ);
3733 }
3734}
3735
3736
Dave Allison65fcc2c2014-04-28 13:45:27 -07003737void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003738 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003739 cbnz(r, label);
3740 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003741 cmp(r, ShifterOperand(0));
3742 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003743 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003744}
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003745
JumpTable* Thumb2Assembler::CreateJumpTable(std::vector<Label*>&& labels, Register base_reg) {
  // Creates a jump table with the given targets and emits the fixup-patched
  // code that loads the table's address into `base_reg`.
  jump_tables_.emplace_back(std::move(labels));
  JumpTable* table = &jump_tables_.back();
  DCHECK(!table->GetLabel()->IsBound());

  // High registers and forced 32-bit mode need the 32-bit encoding, which
  // also has a larger addressable range (4KiB vs 1KiB).
  bool use32bit = IsForced32Bit() || IsHighRegister(base_reg);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteralAddr4KiB : Fixup::kLiteralAddr1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadLiteralAddress(location, base_reg, size));
  // Emit a placeholder recording the previous link in the label's fixup
  // chain; the fixup pass patches the real value in later.
  Emit16(static_cast<uint16_t>(table->GetLabel()->position_));
  table->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());

  return table;
}
3764
void Thumb2Assembler::EmitJumpTableDispatch(JumpTable* jump_table, Register displacement_reg) {
  // Emits the dispatch jump `ADD PC, PC, displacement_reg` for `jump_table`.
  CHECK(!IsForced32Bit()) << "Forced 32-bit dispatch not implemented yet";
  // 32-bit ADD doesn't support PC as an input, so we need a two-instruction sequence:
  //   SUB ip, ip, #0
  //   ADD pc, ip, reg
  // TODO: Implement.

  // The anchor's position needs to be fixed up before we can compute offsets - so make it a tracked
  // label.
  BindTrackedLabel(jump_table->GetAnchorLabel());

  add(PC, PC, ShifterOperand(displacement_reg));
}
3778
Dave Allison65fcc2c2014-04-28 13:45:27 -07003779} // namespace arm
3780} // namespace art