/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm.h"

#include <algorithm>

#include "base/bit_utils.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

namespace art {
namespace arm {

const char* kRegisterNames[] = {
  "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",
  "fp", "ip", "sp", "lr", "pc"
};

const char* kConditionNames[] = {
  "EQ", "NE", "CS", "CC", "MI", "PL", "VS", "VC", "HI", "LS", "GE", "LT", "GT",
  "LE", "AL",
};

std::ostream& operator<<(std::ostream& os, const Register& rhs) {
  if (rhs >= R0 && rhs <= PC) {
    os << kRegisterNames[rhs];
  } else {
    os << "Register[" << static_cast<int>(rhs) << "]";
  }
  return os;
}


std::ostream& operator<<(std::ostream& os, const SRegister& rhs) {
  if (rhs >= S0 && rhs < kNumberOfSRegisters) {
    os << "s" << static_cast<int>(rhs);
  } else {
    os << "SRegister[" << static_cast<int>(rhs) << "]";
  }
  return os;
}


std::ostream& operator<<(std::ostream& os, const DRegister& rhs) {
  if (rhs >= D0 && rhs < kNumberOfDRegisters) {
    os << "d" << static_cast<int>(rhs);
  } else {
    os << "DRegister[" << static_cast<int>(rhs) << "]";
  }
  return os;
}

std::ostream& operator<<(std::ostream& os, const Condition& rhs) {
  if (rhs >= EQ && rhs <= AL) {
    os << kConditionNames[rhs];
  } else {
    os << "Condition[" << static_cast<int>(rhs) << "]";
  }
  return os;
}

ShifterOperand::ShifterOperand(uint32_t immed)
    : type_(kImmediate), rm_(kNoRegister), rs_(kNoRegister),
      is_rotate_(false), is_shift_(false), shift_(kNoShift), rotate_(0), immed_(immed) {
  CHECK(immed < (1u << 12) || ArmAssembler::ModifiedImmediate(immed) != kInvalidModifiedImmediate);
}

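// ARM (A32) data-processing operand encoding: either a rotated 8-bit immediate
// (rotate_imm:imm8) or a register that is optionally shifted by an immediate or by another
// register.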
uint32_t ShifterOperand::encodingArm() const {
  CHECK(is_valid());
  switch (type_) {
    case kImmediate:
      if (is_rotate_) {
        return (rotate_ << kRotateShift) | (immed_ << kImmed8Shift);
      } else {
        return immed_;
      }
    case kRegister:
      if (is_shift_) {
        uint32_t shift_type;
        switch (shift_) {
          case arm::Shift::ROR:
            shift_type = static_cast<uint32_t>(shift_);
            CHECK_NE(immed_, 0U);
            break;
          case arm::Shift::RRX:
            shift_type = static_cast<uint32_t>(arm::Shift::ROR);  // Same encoding as ROR.
            CHECK_EQ(immed_, 0U);
            break;
          default:
            shift_type = static_cast<uint32_t>(shift_);
        }
        // Shifted immediate or register.
        if (rs_ == kNoRegister) {
          // Immediate shift.
          return immed_ << kShiftImmShift |
                 shift_type << kShiftShift |
                 static_cast<uint32_t>(rm_);
        } else {
          // Register shift.
          return static_cast<uint32_t>(rs_) << kShiftRegisterShift |
                 shift_type << kShiftShift | (1 << 4) |
                 static_cast<uint32_t>(rm_);
        }
      } else {
        // Simple register.
        return static_cast<uint32_t>(rm_);
      }
    default:
      // Can't get here.
      LOG(FATAL) << "Invalid shifter operand for ARM";
      return 0;
  }
}

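// Thumb-2 (T32) operand encoding. There is no register-shifted-register form here; an
// immediate shift is encoded as imm3 (bits 14-12), imm2 (bits 7-6), shift type (bits 5-4)
// and Rm (bits 3-0).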
uint32_t ShifterOperand::encodingThumb() const {
  switch (type_) {
    case kImmediate:
      return immed_;
    case kRegister:
      if (is_shift_) {
        // Shifted immediate or register.
        if (rs_ == kNoRegister) {
          // Immediate shift.
          if (shift_ == RRX) {
            DCHECK_EQ(immed_, 0u);
            // RRX is encoded as an ROR with imm 0.
            return ROR << 4 | static_cast<uint32_t>(rm_);
          } else {
            DCHECK((1 <= immed_ && immed_ <= 31) ||
                   (immed_ == 0u && shift_ == LSL) ||
                   (immed_ == 32u && (shift_ == ASR || shift_ == LSR)));
            uint32_t imm3 = (immed_ >> 2) & 7 /* 0b111 */;
            uint32_t imm2 = immed_ & 3U /* 0b11 */;

            return imm3 << 12 | imm2 << 6 | shift_ << 4 |
                   static_cast<uint32_t>(rm_);
          }
        } else {
          LOG(FATAL) << "No register-shifted register instruction available in thumb";
          return 0;
        }
      } else {
        // Simple register.
        return static_cast<uint32_t>(rm_);
      }
    default:
      // Can't get here.
      LOG(FATAL) << "Invalid shifter operand for thumb";
      UNREACHABLE();
  }
}

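// ARM addressing mode 2: base register plus either a 12-bit immediate offset or a (possibly
// shifted) index register. A negative immediate is encoded by flipping the U (add/subtract) bit.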
uint32_t Address::encodingArm() const {
  CHECK(IsAbsoluteUint<12>(offset_));
  uint32_t encoding;
  if (is_immed_offset_) {
    if (offset_ < 0) {
      encoding = (am_ ^ (1 << kUShift)) | -offset_;  // Flip U to adjust sign.
    } else {
      encoding = am_ | offset_;
    }
  } else {
    uint32_t shift = shift_;
    if (shift == RRX) {
      CHECK_EQ(offset_, 0);
      shift = ROR;
    }
    encoding = am_ | static_cast<uint32_t>(rm_) | shift << 5 | offset_ << 7 | B25;
  }
  encoding |= static_cast<uint32_t>(rn_) << kRnShift;
  return encoding;
}


uint32_t Address::encodingThumb(bool is_32bit) const {
  uint32_t encoding = 0;
  if (is_immed_offset_) {
    encoding = static_cast<uint32_t>(rn_) << 16;
    // Check for the T3/T4 encoding.
    // PUW must be Offset for T3.
    // Convert ARM PU0W to PUW.
    // The mode is in ARM encoding format, which is:
    // |P|U|0|W|
    // We need this in Thumb2 mode:
    // |P|U|W|

    uint32_t am = am_;
    int32_t offset = offset_;
    if (offset < 0) {
      am ^= 1 << kUShift;
      offset = -offset;
    }
    if (offset_ < 0 || (offset >= 0 && offset < 256 &&
        am_ != Mode::Offset)) {
      // T4 encoding.
      uint32_t PUW = am >> 21;  // Move down to bottom of word.
      PUW = (PUW >> 1) | (PUW & 1);  // Bits 3, 2 and 0.
      // If P is 0 then W must be 1 (different from ARM).
      if ((PUW & 4U /* 0b100 */) == 0) {
        PUW |= 1U /* 0b1 */;
      }
      encoding |= B11 | PUW << 8 | offset;
    } else {
      // T3 encoding (also sets op1 to 0b01).
      encoding |= B23 | offset_;
    }
  } else {
    // Register offset, possibly shifted.
    // Need to choose between encoding T1 (16 bit) or T2.
    // Only Offset mode is supported.  The shift must be LSL and the count
    // is only 2 bits.
    CHECK_EQ(shift_, LSL);
    CHECK_LE(offset_, 4);
    CHECK_EQ(am_, Offset);
    bool is_t2 = is_32bit;
    if (ArmAssembler::IsHighRegister(rn_) || ArmAssembler::IsHighRegister(rm_)) {
      is_t2 = true;
    } else if (offset_ != 0) {
      is_t2 = true;
    }
    if (is_t2) {
      encoding = static_cast<uint32_t>(rn_) << 16 | static_cast<uint32_t>(rm_) |
          offset_ << 4;
    } else {
      encoding = static_cast<uint32_t>(rn_) << 3 | static_cast<uint32_t>(rm_) << 6;
    }
  }
  return encoding;
}

// This is very like the ARM encoding except the offset is 10 bits.
uint32_t Address::encodingThumbLdrdStrd() const {
  DCHECK(IsImmediate());
  uint32_t encoding;
  uint32_t am = am_;
  // If P is 0 then W must be 1 (different from ARM).
  uint32_t PU1W = am_ >> 21;  // Move down to bottom of word.
  if ((PU1W & 8U /* 0b1000 */) == 0) {
    am |= 1 << 21;  // Set W bit.
  }
  if (offset_ < 0) {
    int32_t off = -offset_;
    CHECK_LT(off, 1024);
    CHECK_ALIGNED(off, 4);
    encoding = (am ^ (1 << kUShift)) | off >> 2;  // Flip U to adjust sign.
  } else {
    CHECK_LT(offset_, 1024);
    CHECK_ALIGNED(offset_, 4);
    encoding = am | offset_ >> 2;
  }
  encoding |= static_cast<uint32_t>(rn_) << 16;
  return encoding;
}

// Encoding for ARM addressing mode 3.
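// The 8-bit immediate produced by encodingArm() is split into imm4H (bits 11-8) and
// imm4L (bits 3-0); the base register stays in bits 19-16.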
uint32_t Address::encoding3() const {
  const uint32_t offset_mask = (1 << 12) - 1;
  uint32_t encoding = encodingArm();
  uint32_t offset = encoding & offset_mask;
  CHECK_LT(offset, 256u);
  return (encoding & ~offset_mask) | ((offset & 0xf0) << 4) | (offset & 0xf);
}

// Encoding for vfp load/store addressing.
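// The immediate is encoded as a word count (offset / 4); bit 23 (U) is set for Offset (add)
// addressing and clear for NegOffset (subtract).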
uint32_t Address::vencoding() const {
  CHECK(IsAbsoluteUint<10>(offset_));  // In the range -1020 to +1020.
  CHECK_ALIGNED(offset_, 2);  // Multiple of 4.

  const uint32_t offset_mask = (1 << 12) - 1;
  uint32_t encoding = encodingArm();
  uint32_t offset = encoding & offset_mask;
  CHECK((am_ == Offset) || (am_ == NegOffset));
  uint32_t vencoding_value = (encoding & (0xf << kRnShift)) | (offset >> 2);
  if (am_ == Offset) {
    vencoding_value |= 1 << 23;
  }
  return vencoding_value;
}


bool Address::CanHoldLoadOffsetArm(LoadOperandType type, int offset) {
  switch (type) {
    case kLoadSignedByte:
    case kLoadSignedHalfword:
    case kLoadUnsignedHalfword:
    case kLoadWordPair:
      return IsAbsoluteUint<8>(offset);  // Addressing mode 3.
    case kLoadUnsignedByte:
    case kLoadWord:
      return IsAbsoluteUint<12>(offset);  // Addressing mode 2.
    case kLoadSWord:
    case kLoadDWord:
      return IsAbsoluteUint<10>(offset);  // VFP addressing mode.
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}


bool Address::CanHoldStoreOffsetArm(StoreOperandType type, int offset) {
  switch (type) {
    case kStoreHalfword:
    case kStoreWordPair:
      return IsAbsoluteUint<8>(offset);  // Addressing mode 3.
    case kStoreByte:
    case kStoreWord:
      return IsAbsoluteUint<12>(offset);  // Addressing mode 2.
    case kStoreSWord:
    case kStoreDWord:
      return IsAbsoluteUint<10>(offset);  // VFP addressing mode.
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

bool Address::CanHoldLoadOffsetThumb(LoadOperandType type, int offset) {
  switch (type) {
    case kLoadSignedByte:
    case kLoadSignedHalfword:
    case kLoadUnsignedHalfword:
    case kLoadUnsignedByte:
    case kLoadWord:
      return IsAbsoluteUint<12>(offset);
    case kLoadSWord:
    case kLoadDWord:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;  // VFP addressing mode.
    case kLoadWordPair:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}


bool Address::CanHoldStoreOffsetThumb(StoreOperandType type, int offset) {
  switch (type) {
    case kStoreHalfword:
    case kStoreByte:
    case kStoreWord:
      return IsAbsoluteUint<12>(offset);
    case kStoreSWord:
    case kStoreDWord:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;  // VFP addressing mode.
    case kStoreWordPair:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void ArmAssembler::Pad(uint32_t bytes) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  for (uint32_t i = 0; i < bytes; ++i) {
    buffer_.Emit<uint8_t>(0);
  }
}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg));
}

static dwarf::Reg DWARFReg(SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg));
}

constexpr size_t kFramePointerSize = kArmPointerSize;

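// Frame layout built below, from higher to lower addresses: callee-save core registers plus LR,
// then callee-save S registers, then the rest of the frame with the Method* (from R0) stored at
// SP + 0. Entry spills are written just above the frame, into the incoming stack-argument slots.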
void ArmAssembler::BuildFrame(size_t frame_size,
                              ManagedRegister method_reg,
                              ArrayRef<const ManagedRegister> callee_save_regs,
                              const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_EQ(buffer_.Size(), 0U);  // Nothing emitted yet.
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK_EQ(R0, method_reg.AsArm().AsCoreRegister());

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  PushList(core_spill_mask);
  cfi_.AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi_.RelOffsetForMany(DWARFReg(Register(0)), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    vpushs(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask));
    cfi_.AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi_.RelOffsetForMany(DWARFReg(SRegister(0)), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);  // Must at least have space for Method*.
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // Handles CFI as well.

  // Write out Method*.
  StoreToOffset(kStoreWord, R0, SP, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      StoreToOffset(kStoreWord, reg.AsCoreRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    }
  }
}

void ArmAssembler::RemoveFrame(size_t frame_size,
                               ArrayRef<const ManagedRegister> callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();

  // Compute callee saves to pop and PC.
  RegList core_spill_mask = 1 << PC;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // Handles CFI as well.

  if (fp_spill_mask != 0) {
    vpops(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask));
    cfi_.AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi_.RestoreMany(DWARFReg(SRegister(0)), fp_spill_mask);
  }

  // Pop callee saves and PC.
  PopList(core_spill_mask);

  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}

void ArmAssembler::IncreaseFrameSize(size_t adjust) {
  AddConstant(SP, -adjust);
  cfi_.AdjustCFAOffset(adjust);
}

void ArmAssembler::DecreaseFrameSize(size_t adjust) {
  AddConstant(SP, adjust);
  cfi_.AdjustCFAOffset(-adjust);
}

void ArmAssembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {
  ArmManagedRegister src = msrc.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    StoreToOffset(kStoreWord, src.AsRegisterPairLow(), SP, dest.Int32Value());
    StoreToOffset(kStoreWord, src.AsRegisterPairHigh(),
                  SP, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, dest.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, dest.Int32Value());
  }
}

void ArmAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::StoreSpanning(FrameOffset dest, ManagedRegister msrc,
                                 FrameOffset in_off, ManagedRegister mscratch) {
  ArmManagedRegister src = msrc.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, in_off.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value() + 4);
}

void ArmAssembler::CopyRef(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                           bool unpoison_reference) {
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(dst.IsCoreRegister() && dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(),
                 base.AsArm().AsCoreRegister(), offs.Int32Value());
  if (unpoison_reference) {
    MaybeUnpoisonHeapReference(dst.AsCoreRegister());
  }
}

void ArmAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(), SP, src.Int32Value());
}

void ArmAssembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
                              Offset offs) {
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(dst.IsCoreRegister() && dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(),
                 base.AsArm().AsCoreRegister(), offs.Int32Value());
}

void ArmAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                         ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
                                            ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), TR, dest.Int32Value());
}

static void EmitLoad(ArmAssembler* assembler, ManagedRegister m_dst,
                     Register src_register, int32_t src_offset, size_t size) {
  ArmManagedRegister dst = m_dst.AsArm();
  if (dst.IsNoRegister()) {
    CHECK_EQ(0u, size) << dst;
  } else if (dst.IsCoreRegister()) {
    CHECK_EQ(4u, size) << dst;
    assembler->LoadFromOffset(kLoadWord, dst.AsCoreRegister(), src_register, src_offset);
  } else if (dst.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dst;
    assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairLow(), src_register, src_offset);
    assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairHigh(), src_register, src_offset + 4);
  } else if (dst.IsSRegister()) {
    assembler->LoadSFromOffset(dst.AsSRegister(), src_register, src_offset);
  } else {
    CHECK(dst.IsDRegister()) << dst;
    assembler->LoadDFromOffset(dst.AsDRegister(), src_register, src_offset);
  }
}

void ArmAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return EmitLoad(this, m_dst, SP, src.Int32Value(), size);
}

void ArmAssembler::LoadFromThread32(ManagedRegister m_dst, ThreadOffset<4> src, size_t size) {
  return EmitLoad(this, m_dst, TR, src.Int32Value(), size);
}

void ArmAssembler::LoadRawPtrFromThread32(ManagedRegister m_dst, ThreadOffset<4> offs) {
  ArmManagedRegister dst = m_dst.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(), TR, offs.Int32Value());
}

void ArmAssembler::CopyRawPtrFromThread32(FrameOffset fr_offs,
                                          ThreadOffset<4> thr_offs,
                                          ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 TR, thr_offs.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                SP, fr_offs.Int32Value());
}

void ArmAssembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs,
                                        FrameOffset fr_offs,
                                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 SP, fr_offs.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                TR, thr_offs.Int32Value());
}

void ArmAssembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs,
                                              FrameOffset fr_offs,
                                              ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value(), AL);
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                TR, thr_offs.Int32Value());
}

void ArmAssembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) {
  StoreToOffset(kStoreWord, SP, TR, thr_offs.Int32Value());
}

void ArmAssembler::SignExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmAssembler::ZeroExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

void ArmAssembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t /*size*/) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      mov(dst.AsCoreRegister(), ShifterOperand(src.AsCoreRegister()));
    } else if (dst.IsDRegister()) {
      CHECK(src.IsDRegister()) << src;
      vmovd(dst.AsDRegister(), src.AsDRegister());
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      vmovs(dst.AsSRegister(), src.AsSRegister());
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        mov(dst.AsRegisterPairLow(), ShifterOperand(src.AsRegisterPairLow()));
        mov(dst.AsRegisterPairHigh(), ShifterOperand(src.AsRegisterPairHigh()));
      } else {
        mov(dst.AsRegisterPairHigh(), ShifterOperand(src.AsRegisterPairHigh()));
        mov(dst.AsRegisterPairLow(), ShifterOperand(src.AsRegisterPairLow()));
      }
    }
  }
}

void ArmAssembler::Copy(FrameOffset dest, FrameOffset src, ManagedRegister mscratch, size_t size) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value() + 4);
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value() + 4);
  }
}

void ArmAssembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsArm().AsCoreRegister();
  CHECK_EQ(size, 4u);
  LoadFromOffset(kLoadWord, scratch, src_base.AsArm().AsCoreRegister(), src_offset.Int32Value());
  StoreToOffset(kStoreWord, scratch, SP, dest.Int32Value());
}

void ArmAssembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsArm().AsCoreRegister();
  CHECK_EQ(size, 4u);
  LoadFromOffset(kLoadWord, scratch, SP, src.Int32Value());
  StoreToOffset(kStoreWord, scratch, dest_base.AsArm().AsCoreRegister(), dest_offset.Int32Value());
}

void ArmAssembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                        ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

void ArmAssembler::Copy(ManagedRegister dest, Offset dest_offset,
                        ManagedRegister src, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  CHECK_EQ(size, 4u);
  Register scratch = mscratch.AsArm().AsCoreRegister();
  LoadFromOffset(kLoadWord, scratch, src.AsArm().AsCoreRegister(), src_offset.Int32Value());
  StoreToOffset(kStoreWord, scratch, dest.AsArm().AsCoreRegister(), dest_offset.Int32Value());
}

void ArmAssembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/, FrameOffset /*src*/, Offset /*src_offset*/,
                        ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

void ArmAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                          FrameOffset handle_scope_offset,
                                          ManagedRegister min_reg, bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
                     SP, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }
    cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
    if (!out_reg.Equals(in_reg)) {
      it(EQ, kItElse);
      LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
    } else {
      it(NE);
    }
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
}

void ArmAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                          FrameOffset handle_scope_offset,
                                          ManagedRegister mscratch,
                                          bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  if (null_allowed) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP,
                   handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    cmp(scratch.AsCoreRegister(), ShifterOperand(0));
    it(NE);
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, out_off.Int32Value());
}

void ArmAssembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                ManagedRegister min_reg) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  CHECK(in_reg.IsCoreRegister()) << in_reg;
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);  // TODO: why EQ?
  }
  cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
  it(NE);
  LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
                 in_reg.AsCoreRegister(), 0, NE);
}

void ArmAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void ArmAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void ArmAssembler::Call(ManagedRegister mbase, Offset offset,
                        ManagedRegister mscratch) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 base.AsCoreRegister(), offset.Int32Value());
  blx(scratch.AsCoreRegister());
  // TODO: place reference map on call.
}

void ArmAssembler::Call(FrameOffset base, Offset offset,
                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  // Call *(*(SP + base) + offset)
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 SP, base.Int32Value());
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 scratch.AsCoreRegister(), offset.Int32Value());
  blx(scratch.AsCoreRegister());
  // TODO: place reference map on call.
}

void ArmAssembler::CallFromThread32(ThreadOffset<4> /*offset*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);
}

void ArmAssembler::GetCurrentThread(ManagedRegister tr) {
  mov(tr.AsArm().AsCoreRegister(), ShifterOperand(TR));
}

void ArmAssembler::GetCurrentThread(FrameOffset offset,
                                    ManagedRegister /*scratch*/) {
  StoreToOffset(kStoreWord, TR, SP, offset.Int32Value(), AL);
}

void ArmAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  ArmManagedRegister scratch = mscratch.AsArm();
  ArmExceptionSlowPath* slow = new (GetArena()) ArmExceptionSlowPath(scratch, stack_adjust);
  buffer_.EnqueueSlowPath(slow);
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 TR, Thread::ExceptionOffset<4>().Int32Value());
  cmp(scratch.AsCoreRegister(), ShifterOperand(0));
  b(slow->Entry(), NE);
}

void ArmExceptionSlowPath::Emit(Assembler* sasm) {
  ArmAssembler* sp_asm = down_cast<ArmAssembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  if (stack_adjust_ != 0) {  // Fix up the frame.
    __ DecreaseFrameSize(stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving R0 as this call won't return.
  __ mov(R0, ShifterOperand(scratch_.AsCoreRegister()));
  // Set up call to Thread::Current()->pDeliverException.
  __ LoadFromOffset(kLoadWord, R12, TR, QUICK_ENTRYPOINT_OFFSET(4, pDeliverException).Int32Value());
  __ blx(R12);
#undef __
}

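// Binary-search count of leading zero bits: the search window is halved each iteration.
// Returns 32 for val == 0.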
static int LeadingZeros(uint32_t val) {
  uint32_t alt;
  int32_t n;
  int32_t count;

  count = 16;
  n = 32;
  do {
    alt = val >> count;
    if (alt != 0) {
      n = n - count;
      val = alt;
    }
    count >>= 1;
  } while (count);
  return n - val;
}

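// Compute the Thumb-2 "modified immediate" encoding of 'value' as a 12-bit i:imm3:a:bcdefgh
// field. Representable forms are 0x000000XY, 0x00XY00XY, 0xXY00XY00, 0xXYXYXYXY, and an
// 8-bit value with its top bit set rotated into position; anything else yields
// kInvalidModifiedImmediate.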
uint32_t ArmAssembler::ModifiedImmediate(uint32_t value) {
  int32_t z_leading;
  int32_t z_trailing;
  uint32_t b0 = value & 0xff;

  /* Note: case of value==0 must use 0:000:0:0000000 encoding */
  if (value <= 0xFF)
    return b0;  // 0:000:a:bcdefgh.
  if (value == ((b0 << 16) | b0))
    return (0x1 << 12) | b0; /* 0:001:a:bcdefgh */
  if (value == ((b0 << 24) | (b0 << 16) | (b0 << 8) | b0))
    return (0x3 << 12) | b0; /* 0:011:a:bcdefgh */
  b0 = (value >> 8) & 0xff;
  if (value == ((b0 << 24) | (b0 << 8)))
    return (0x2 << 12) | b0; /* 0:010:a:bcdefgh */
  /* Can we do it with rotation? */
  z_leading = LeadingZeros(value);
  z_trailing = 32 - LeadingZeros(~value & (value - 1));
  /* A run of eight or fewer active bits? */
  if ((z_leading + z_trailing) < 24)
    return kInvalidModifiedImmediate;  /* No - bail */
  /* Left-justify the constant, discarding msb (known to be 1). */
  value <<= z_leading + 1;
  /* Create bcdefgh */
  value >>= 25;

  /* Put it all together */
  uint32_t v = 8 + z_leading;

  uint32_t i = (v & 16U /* 0b10000 */) >> 4;
  uint32_t imm3 = (v >> 1) & 7U /* 0b111 */;
  uint32_t a = v & 1;
  return value | i << 26 | imm3 << 12 | a << 7;
}

void ArmAssembler::FinalizeTrackedLabels() {
  if (!tracked_labels_.empty()) {
    // This array should be sorted, as assembly is generated in linearized order. It isn't
    // technically required, but GetAdjustedPosition() used in AdjustLabelPosition() can take
    // advantage of it. So ensure that it's actually the case.
    DCHECK(std::is_sorted(
        tracked_labels_.begin(),
        tracked_labels_.end(),
        [](const Label* lhs, const Label* rhs) { return lhs->Position() < rhs->Position(); }));

    Label* last_label = nullptr;  // Track duplicates, we must not adjust twice.
    for (Label* label : tracked_labels_) {
      DCHECK_NE(label, last_label);
      AdjustLabelPosition(label);
      last_label = label;
    }
  }
}

}  // namespace arm
}  // namespace art