/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm.h"

#include <algorithm>

#include "base/bit_utils.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

namespace art {
namespace arm {

const char* kRegisterNames[] = {
  "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",
  "fp", "ip", "sp", "lr", "pc"
};

const char* kConditionNames[] = {
  "EQ", "NE", "CS", "CC", "MI", "PL", "VS", "VC", "HI", "LS", "GE", "LT", "GT",
  "LE", "AL",
};

std::ostream& operator<<(std::ostream& os, const Register& rhs) {
  if (rhs >= R0 && rhs <= PC) {
    os << kRegisterNames[rhs];
  } else {
    os << "Register[" << static_cast<int>(rhs) << "]";
  }
  return os;
}


std::ostream& operator<<(std::ostream& os, const SRegister& rhs) {
  if (rhs >= S0 && rhs < kNumberOfSRegisters) {
    os << "s" << static_cast<int>(rhs);
  } else {
    os << "SRegister[" << static_cast<int>(rhs) << "]";
  }
  return os;
}


std::ostream& operator<<(std::ostream& os, const DRegister& rhs) {
  if (rhs >= D0 && rhs < kNumberOfDRegisters) {
    os << "d" << static_cast<int>(rhs);
  } else {
    os << "DRegister[" << static_cast<int>(rhs) << "]";
  }
  return os;
}

std::ostream& operator<<(std::ostream& os, const Condition& rhs) {
  if (rhs >= EQ && rhs <= AL) {
    os << kConditionNames[rhs];
  } else {
    os << "Condition[" << static_cast<int>(rhs) << "]";
  }
  return os;
}

ShifterOperand::ShifterOperand(uint32_t immed)
    : type_(kImmediate), rm_(kNoRegister), rs_(kNoRegister),
      is_rotate_(false), is_shift_(false), shift_(kNoShift), rotate_(0), immed_(immed) {
  CHECK(immed < (1u << 12) || ArmAssembler::ModifiedImmediate(immed) != kInvalidModifiedImmediate);
}


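// Produces the ARM (A32) "operand 2" encoding of this shifter operand: a rotated 8-bit
// immediate, a register shifted by an immediate, or a register shifted by another
// register. RRX is emitted as ROR with a zero shift amount.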
uint32_t ShifterOperand::encodingArm() const {
  CHECK(is_valid());
  switch (type_) {
    case kImmediate:
      if (is_rotate_) {
        return (rotate_ << kRotateShift) | (immed_ << kImmed8Shift);
      } else {
        return immed_;
      }
    case kRegister:
      if (is_shift_) {
        uint32_t shift_type;
        switch (shift_) {
          case arm::Shift::ROR:
            shift_type = static_cast<uint32_t>(shift_);
            CHECK_NE(immed_, 0U);
            break;
          case arm::Shift::RRX:
            shift_type = static_cast<uint32_t>(arm::Shift::ROR);  // Same encoding as ROR.
            CHECK_EQ(immed_, 0U);
            break;
          default:
            shift_type = static_cast<uint32_t>(shift_);
        }
        // Shifted immediate or register.
        if (rs_ == kNoRegister) {
          // Immediate shift.
          return immed_ << kShiftImmShift |
                 shift_type << kShiftShift |
                 static_cast<uint32_t>(rm_);
        } else {
          // Register shift.
          return static_cast<uint32_t>(rs_) << kShiftRegisterShift |
                 shift_type << kShiftShift | (1 << 4) |
                 static_cast<uint32_t>(rm_);
        }
      } else {
        // Simple register.
        return static_cast<uint32_t>(rm_);
      }
    default:
      // Can't get here.
      LOG(FATAL) << "Invalid shifter operand for ARM";
      return 0;
  }
}

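// Produces the Thumb-2 encoding of this shifter operand. An immediate shift amount is
// split across the imm3 (bits 14..12) and imm2 (bits 7..6) fields; there is no
// register-shifted register form here.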
uint32_t ShifterOperand::encodingThumb() const {
  switch (type_) {
    case kImmediate:
      return immed_;
    case kRegister:
      if (is_shift_) {
        // Shifted immediate or register.
        if (rs_ == kNoRegister) {
          // Immediate shift.
          if (shift_ == RRX) {
            DCHECK_EQ(immed_, 0u);
            // RRX is encoded as an ROR with imm 0.
            return ROR << 4 | static_cast<uint32_t>(rm_);
          } else {
            DCHECK((1 <= immed_ && immed_ <= 31) ||
                   (immed_ == 0u && shift_ == LSL) ||
                   (immed_ == 32u && (shift_ == ASR || shift_ == LSR)));
            uint32_t imm3 = (immed_ >> 2) & 7 /* 0b111 */;
            uint32_t imm2 = immed_ & 3U /* 0b11 */;

            return imm3 << 12 | imm2 << 6 | shift_ << 4 |
                   static_cast<uint32_t>(rm_);
          }
        } else {
          LOG(FATAL) << "No register-shifted register instruction available in thumb";
          return 0;
        }
      } else {
        // Simple register.
        return static_cast<uint32_t>(rm_);
      }
    default:
      // Can't get here.
      LOG(FATAL) << "Invalid shifter operand for thumb";
      UNREACHABLE();
  }
}

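// Produces the ARM (A32) encoding of this address: either a 12-bit immediate offset or a
// (possibly shifted) register offset. A negative immediate offset is made positive and
// the U bit is flipped instead.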
uint32_t Address::encodingArm() const {
  CHECK(IsAbsoluteUint<12>(offset_));
  uint32_t encoding;
  if (is_immed_offset_) {
    if (offset_ < 0) {
      encoding = (am_ ^ (1 << kUShift)) | -offset_;  // Flip U to adjust sign.
    } else {
      encoding = am_ | offset_;
    }
  } else {
    uint32_t shift = shift_;
    if (shift == RRX) {
      CHECK_EQ(offset_, 0);
      shift = ROR;
    }
    encoding = am_ | static_cast<uint32_t>(rm_) | shift << 5 | offset_ << 7 | B25;
  }
  encoding |= static_cast<uint32_t>(rn_) << kRnShift;
  return encoding;
}


uint32_t Address::encodingThumb(bool is_32bit) const {
  uint32_t encoding = 0;
  if (is_immed_offset_) {
    encoding = static_cast<uint32_t>(rn_) << 16;
    // Check for the T3/T4 encoding.
    // The addressing mode must be Offset for T3.
    // The mode comes in ARM format, so convert the ARM P|U|0|W bits to the
    // Thumb2 P|U|W form:
    //   ARM:    |P|U|0|W|
    //   Thumb2: |P|U|W|

    uint32_t am = am_;
    int32_t offset = offset_;
    if (offset < 0) {
      am ^= 1 << kUShift;
      offset = -offset;
    }
    if (offset_ < 0 || (offset >= 0 && offset < 256 &&
        am_ != Mode::Offset)) {
      // T4 encoding.
      uint32_t PUW = am >> 21;   // Move down to bottom of word.
      PUW = (PUW >> 1) | (PUW & 1);   // Bits 3, 2 and 0.
      // If P is 0 then W must be 1 (different from ARM).
      if ((PUW & 4U /* 0b100 */) == 0) {
        PUW |= 1U /* 0b1 */;
      }
      encoding |= B11 | PUW << 8 | offset;
    } else {
      // T3 encoding (also sets op1 to 0b01).
      encoding |= B23 | offset_;
    }
  } else {
    // Register offset, possibly shifted.
    // Need to choose between encoding T1 (16 bit) or T2.
    // Only Offset mode is supported.  The shift must be LSL and the count
    // is only 2 bits.
    CHECK_EQ(shift_, LSL);
    CHECK_LE(offset_, 4);
    CHECK_EQ(am_, Offset);
    bool is_t2 = is_32bit;
    if (ArmAssembler::IsHighRegister(rn_) || ArmAssembler::IsHighRegister(rm_)) {
      is_t2 = true;
    } else if (offset_ != 0) {
      is_t2 = true;
    }
    if (is_t2) {
      encoding = static_cast<uint32_t>(rn_) << 16 | static_cast<uint32_t>(rm_) |
                 offset_ << 4;
    } else {
      encoding = static_cast<uint32_t>(rn_) << 3 | static_cast<uint32_t>(rm_) << 6;
    }
  }
  return encoding;
}

// This is very similar to the ARM encoding, except that the offset is 10 bits.
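// The offset is stored word-scaled (offset / 4) in an 8-bit field, so it must be a
// multiple of 4 and less than 1024 in magnitude.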
uint32_t Address::encodingThumbLdrdStrd() const {
  DCHECK(IsImmediate());
  uint32_t encoding;
  uint32_t am = am_;
  // If P is 0 then W must be 1 (different from ARM).
  uint32_t PU1W = am_ >> 21;  // Move down to bottom of word.
  if ((PU1W & 8U /* 0b1000 */) == 0) {
    am |= 1 << 21;  // Set W bit.
  }
  if (offset_ < 0) {
    int32_t off = -offset_;
    CHECK_LT(off, 1024);
    CHECK_ALIGNED(off, 4);
    encoding = (am ^ (1 << kUShift)) | off >> 2;  // Flip U to adjust sign.
  } else {
    CHECK_LT(offset_, 1024);
    CHECK_ALIGNED(offset_, 4);
    encoding = am | offset_ >> 2;
  }
  encoding |= static_cast<uint32_t>(rn_) << 16;
  return encoding;
}

// Encoding for ARM addressing mode 3.
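// The 8-bit immediate is split into two nibbles: the high nibble lands in instruction
// bits 11..8 and the low nibble in bits 3..0.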
uint32_t Address::encoding3() const {
  const uint32_t offset_mask = (1 << 12) - 1;
  uint32_t encoding = encodingArm();
  uint32_t offset = encoding & offset_mask;
  CHECK_LT(offset, 256u);
  return (encoding & ~offset_mask) | ((offset & 0xf0) << 4) | (offset & 0xf);
}

// Encoding for vfp load/store addressing.
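// The byte offset is stored word-scaled (offset / 4) in the low 8 bits, and bit 23 (U)
// is set for a positive (Offset) addressing mode.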
uint32_t Address::vencoding() const {
  CHECK(IsAbsoluteUint<10>(offset_));  // In the range -1020 to +1020.
  CHECK_ALIGNED(offset_, 2);  // Multiple of 4.

  const uint32_t offset_mask = (1 << 12) - 1;
  uint32_t encoding = encodingArm();
  uint32_t offset = encoding & offset_mask;
  CHECK((am_ == Offset) || (am_ == NegOffset));
  uint32_t vencoding_value = (encoding & (0xf << kRnShift)) | (offset >> 2);
  if (am_ == Offset) {
    vencoding_value |= 1 << 23;
  }
  return vencoding_value;
}


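// Returns whether |offset| fits in the immediate field of the ARM load used for |type|:
// 8 bits for addressing mode 3, 12 bits for addressing mode 2, and a 10-bit byte offset
// for the VFP loads.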
bool Address::CanHoldLoadOffsetArm(LoadOperandType type, int offset) {
  switch (type) {
    case kLoadSignedByte:
    case kLoadSignedHalfword:
    case kLoadUnsignedHalfword:
    case kLoadWordPair:
      return IsAbsoluteUint<8>(offset);  // Addressing mode 3.
    case kLoadUnsignedByte:
    case kLoadWord:
      return IsAbsoluteUint<12>(offset);  // Addressing mode 2.
    case kLoadSWord:
    case kLoadDWord:
      return IsAbsoluteUint<10>(offset);  // VFP addressing mode.
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}


bool Address::CanHoldStoreOffsetArm(StoreOperandType type, int offset) {
  switch (type) {
    case kStoreHalfword:
    case kStoreWordPair:
      return IsAbsoluteUint<8>(offset);  // Addressing mode 3.
    case kStoreByte:
    case kStoreWord:
      return IsAbsoluteUint<12>(offset);  // Addressing mode 2.
    case kStoreSWord:
    case kStoreDWord:
      return IsAbsoluteUint<10>(offset);  // VFP addressing mode.
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

bool Address::CanHoldLoadOffsetThumb(LoadOperandType type, int offset) {
  switch (type) {
    case kLoadSignedByte:
    case kLoadSignedHalfword:
    case kLoadUnsignedHalfword:
    case kLoadUnsignedByte:
    case kLoadWord:
      return IsAbsoluteUint<12>(offset);
    case kLoadSWord:
    case kLoadDWord:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;  // VFP addressing mode.
    case kLoadWordPair:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}


bool Address::CanHoldStoreOffsetThumb(StoreOperandType type, int offset) {
  switch (type) {
    case kStoreHalfword:
    case kStoreByte:
    case kStoreWord:
      return IsAbsoluteUint<12>(offset);
    case kStoreSWord:
    case kStoreDWord:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;  // VFP addressing mode.
    case kStoreWordPair:
      return IsAbsoluteUint<10>(offset) && (offset & 3) == 0;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void ArmAssembler::Pad(uint32_t bytes) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  for (uint32_t i = 0; i < bytes; ++i) {
    buffer_.Emit<uint8_t>(0);
  }
}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg));
}

static dwarf::Reg DWARFReg(SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg));
}

constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

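// Emits the managed-code prologue: pushes LR plus the core callee saves, then the VFP
// callee saves, grows the stack to |frame_size|, stores the Method* (R0) at SP + 0, and
// writes any register arguments (entry spills) to the slots above the frame. CFI is
// emitted alongside every stack adjustment.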
void ArmAssembler::BuildFrame(size_t frame_size,
                              ManagedRegister method_reg,
                              ArrayRef<const ManagedRegister> callee_save_regs,
                              const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_EQ(buffer_.Size(), 0U);  // Nothing emitted yet.
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK_EQ(R0, method_reg.AsArm().AsCoreRegister());

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  PushList(core_spill_mask);
  cfi_.AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi_.RelOffsetForMany(DWARFReg(Register(0)), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    vpushs(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask));
    cfi_.AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi_.RelOffsetForMany(DWARFReg(SRegister(0)), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);  // Must at least have space for Method*.
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // Handles CFI as well.

  // Write out Method*.
  StoreToOffset(kStoreWord, R0, SP, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      StoreToOffset(kStoreWord, reg.AsCoreRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    }
  }
}

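// Emits the managed-code epilogue: shrinks the stack back to the callee-save area, pops
// the VFP callee saves, then pops the core callee saves together with PC to return.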
void ArmAssembler::RemoveFrame(size_t frame_size,
                               ArrayRef<const ManagedRegister> callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();

  // Compute callee saves to pop and PC.
  RegList core_spill_mask = 1 << PC;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // Handles CFI as well.

  if (fp_spill_mask != 0) {
    vpops(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask));
    cfi_.AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi_.RestoreMany(DWARFReg(SRegister(0)), fp_spill_mask);
  }

  // Pop callee saves and PC.
  PopList(core_spill_mask);

  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}

void ArmAssembler::IncreaseFrameSize(size_t adjust) {
  AddConstant(SP, -adjust);
  cfi_.AdjustCFAOffset(adjust);
}

void ArmAssembler::DecreaseFrameSize(size_t adjust) {
  AddConstant(SP, adjust);
  cfi_.AdjustCFAOffset(-adjust);
}

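// Spills |msrc| into the stack slot at |dest|, choosing the store width from the kind of
// register (core register, register pair, S register, or D register).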
void ArmAssembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {
  ArmManagedRegister src = msrc.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    StoreToOffset(kStoreWord, src.AsRegisterPairLow(), SP, dest.Int32Value());
    StoreToOffset(kStoreWord, src.AsRegisterPairHigh(),
                  SP, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, dest.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, dest.Int32Value());
  }
}

void ArmAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::StoreSpanning(FrameOffset dest, ManagedRegister msrc,
                                 FrameOffset in_off, ManagedRegister mscratch) {
  ArmManagedRegister src = msrc.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, in_off.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value() + 4);
}

void ArmAssembler::CopyRef(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                           bool unpoison_reference) {
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(),
                 base.AsArm().AsCoreRegister(), offs.Int32Value());
  if (unpoison_reference) {
    MaybeUnpoisonHeapReference(dst.AsCoreRegister());
  }
}

void ArmAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(), SP, src.Int32Value());
}

void ArmAssembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
                              Offset offs) {
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(),
                 base.AsArm().AsCoreRegister(), offs.Int32Value());
}

void ArmAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                         ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
}

void ArmAssembler::StoreImmediateToThread32(ThreadOffset32 dest,
                                            uint32_t imm,
                                            ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), TR, dest.Int32Value());
}

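// Shared helper for Load() and LoadFromThread32(): loads |size| bytes from
// src_register + src_offset into |m_dst| (core register, register pair, S or D register).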
static void EmitLoad(ArmAssembler* assembler, ManagedRegister m_dst,
                     Register src_register, int32_t src_offset, size_t size) {
  ArmManagedRegister dst = m_dst.AsArm();
  if (dst.IsNoRegister()) {
    CHECK_EQ(0u, size) << dst;
  } else if (dst.IsCoreRegister()) {
    CHECK_EQ(4u, size) << dst;
    assembler->LoadFromOffset(kLoadWord, dst.AsCoreRegister(), src_register, src_offset);
  } else if (dst.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dst;
    assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairLow(), src_register, src_offset);
    assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairHigh(), src_register, src_offset + 4);
  } else if (dst.IsSRegister()) {
    assembler->LoadSFromOffset(dst.AsSRegister(), src_register, src_offset);
  } else {
    CHECK(dst.IsDRegister()) << dst;
    assembler->LoadDFromOffset(dst.AsDRegister(), src_register, src_offset);
  }
}

void ArmAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return EmitLoad(this, m_dst, SP, src.Int32Value(), size);
}

void ArmAssembler::LoadFromThread32(ManagedRegister m_dst, ThreadOffset32 src, size_t size) {
  return EmitLoad(this, m_dst, TR, src.Int32Value(), size);
}

void ArmAssembler::LoadRawPtrFromThread32(ManagedRegister m_dst, ThreadOffset32 offs) {
  ArmManagedRegister dst = m_dst.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(kLoadWord, dst.AsCoreRegister(), TR, offs.Int32Value());
}

void ArmAssembler::CopyRawPtrFromThread32(FrameOffset fr_offs,
                                          ThreadOffset32 thr_offs,
                                          ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 TR, thr_offs.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                SP, fr_offs.Int32Value());
}

void ArmAssembler::CopyRawPtrToThread32(ThreadOffset32 thr_offs,
                                        FrameOffset fr_offs,
                                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 SP, fr_offs.Int32Value());
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                TR, thr_offs.Int32Value());
}

void ArmAssembler::StoreStackOffsetToThread32(ThreadOffset32 thr_offs,
                                              FrameOffset fr_offs,
                                              ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value(), AL);
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                TR, thr_offs.Int32Value());
}

void ArmAssembler::StoreStackPointerToThread32(ThreadOffset32 thr_offs) {
  StoreToOffset(kStoreWord, SP, TR, thr_offs.Int32Value());
}

void ArmAssembler::SignExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmAssembler::ZeroExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

void ArmAssembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t /*size*/) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      mov(dst.AsCoreRegister(), ShifterOperand(src.AsCoreRegister()));
    } else if (dst.IsDRegister()) {
      CHECK(src.IsDRegister()) << src;
      vmovd(dst.AsDRegister(), src.AsDRegister());
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      vmovs(dst.AsSRegister(), src.AsSRegister());
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        mov(dst.AsRegisterPairLow(), ShifterOperand(src.AsRegisterPairLow()));
        mov(dst.AsRegisterPairHigh(), ShifterOperand(src.AsRegisterPairHigh()));
      } else {
        mov(dst.AsRegisterPairHigh(), ShifterOperand(src.AsRegisterPairHigh()));
        mov(dst.AsRegisterPairLow(), ShifterOperand(src.AsRegisterPairLow()));
      }
    }
  }
}

void ArmAssembler::Copy(FrameOffset dest, FrameOffset src, ManagedRegister mscratch, size_t size) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value() + 4);
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value() + 4);
  }
}

void ArmAssembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsArm().AsCoreRegister();
  CHECK_EQ(size, 4u);
  LoadFromOffset(kLoadWord, scratch, src_base.AsArm().AsCoreRegister(), src_offset.Int32Value());
  StoreToOffset(kStoreWord, scratch, SP, dest.Int32Value());
}

void ArmAssembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsArm().AsCoreRegister();
  CHECK_EQ(size, 4u);
  LoadFromOffset(kLoadWord, scratch, SP, src.Int32Value());
  StoreToOffset(kStoreWord, scratch, dest_base.AsArm().AsCoreRegister(), dest_offset.Int32Value());
}

void ArmAssembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                        ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

void ArmAssembler::Copy(ManagedRegister dest, Offset dest_offset,
                        ManagedRegister src, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  CHECK_EQ(size, 4u);
  Register scratch = mscratch.AsArm().AsCoreRegister();
  LoadFromOffset(kLoadWord, scratch, src.AsArm().AsCoreRegister(), src_offset.Int32Value());
  StoreToOffset(kStoreWord, scratch, dest.AsArm().AsCoreRegister(), dest_offset.Int32Value());
}

void ArmAssembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/, FrameOffset /*src*/, Offset /*src_offset*/,
                        ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

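// Materializes a handle scope entry address: the result is SP + handle_scope_offset, or 0
// when null_allowed and the reference is null. The null test uses conditional execution
// (an IT block under Thumb-2).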
void ArmAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                          FrameOffset handle_scope_offset,
                                          ManagedRegister min_reg, bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP + handle_scope_offset)
    if (in_reg.IsNoRegister()) {
      LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
                     SP, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }
    cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
    if (!out_reg.Equals(in_reg)) {
      it(EQ, kItElse);
      LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
    } else {
      it(NE);
    }
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
}

void ArmAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                          FrameOffset handle_scope_offset,
                                          ManagedRegister mscratch,
                                          bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  if (null_allowed) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP,
                   handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP + handle_scope_offset)
    cmp(scratch.AsCoreRegister(), ShifterOperand(0));
    it(NE);
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, out_off.Int32Value());
}

void ArmAssembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                ManagedRegister min_reg) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  CHECK(in_reg.IsCoreRegister()) << in_reg;
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);     // TODO: why EQ?
  }
  cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
  it(NE);
  LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
                 in_reg.AsCoreRegister(), 0, NE);
}

void ArmAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void ArmAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void ArmAssembler::Call(ManagedRegister mbase, Offset offset,
                        ManagedRegister mscratch) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 base.AsCoreRegister(), offset.Int32Value());
  blx(scratch.AsCoreRegister());
  // TODO: place reference map on call.
}

void ArmAssembler::Call(FrameOffset base, Offset offset,
                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  // Call *(*(SP + base) + offset).
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 SP, base.Int32Value());
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 scratch.AsCoreRegister(), offset.Int32Value());
  blx(scratch.AsCoreRegister());
  // TODO: place reference map on call.
}

void ArmAssembler::CallFromThread32(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmAssembler::GetCurrentThread(ManagedRegister tr) {
  mov(tr.AsArm().AsCoreRegister(), ShifterOperand(TR));
}

void ArmAssembler::GetCurrentThread(FrameOffset offset,
                                    ManagedRegister /*scratch*/) {
  StoreToOffset(kStoreWord, TR, SP, offset.Int32Value(), AL);
}

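// Emits a pending-exception check: loads the exception field from the current thread (TR)
// and branches to an out-of-line slow path, which delivers the exception, when it is
// non-null.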
void ArmAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  ArmManagedRegister scratch = mscratch.AsArm();
  ArmExceptionSlowPath* slow = new (GetArena()) ArmExceptionSlowPath(scratch, stack_adjust);
  buffer_.EnqueueSlowPath(slow);
  LoadFromOffset(kLoadWord,
                 scratch.AsCoreRegister(),
                 TR,
                 Thread::ExceptionOffset<kArmPointerSize>().Int32Value());
  cmp(scratch.AsCoreRegister(), ShifterOperand(0));
  b(slow->Entry(), NE);
}

void ArmExceptionSlowPath::Emit(Assembler* sasm) {
  ArmAssembler* sp_asm = down_cast<ArmAssembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  if (stack_adjust_ != 0) {  // Fix up the frame.
    __ DecreaseFrameSize(stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving R0 as this call won't return.
  __ mov(R0, ShifterOperand(scratch_.AsCoreRegister()));
  // Set up call to Thread::Current()->pDeliverException.
  __ LoadFromOffset(kLoadWord,
                    R12,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value());
  __ blx(R12);
#undef __
}


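// Counts the leading zero bits of |val| by successively halving the search window;
// returns 32 for val == 0.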
static int LeadingZeros(uint32_t val) {
  uint32_t alt;
  int32_t n;
  int32_t count;

  count = 16;
  n = 32;
  do {
    alt = val >> count;
    if (alt != 0) {
      n = n - count;
      val = alt;
    }
    count >>= 1;
  } while (count);
  return n - val;
}


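// Computes the Thumb-2 modified-immediate encoding of |value|, with the i, imm3 and
// a:bcdefgh fields placed at the bit positions the 32-bit Thumb-2 instructions expect,
// or returns kInvalidModifiedImmediate if |value| has no such encoding. Handled forms:
// an 8-bit value, 0x00XY00XY, 0xXYXYXYXY, 0xXY00XY00, and a value whose significant bits
// fit in one 8-bit window (encoded with a rotation). For example, 0x00ab00ab encodes as
// (0x1 << 12) | 0xab.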
uint32_t ArmAssembler::ModifiedImmediate(uint32_t value) {
  int32_t z_leading;
  int32_t z_trailing;
  uint32_t b0 = value & 0xff;

  /* Note: case of value==0 must use 0:000:0:0000000 encoding */
  if (value <= 0xFF)
    return b0;  // 0:000:a:bcdefgh.
  if (value == ((b0 << 16) | b0))
    return (0x1 << 12) | b0; /* 0:001:a:bcdefgh */
  if (value == ((b0 << 24) | (b0 << 16) | (b0 << 8) | b0))
    return (0x3 << 12) | b0; /* 0:011:a:bcdefgh */
  b0 = (value >> 8) & 0xff;
  if (value == ((b0 << 24) | (b0 << 8)))
    return (0x2 << 12) | b0; /* 0:010:a:bcdefgh */
  /* Can we do it with rotation? */
  z_leading = LeadingZeros(value);
  z_trailing = 32 - LeadingZeros(~value & (value - 1));
  /* A run of eight or fewer active bits? */
  if ((z_leading + z_trailing) < 24)
    return kInvalidModifiedImmediate;  /* No - bail. */
  /* Left-justify the constant, discarding the msb (known to be 1). */
  value <<= z_leading + 1;
  /* Create bcdefgh. */
  value >>= 25;

  /* Put it all together. */
  uint32_t v = 8 + z_leading;

  uint32_t i = (v & 16U /* 0b10000 */) >> 4;
  uint32_t imm3 = (v >> 1) & 7U /* 0b111 */;
  uint32_t a = v & 1;
  return value | i << 26 | imm3 << 12 | a << 7;
}

void ArmAssembler::FinalizeTrackedLabels() {
  if (!tracked_labels_.empty()) {
    // This array should be sorted, as assembly is generated in linearized order. It isn't
    // technically required, but GetAdjustedPosition() used in AdjustLabelPosition() can take
    // advantage of it. So ensure that it's actually the case.
    DCHECK(std::is_sorted(
        tracked_labels_.begin(),
        tracked_labels_.end(),
        [](const Label* lhs, const Label* rhs) { return lhs->Position() < rhs->Position(); }));

    Label* last_label = nullptr;  // Track duplicates, we must not adjust twice.
    for (Label* label : tracked_labels_) {
      DCHECK_NE(label, last_label);
      AdjustLabelPosition(label);
      last_label = label;
    }
  }
}

}  // namespace arm
}  // namespace art