/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arm_lir.h"
#include "codegen_arm.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "dex/reg_storage_eq.h"

namespace art {

/* This file contains codegen for the Thumb ISA. */

static int32_t EncodeImmSingle(int32_t value) {
  int32_t res;
  int32_t bit_a = (value & 0x80000000) >> 31;
  int32_t not_bit_b = (value & 0x40000000) >> 30;
  int32_t bit_b = (value & 0x20000000) >> 29;
  int32_t b_smear = (value & 0x3e000000) >> 25;
  int32_t slice = (value & 0x01f80000) >> 19;
  int32_t zeroes = (value & 0x0007ffff);
  if (zeroes != 0)
    return -1;
  if (bit_b) {
    if ((not_bit_b != 0) || (b_smear != 0x1f))
      return -1;
  } else {
    if ((not_bit_b != 1) || (b_smear != 0x0))
      return -1;
  }
  res = (bit_a << 7) | (bit_b << 6) | slice;
  return res;
}
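
// Illustrative example (informal, derived from the checks above): 2.0f has IEEE-754 bits
// 0x40000000, so bit_a = 0, not_bit_b = 1, bit_b = 0, b_smear = 0, slice = 0 and
// zeroes = 0, which encodes to 0.  That is why LoadFPConstantValue() below can materialize
// +0.0 by loading "+2.0 (which encodes to 0)" and subtracting it from itself.  Any value
// with non-zero low 19 bits (e.g. 0.1f == 0x3dcccccd) is rejected with -1.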

/*
 * Determine whether value can be encoded as a Thumb2 floating point
 * immediate.  If not, return -1.  If so return encoded 8-bit value.
 */
static int32_t EncodeImmDouble(int64_t value) {
  int32_t res;
  int32_t bit_a = (value & INT64_C(0x8000000000000000)) >> 63;
  int32_t not_bit_b = (value & INT64_C(0x4000000000000000)) >> 62;
  int32_t bit_b = (value & INT64_C(0x2000000000000000)) >> 61;
  int32_t b_smear = (value & INT64_C(0x3fc0000000000000)) >> 54;
  int32_t slice = (value & INT64_C(0x003f000000000000)) >> 48;
  uint64_t zeroes = (value & INT64_C(0x0000ffffffffffff));
  if (zeroes != 0ull)
    return -1;
  if (bit_b) {
    if ((not_bit_b != 0) || (b_smear != 0xff))
      return -1;
  } else {
    if ((not_bit_b != 1) || (b_smear != 0x0))
      return -1;
  }
  res = (bit_a << 7) | (bit_b << 6) | slice;
  return res;
}
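
// Illustrative note (informal): the double-precision check mirrors EncodeImmSingle() with
// an 8-bit smear field and a 48-bit zero tail, so 2.0 (bits 0x4000000000000000) again
// encodes to 0, while any constant with non-zero low 48 bits returns -1.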

LIR* ArmMir2Lir::LoadFPConstantValue(int r_dest, int value) {
  DCHECK(RegStorage::IsSingle(r_dest));
  if (value == 0) {
    // TODO: we need better info about the target CPU.  A vector exclusive or
    //       would probably be better here if we could rely on its existence.
    // Load an immediate +2.0 (which encodes to 0)
    NewLIR2(kThumb2Vmovs_IMM8, r_dest, 0);
    // +0.0 = +2.0 - +2.0
    return NewLIR3(kThumb2Vsubs, r_dest, r_dest, r_dest);
  } else {
    int encoded_imm = EncodeImmSingle(value);
    if (encoded_imm >= 0) {
      return NewLIR2(kThumb2Vmovs_IMM8, r_dest, encoded_imm);
    }
  }
  LIR* data_target = ScanLiteralPool(literal_list_, value, 0);
  if (data_target == NULL) {
    data_target = AddWordData(&literal_list_, value);
  }
  ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
  LIR* load_pc_rel = RawLIR(current_dalvik_offset_, kThumb2Vldrs,
                            r_dest, rs_r15pc.GetReg(), 0, 0, 0, data_target);
  AppendLIR(load_pc_rel);
  return load_pc_rel;
}

static int LeadingZeros(uint32_t val) {
  uint32_t alt;
  int32_t n;
  int32_t count;

  count = 16;
  n = 32;
  do {
    alt = val >> count;
    if (alt != 0) {
      n = n - count;
      val = alt;
    }
    count >>= 1;
  } while (count);
  return n - val;
}
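
// Illustrative example (informal, derived from the loop above): the halving search locates
// the highest set bit, giving LeadingZeros(0x80000000) == 0, LeadingZeros(1) == 31 and
// LeadingZeros(0) == 32, which is the behavior the rotation test in ModifiedImmediate()
// below relies on.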

/*
 * Determine whether value can be encoded as a Thumb2 modified
 * immediate.  If not, return -1.  If so, return i:imm3:a:bcdefgh form.
 */
int ArmMir2Lir::ModifiedImmediate(uint32_t value) {
  int32_t z_leading;
  int32_t z_trailing;
  uint32_t b0 = value & 0xff;

  /* Note: case of value==0 must use 0:000:0:0000000 encoding */
  if (value <= 0xFF)
    return b0;  // 0:000:a:bcdefgh
  if (value == ((b0 << 16) | b0))
    return (0x1 << 8) | b0; /* 0:001:a:bcdefgh */
  if (value == ((b0 << 24) | (b0 << 16) | (b0 << 8) | b0))
    return (0x3 << 8) | b0; /* 0:011:a:bcdefgh */
  b0 = (value >> 8) & 0xff;
  if (value == ((b0 << 24) | (b0 << 8)))
    return (0x2 << 8) | b0; /* 0:010:a:bcdefgh */
  /* Can we do it with rotation? */
  z_leading = LeadingZeros(value);
  z_trailing = 32 - LeadingZeros(~value & (value - 1));
  /* A run of eight or fewer active bits? */
  if ((z_leading + z_trailing) < 24)
    return -1;  /* No - bail */
  /* left-justify the constant, discarding msb (known to be 1) */
  value <<= z_leading + 1;
  /* Create bcdefgh */
  value >>= 25;
  /* Put it all together */
  return value | ((0x8 + z_leading) << 7); /* [01000..11111]:bcdefgh */
}
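
// Illustrative examples (informal, derived from the cases above):
//   ModifiedImmediate(0x000000ab) == 0x0ab   (plain 8-bit value,    0:000 pattern)
//   ModifiedImmediate(0x00ab00ab) == 0x1ab   (halfword-replicated,  0:001 pattern)
//   ModifiedImmediate(0xab00ab00) == 0x2ab   (shifted halfwords,    0:010 pattern)
//   ModifiedImmediate(0xabababab) == 0x3ab   (byte-replicated,      0:011 pattern)
//   ModifiedImmediate(0x00012345) == -1      (more than eight significant bits)
// Any other constant whose significant bits fit in a run of eight is returned as a
// rotated 8-bit value by the final return statement.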

bool ArmMir2Lir::InexpensiveConstantInt(int32_t value) {
  return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
}

bool ArmMir2Lir::InexpensiveConstantFloat(int32_t value) {
  return EncodeImmSingle(value) >= 0;
}

bool ArmMir2Lir::InexpensiveConstantLong(int64_t value) {
  return InexpensiveConstantInt(High32Bits(value)) && InexpensiveConstantInt(Low32Bits(value));
}

bool ArmMir2Lir::InexpensiveConstantDouble(int64_t value) {
  return EncodeImmDouble(value) >= 0;
}

/*
 * Load an immediate using a shortcut if possible; otherwise
 * grab from the per-translation literal pool.
 *
 * No additional register clobbering operation performed. Use this version when
 * 1) r_dest is freshly returned from AllocTemp or
 * 2) The codegen is under fixed register usage
 */
LIR* ArmMir2Lir::LoadConstantNoClobber(RegStorage r_dest, int value) {
  LIR* res;
  int mod_imm;

  if (r_dest.IsFloat()) {
    return LoadFPConstantValue(r_dest.GetReg(), value);
  }

  /* See if the value can be constructed cheaply */
  if (r_dest.Low8() && (value >= 0) && (value <= 255)) {
    return NewLIR2(kThumbMovImm, r_dest.GetReg(), value);
  }
  /* Check Modified immediate special cases */
  mod_imm = ModifiedImmediate(value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MovI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  mod_imm = ModifiedImmediate(~value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MvnI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  /* 16-bit immediate? */
  if ((value & 0xffff) == value) {
    res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), value);
    return res;
  }
  /* Do a low/high pair */
  res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), Low16Bits(value));
  NewLIR2(kThumb2MovImm16H, r_dest.GetReg(), High16Bits(value));
  return res;
}
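
// Illustrative selection order (informal): with the checks above,
//   100         -> kThumbMovImm     (8-bit immediate, low register)
//   0x00ff00ff  -> kThumb2MovI8M    (modified immediate 0x1ff)
//   0xffffff00  -> kThumb2MvnI8M    (complement 0xff is encodable)
//   0x00001234  -> kThumb2MovImm16  (16-bit movw)
//   0x12345678  -> kThumb2MovImm16 + kThumb2MovImm16H (movw/movt pair)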

LIR* ArmMir2Lir::OpUnconditionalBranch(LIR* target) {
  LIR* res = NewLIR1(kThumbBUncond, 0 /* offset to be patched during assembly */);
  res->target = target;
  return res;
}

LIR* ArmMir2Lir::OpCondBranch(ConditionCode cc, LIR* target) {
  // This is kThumb2BCond instead of kThumbBCond for performance reasons. The assembly
  // time required for a new pass after kThumbBCond is fixed up to kThumb2BCond is
  // substantial.
  LIR* branch = NewLIR2(kThumb2BCond, 0 /* offset to be patched */,
                        ArmConditionEncoding(cc));
  branch->target = target;
  return branch;
}

LIR* ArmMir2Lir::OpReg(OpKind op, RegStorage r_dest_src) {
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpBlx:
      opcode = kThumbBlxR;
      break;
    case kOpBx:
      opcode = kThumbBx;
      break;
    default:
      LOG(FATAL) << "Bad opcode " << op;
  }
  return NewLIR1(opcode, r_dest_src.GetReg());
}

LIR* ArmMir2Lir::OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2,
                               int shift) {
  bool thumb_form =
      ((shift == 0) && r_dest_src1.Low8() && r_src2.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdc:
      opcode = (thumb_form) ? kThumbAdcRR : kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = (thumb_form) ? kThumbAndRR : kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = (thumb_form) ? kThumbBicRR : kThumb2BicRRR;
      break;
    case kOpCmn:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbCmnRR : kThumb2CmnRR;
      break;
    case kOpCmp:
      if (thumb_form)
        opcode = kThumbCmpRR;
      else if ((shift == 0) && !r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbCmpHH;
      else if ((shift == 0) && r_dest_src1.Low8())
        opcode = kThumbCmpLH;
      else if (shift == 0)
        opcode = kThumbCmpHL;
      else
        opcode = kThumb2CmpRR;
      break;
    case kOpXor:
      opcode = (thumb_form) ? kThumbEorRR : kThumb2EorRRR;
      break;
    case kOpMov:
      DCHECK_EQ(shift, 0);
      if (r_dest_src1.Low8() && r_src2.Low8())
        opcode = kThumbMovRR;
      else if (!r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbMovRR_H2H;
      else if (r_dest_src1.Low8())
        opcode = kThumbMovRR_H2L;
      else
        opcode = kThumbMovRR_L2H;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbMul : kThumb2MulRRR;
      break;
    case kOpMvn:
      opcode = (thumb_form) ? kThumbMvn : kThumb2MnvRR;
      break;
    case kOpNeg:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbNeg : kThumb2NegRR;
      break;
    case kOpOr:
      opcode = (thumb_form) ? kThumbOrr : kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = (thumb_form) ? kThumbSbc : kThumb2SbcRRR;
      break;
    case kOpTst:
      opcode = (thumb_form) ? kThumbTst : kThumb2TstRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLslRR : kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLsrRR : kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbAsrRR : kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbRorRR : kThumb2RorRRR;
      break;
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRev:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRev;
      break;
    case kOpRevsh:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevshRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRevsh;
      break;
    case kOp2Byte:
      DCHECK_EQ(shift, 0);
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 8);
    case kOp2Short:
      DCHECK_EQ(shift, 0);
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    case kOp2Char:
      DCHECK_EQ(shift, 0);
      return NewLIR4(kThumb2Ubfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  if (EncodingMap[opcode].flags & IS_BINARY_OP) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), r_src2.GetReg());
  } else if (EncodingMap[opcode].flags & IS_TERTIARY_OP) {
    if (EncodingMap[opcode].field_loc[2].kind == kFmtShift) {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_src2.GetReg(), shift);
    } else {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg());
    }
  } else if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    LOG(FATAL) << "Unexpected encoding operand count";
    return NULL;
  }
}

LIR* ArmMir2Lir::OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) {
  return OpRegRegShift(op, r_dest_src1, r_src2, 0);
}

LIR* ArmMir2Lir::OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type) {
  UNUSED(r_dest, r_base, offset, move_type);
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

LIR* ArmMir2Lir::OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type) {
  UNUSED(r_base, offset, r_src, move_type);
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

LIR* ArmMir2Lir::OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) {
  UNUSED(op, cc, r_dest, r_src);
  LOG(FATAL) << "Unexpected use of OpCondRegReg for Arm";
  UNREACHABLE();
}

LIR* ArmMir2Lir::OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1,
                                  RegStorage r_src2, int shift) {
  ArmOpcode opcode = kThumbBkpt;
  bool thumb_form = (shift == 0) && r_dest.Low8() && r_src1.Low8() && r_src2.Low8();
  switch (op) {
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = kThumb2BicRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2MulRRR;
      break;
    case kOpDiv:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2SdivRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2RorRRR;
      break;
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    DCHECK(EncodingMap[opcode].flags & IS_TERTIARY_OP);
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg());
  }
}

LIR* ArmMir2Lir::OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2) {
  return OpRegRegRegShift(op, r_dest, r_src1, r_src2, 0);
}

LIR* ArmMir2Lir::OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  ArmOpcode opcode = kThumbBkpt;
  ArmOpcode alt_opcode = kThumbBkpt;
  bool all_low_regs = r_dest.Low8() && r_src1.Low8();
  int32_t mod_imm = ModifiedImmediate(value);

  switch (op) {
    case kOpLsl:
      if (all_low_regs)
        return NewLIR3(kThumbLslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpLsr:
      if (all_low_regs)
        return NewLIR3(kThumbLsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAsr:
      if (all_low_regs)
        return NewLIR3(kThumbAsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2AsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpRor:
      return NewLIR3(kThumb2RorRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAdd:
      if (r_dest.Low8() && (r_src1 == rs_r13sp) && (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddSpRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      } else if (r_dest.Low8() && (r_src1 == rs_r15pc) &&
                 (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddPcRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      }
      FALLTHROUGH_INTENDED;
    case kOpSub:
      if (all_low_regs && ((abs_value & 0x7) == abs_value)) {
        if (op == kOpAdd)
          opcode = (neg) ? kThumbSubRRI3 : kThumbAddRRI3;
        else
          opcode = (neg) ? kThumbAddRRI3 : kThumbSubRRI3;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          op = (op == kOpAdd) ? kOpSub : kOpAdd;
        }
      }
      if (mod_imm < 0 && (abs_value & 0x3ff) == abs_value) {
        // This is deliberately used only if modified immediate encoding is inadequate since
        // we sometimes actually use the flags for small values but not necessarily low regs.
        if (op == kOpAdd)
          opcode = (neg) ? kThumb2SubRRI12 : kThumb2AddRRI12;
        else
          opcode = (neg) ? kThumb2AddRRI12 : kThumb2SubRRI12;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      if (op == kOpSub) {
        opcode = kThumb2SubRRI8M;
        alt_opcode = kThumb2SubRRR;
      } else {
        opcode = kThumb2AddRRI8M;
        alt_opcode = kThumb2AddRRR;
      }
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRI8M;
      alt_opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRI8M;
      alt_opcode = kThumb2AdcRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRI8M;
      alt_opcode = kThumb2SbcRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRI8M;
      alt_opcode = kThumb2OrrRRR;
      break;
    case kOpAnd:
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          return NewLIR3(kThumb2BicRRI8M, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
        }
      }
      opcode = kThumb2AndRRI8M;
      alt_opcode = kThumb2AndRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRI8M;
      alt_opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      // TUNING: power of 2, shift & add
      mod_imm = -1;
      alt_opcode = kThumb2MulRRR;
      break;
    case kOpCmp: {
      LIR* res;
      if (mod_imm >= 0) {
        res = NewLIR2(kThumb2CmpRI8M, r_src1.GetReg(), mod_imm);
      } else {
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          res = NewLIR2(kThumb2CmnRI8M, r_src1.GetReg(), mod_imm);
        } else {
          RegStorage r_tmp = AllocTemp();
          res = LoadConstant(r_tmp, value);
          OpRegReg(kOpCmp, r_src1, r_tmp);
          FreeTemp(r_tmp);
        }
      }
      return res;
    }
    default:
      LOG(FATAL) << "Bad opcode: " << op;
  }

  if (mod_imm >= 0) {
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
  } else {
    RegStorage r_scratch = AllocTemp();
    LoadConstant(r_scratch, value);
    LIR* res;
    if (EncodingMap[alt_opcode].flags & IS_QUAD_OP)
      res = NewLIR4(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg(), 0);
    else
      res = NewLIR3(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg());
    FreeTemp(r_scratch);
    return res;
  }
}
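
// Informal summary of the kOpAdd/kOpSub path above: SP/PC-relative Thumb1 forms first,
// then the 3-bit Thumb1 immediate, then a (possibly negated) modified immediate, then the
// 12-bit Thumb2 immediate when the modified immediate is inadequate, and finally a scratch
// register with the three-register ALU form.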

/* Handle Thumb-only variants here - otherwise punt to OpRegRegImm */
LIR* ArmMir2Lir::OpRegImm(OpKind op, RegStorage r_dest_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  bool short_form = (((abs_value & 0xff) == abs_value) && r_dest_src1.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdd:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbAddSpI7, value >> 2);
      } else if (short_form) {
        opcode = (neg) ? kThumbSubRI8 : kThumbAddRI8;
      }
      break;
    case kOpSub:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbSubSpI7, value >> 2);
      } else if (short_form) {
        opcode = (neg) ? kThumbAddRI8 : kThumbSubRI8;
      }
      break;
    case kOpCmp:
      if (!neg && short_form) {
        opcode = kThumbCmpRI8;
      } else {
        short_form = false;
      }
      break;
    default:
      /* Punt to OpRegRegImm - if bad case catch it there */
      short_form = false;
      break;
  }
  if (short_form) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), abs_value);
  } else {
    return OpRegRegImm(op, r_dest_src1, r_dest_src1, value);
  }
}

LIR* ArmMir2Lir::LoadConstantWide(RegStorage r_dest, int64_t value) {
  LIR* res = NULL;
  int32_t val_lo = Low32Bits(value);
  int32_t val_hi = High32Bits(value);
  if (r_dest.IsFloat()) {
    DCHECK(!r_dest.IsPair());
    if ((val_lo == 0) && (val_hi == 0)) {
      // TODO: we need better info about the target CPU.  A vector exclusive or
      //       would probably be better here if we could rely on its existence.
      // Load an immediate +2.0 (which encodes to 0)
      NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), 0);
      // +0.0 = +2.0 - +2.0
      res = NewLIR3(kThumb2Vsubd, r_dest.GetReg(), r_dest.GetReg(), r_dest.GetReg());
    } else {
      int encoded_imm = EncodeImmDouble(value);
      if (encoded_imm >= 0) {
        res = NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), encoded_imm);
      }
    }
  } else {
    // NOTE: Arm32 assumption here.
    DCHECK(r_dest.IsPair());
    if ((InexpensiveConstantInt(val_lo) && (InexpensiveConstantInt(val_hi)))) {
      res = LoadConstantNoClobber(r_dest.GetLow(), val_lo);
      LoadConstantNoClobber(r_dest.GetHigh(), val_hi);
    }
  }
  if (res == NULL) {
    // No short form - load from the literal pool.
    LIR* data_target = ScanLiteralPoolWide(literal_list_, val_lo, val_hi);
    if (data_target == NULL) {
      data_target = AddWideData(&literal_list_, val_lo, val_hi);
    }
    ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
    if (r_dest.IsFloat()) {
      res = RawLIR(current_dalvik_offset_, kThumb2Vldrd,
                   r_dest.GetReg(), rs_r15pc.GetReg(), 0, 0, 0, data_target);
    } else {
      DCHECK(r_dest.IsPair());
      res = RawLIR(current_dalvik_offset_, kThumb2LdrdPcRel8,
                   r_dest.GetLowReg(), r_dest.GetHighReg(), rs_r15pc.GetReg(), 0, 0, data_target);
    }
    AppendLIR(res);
  }
  return res;
}
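
// Illustrative note (informal): a core-register pair for 0x0000000100000001 is built as two
// cheap 32-bit constants, the double 2.0 (bits 0x4000000000000000) in a VFP register becomes
// a single kThumb2Vmovd_IMM8 with encoded immediate 0, and anything else falls back to a
// PC-relative kThumb2LdrdPcRel8/kThumb2Vldrd from the literal pool.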

int ArmMir2Lir::EncodeShift(int code, int amount) {
  return ((amount & 0x1f) << 2) | code;
}
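
// Illustrative note (informal): the packed value is consumed as the final operand of the
// shifted-register forms, e.g. EncodeShift(kArmLsl, scale) in LoadBaseIndexed() below places
// the 5-bit shift amount in bits [6:2] above the 2-bit shift-type code.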
694
buzbee2700f7e2014-03-07 09:46:20 -0800695LIR* ArmMir2Lir::LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700696 int scale, OpSize size) {
buzbee091cc402014-03-31 10:14:40 -0700697 bool all_low_regs = r_base.Low8() && r_index.Low8() && r_dest.Low8();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700698 LIR* load;
699 ArmOpcode opcode = kThumbBkpt;
700 bool thumb_form = (all_low_regs && (scale == 0));
buzbee2700f7e2014-03-07 09:46:20 -0800701 RegStorage reg_ptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700702
buzbee091cc402014-03-31 10:14:40 -0700703 if (r_dest.IsFloat()) {
704 if (r_dest.IsSingle()) {
buzbeefd698e62014-04-27 19:33:22 -0700705 DCHECK((size == k32) || (size == kSingle) || (size == kReference));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700706 opcode = kThumb2Vldrs;
707 size = kSingle;
708 } else {
buzbee091cc402014-03-31 10:14:40 -0700709 DCHECK(r_dest.IsDouble());
buzbee695d13a2014-04-19 13:32:20 -0700710 DCHECK((size == k64) || (size == kDouble));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700711 opcode = kThumb2Vldrd;
712 size = kDouble;
713 }
714 } else {
715 if (size == kSingle)
buzbee695d13a2014-04-19 13:32:20 -0700716 size = k32;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700717 }
718
719 switch (size) {
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700720 case kDouble: // fall-through
buzbee695d13a2014-04-19 13:32:20 -0700721 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700722 case kSingle:
723 reg_ptr = AllocTemp();
724 if (scale) {
buzbee2700f7e2014-03-07 09:46:20 -0800725 NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
Brian Carlstrom7940e442013-07-12 13:46:57 -0700726 EncodeShift(kArmLsl, scale));
727 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800728 OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700729 }
buzbee2700f7e2014-03-07 09:46:20 -0800730 load = NewLIR3(opcode, r_dest.GetReg(), reg_ptr.GetReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700731 FreeTemp(reg_ptr);
732 return load;
buzbee695d13a2014-04-19 13:32:20 -0700733 case k32:
734 // Intentional fall-though.
735 case kReference:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700736 opcode = (thumb_form) ? kThumbLdrRRR : kThumb2LdrRRR;
737 break;
738 case kUnsignedHalf:
739 opcode = (thumb_form) ? kThumbLdrhRRR : kThumb2LdrhRRR;
740 break;
741 case kSignedHalf:
742 opcode = (thumb_form) ? kThumbLdrshRRR : kThumb2LdrshRRR;
743 break;
744 case kUnsignedByte:
745 opcode = (thumb_form) ? kThumbLdrbRRR : kThumb2LdrbRRR;
746 break;
747 case kSignedByte:
748 opcode = (thumb_form) ? kThumbLdrsbRRR : kThumb2LdrsbRRR;
749 break;
750 default:
751 LOG(FATAL) << "Bad size: " << size;
752 }
753 if (thumb_form)
buzbee2700f7e2014-03-07 09:46:20 -0800754 load = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700755 else
buzbee2700f7e2014-03-07 09:46:20 -0800756 load = NewLIR4(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700757
758 return load;
759}
760
buzbee2700f7e2014-03-07 09:46:20 -0800761LIR* ArmMir2Lir::StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700762 int scale, OpSize size) {
buzbee091cc402014-03-31 10:14:40 -0700763 bool all_low_regs = r_base.Low8() && r_index.Low8() && r_src.Low8();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700764 LIR* store = NULL;
765 ArmOpcode opcode = kThumbBkpt;
766 bool thumb_form = (all_low_regs && (scale == 0));
buzbee2700f7e2014-03-07 09:46:20 -0800767 RegStorage reg_ptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700768
buzbee091cc402014-03-31 10:14:40 -0700769 if (r_src.IsFloat()) {
770 if (r_src.IsSingle()) {
buzbeefd698e62014-04-27 19:33:22 -0700771 DCHECK((size == k32) || (size == kSingle) || (size == kReference));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700772 opcode = kThumb2Vstrs;
773 size = kSingle;
774 } else {
buzbee091cc402014-03-31 10:14:40 -0700775 DCHECK(r_src.IsDouble());
buzbee695d13a2014-04-19 13:32:20 -0700776 DCHECK((size == k64) || (size == kDouble));
buzbee2700f7e2014-03-07 09:46:20 -0800777 DCHECK_EQ((r_src.GetReg() & 0x1), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700778 opcode = kThumb2Vstrd;
779 size = kDouble;
780 }
781 } else {
782 if (size == kSingle)
buzbee695d13a2014-04-19 13:32:20 -0700783 size = k32;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700784 }
785
786 switch (size) {
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700787 case kDouble: // fall-through
buzbee695d13a2014-04-19 13:32:20 -0700788 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700789 case kSingle:
790 reg_ptr = AllocTemp();
791 if (scale) {
buzbee2700f7e2014-03-07 09:46:20 -0800792 NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
Brian Carlstrom7940e442013-07-12 13:46:57 -0700793 EncodeShift(kArmLsl, scale));
794 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800795 OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700796 }
buzbee2700f7e2014-03-07 09:46:20 -0800797 store = NewLIR3(opcode, r_src.GetReg(), reg_ptr.GetReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700798 FreeTemp(reg_ptr);
799 return store;
buzbee695d13a2014-04-19 13:32:20 -0700800 case k32:
801 // Intentional fall-though.
802 case kReference:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700803 opcode = (thumb_form) ? kThumbStrRRR : kThumb2StrRRR;
804 break;
805 case kUnsignedHalf:
buzbee695d13a2014-04-19 13:32:20 -0700806 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700807 case kSignedHalf:
808 opcode = (thumb_form) ? kThumbStrhRRR : kThumb2StrhRRR;
809 break;
810 case kUnsignedByte:
buzbee695d13a2014-04-19 13:32:20 -0700811 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700812 case kSignedByte:
813 opcode = (thumb_form) ? kThumbStrbRRR : kThumb2StrbRRR;
814 break;
815 default:
816 LOG(FATAL) << "Bad size: " << size;
817 }
818 if (thumb_form)
buzbee2700f7e2014-03-07 09:46:20 -0800819 store = NewLIR3(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700820 else
buzbee2700f7e2014-03-07 09:46:20 -0800821 store = NewLIR4(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700822
823 return store;
824}
825
Vladimir Markodb9d5232014-06-10 18:15:57 +0100826// Helper function for LoadBaseDispBody()/StoreBaseDispBody().
Vladimir Marko37573972014-06-16 10:32:25 +0100827LIR* ArmMir2Lir::LoadStoreUsingInsnWithOffsetImm8Shl2(ArmOpcode opcode, RegStorage r_base,
828 int displacement, RegStorage r_src_dest,
829 RegStorage r_work) {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100830 DCHECK_EQ(displacement & 3, 0);
Vladimir Marko37573972014-06-16 10:32:25 +0100831 constexpr int kOffsetMask = 0xff << 2;
832 int encoded_disp = (displacement & kOffsetMask) >> 2; // Within range of the instruction.
Vladimir Markodb9d5232014-06-10 18:15:57 +0100833 RegStorage r_ptr = r_base;
Vladimir Marko37573972014-06-16 10:32:25 +0100834 if ((displacement & ~kOffsetMask) != 0) {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100835 r_ptr = r_work.Valid() ? r_work : AllocTemp();
Vladimir Marko37573972014-06-16 10:32:25 +0100836 // Add displacement & ~kOffsetMask to base, it's a single instruction for up to +-256KiB.
837 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement & ~kOffsetMask);
Vladimir Markodb9d5232014-06-10 18:15:57 +0100838 }
839 LIR* lir = nullptr;
840 if (!r_src_dest.IsPair()) {
841 lir = NewLIR3(opcode, r_src_dest.GetReg(), r_ptr.GetReg(), encoded_disp);
842 } else {
843 lir = NewLIR4(opcode, r_src_dest.GetLowReg(), r_src_dest.GetHighReg(), r_ptr.GetReg(),
844 encoded_disp);
845 }
Vladimir Marko37573972014-06-16 10:32:25 +0100846 if ((displacement & ~kOffsetMask) != 0 && !r_work.Valid()) {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100847 FreeTemp(r_ptr);
848 }
849 return lir;
850}
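
// Illustrative example (informal): for a displacement of 0x408 the helper adds 0x400 to the
// base in a temporary (or r_work) pointer and encodes the remaining 0x008 as an immediate of
// 2 (0x008 >> 2) in the instruction itself; displacements up to 0x3fc need no extra add.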
851
Brian Carlstrom7940e442013-07-12 13:46:57 -0700852/*
853 * Load value from base + displacement. Optionally perform null check
854 * on base (which must have an associated s_reg and MIR). If not
855 * performing null check, incoming MIR can be null.
856 */
buzbee2700f7e2014-03-07 09:46:20 -0800857LIR* ArmMir2Lir::LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest,
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100858 OpSize size) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700859 LIR* load = NULL;
860 ArmOpcode opcode = kThumbBkpt;
861 bool short_form = false;
862 bool thumb2Form = (displacement < 4092 && displacement >= 0);
buzbee091cc402014-03-31 10:14:40 -0700863 bool all_low = r_dest.Is32Bit() && r_base.Low8() && r_dest.Low8();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700864 int encoded_disp = displacement;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700865 bool already_generated = false;
866 switch (size) {
867 case kDouble:
buzbee695d13a2014-04-19 13:32:20 -0700868 // Intentional fall-though.
Vladimir Markodb9d5232014-06-10 18:15:57 +0100869 case k64:
buzbee091cc402014-03-31 10:14:40 -0700870 if (r_dest.IsFloat()) {
871 DCHECK(!r_dest.IsPair());
Vladimir Marko37573972014-06-16 10:32:25 +0100872 load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrd, r_base, displacement, r_dest);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700873 } else {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100874 DCHECK(r_dest.IsPair());
875 // Use the r_dest.GetLow() for the temporary pointer if needed.
Vladimir Marko37573972014-06-16 10:32:25 +0100876 load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2LdrdI8, r_base, displacement, r_dest,
877 r_dest.GetLow());
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100878 }
879 already_generated = true;
buzbee2700f7e2014-03-07 09:46:20 -0800880 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700881 case kSingle:
buzbee695d13a2014-04-19 13:32:20 -0700882 // Intentional fall-though.
883 case k32:
884 // Intentional fall-though.
885 case kReference:
buzbee091cc402014-03-31 10:14:40 -0700886 if (r_dest.IsFloat()) {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100887 DCHECK(r_dest.IsSingle());
Vladimir Marko37573972014-06-16 10:32:25 +0100888 load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrs, r_base, displacement, r_dest);
Vladimir Markodb9d5232014-06-10 18:15:57 +0100889 already_generated = true;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700890 break;
891 }
buzbee091cc402014-03-31 10:14:40 -0700892 if (r_dest.Low8() && (r_base == rs_rARM_PC) && (displacement <= 1020) &&
893 (displacement >= 0)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700894 short_form = true;
895 encoded_disp >>= 2;
896 opcode = kThumbLdrPcRel;
buzbee091cc402014-03-31 10:14:40 -0700897 } else if (r_dest.Low8() && (r_base == rs_rARM_SP) && (displacement <= 1020) &&
898 (displacement >= 0)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700899 short_form = true;
900 encoded_disp >>= 2;
901 opcode = kThumbLdrSpRel;
buzbee2700f7e2014-03-07 09:46:20 -0800902 } else if (all_low && displacement < 128 && displacement >= 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700903 DCHECK_EQ((displacement & 0x3), 0);
904 short_form = true;
905 encoded_disp >>= 2;
906 opcode = kThumbLdrRRI5;
907 } else if (thumb2Form) {
908 short_form = true;
909 opcode = kThumb2LdrRRI12;
910 }
911 break;
912 case kUnsignedHalf:
buzbee2700f7e2014-03-07 09:46:20 -0800913 if (all_low && displacement < 64 && displacement >= 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700914 DCHECK_EQ((displacement & 0x1), 0);
915 short_form = true;
916 encoded_disp >>= 1;
917 opcode = kThumbLdrhRRI5;
918 } else if (displacement < 4092 && displacement >= 0) {
919 short_form = true;
920 opcode = kThumb2LdrhRRI12;
921 }
922 break;
923 case kSignedHalf:
924 if (thumb2Form) {
925 short_form = true;
926 opcode = kThumb2LdrshRRI12;
927 }
928 break;
929 case kUnsignedByte:
buzbee2700f7e2014-03-07 09:46:20 -0800930 if (all_low && displacement < 32 && displacement >= 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700931 short_form = true;
932 opcode = kThumbLdrbRRI5;
933 } else if (thumb2Form) {
934 short_form = true;
935 opcode = kThumb2LdrbRRI12;
936 }
937 break;
938 case kSignedByte:
939 if (thumb2Form) {
940 short_form = true;
941 opcode = kThumb2LdrsbRRI12;
942 }
943 break;
944 default:
945 LOG(FATAL) << "Bad size: " << size;
946 }
947
948 if (!already_generated) {
949 if (short_form) {
buzbee2700f7e2014-03-07 09:46:20 -0800950 load = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), encoded_disp);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700951 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800952 RegStorage reg_offset = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700953 LoadConstant(reg_offset, encoded_disp);
Vladimir Markodb9d5232014-06-10 18:15:57 +0100954 DCHECK(!r_dest.IsFloat());
955 load = LoadBaseIndexed(r_base, reg_offset, r_dest, 0, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700956 FreeTemp(reg_offset);
957 }
958 }
959
960 // TODO: in future may need to differentiate Dalvik accesses w/ spills
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100961 if (mem_ref_type_ == ResourceMask::kDalvikReg) {
Ian Rogersb28c1c02014-11-08 11:21:21 -0800962 DCHECK_EQ(r_base, rs_rARM_SP);
buzbee2700f7e2014-03-07 09:46:20 -0800963 AnnotateDalvikRegAccess(load, displacement >> 2, true /* is_load */, r_dest.Is64Bit());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700964 }
965 return load;
966}
967
Vladimir Marko674744e2014-04-24 15:18:26 +0100968LIR* ArmMir2Lir::LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
Andreas Gampe3c12c512014-06-24 18:46:29 +0000969 OpSize size, VolatileKind is_volatile) {
buzbee695d13a2014-04-19 13:32:20 -0700970 // TODO: base this on target.
971 if (size == kWord) {
972 size = k32;
973 }
Andreas Gampe3c12c512014-06-24 18:46:29 +0000974 LIR* load;
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700975 if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
976 !cu_->compiler_driver->GetInstructionSetFeatures()->
977 AsArmInstructionSetFeatures()->HasLpae()) {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000978 // Only 64-bit load needs special handling.
979 // If the cpu supports LPAE, aligned LDRD is atomic - fall through to LoadBaseDisp().
980 DCHECK(!r_dest.IsFloat()); // See RegClassForFieldLoadSave().
981 // Use LDREXD for the atomic load. (Expect displacement > 0, don't optimize for == 0.)
982 RegStorage r_ptr = AllocTemp();
983 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
984 LIR* lir = NewLIR3(kThumb2Ldrexd, r_dest.GetLowReg(), r_dest.GetHighReg(), r_ptr.GetReg());
985 FreeTemp(r_ptr);
986 return lir;
987 } else {
988 load = LoadBaseDispBody(r_base, displacement, r_dest, size);
989 }
990
991 if (UNLIKELY(is_volatile == kVolatile)) {
Hans Boehm48f5c472014-06-27 14:50:10 -0700992 GenMemBarrier(kLoadAny);
Andreas Gampe3c12c512014-06-24 18:46:29 +0000993 }
994
995 return load;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700996}
997
Brian Carlstrom7940e442013-07-12 13:46:57 -0700998
buzbee2700f7e2014-03-07 09:46:20 -0800999LIR* ArmMir2Lir::StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src,
1000 OpSize size) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001001 LIR* store = NULL;
1002 ArmOpcode opcode = kThumbBkpt;
1003 bool short_form = false;
1004 bool thumb2Form = (displacement < 4092 && displacement >= 0);
buzbee091cc402014-03-31 10:14:40 -07001005 bool all_low = r_src.Is32Bit() && r_base.Low8() && r_src.Low8();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001006 int encoded_disp = displacement;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001007 bool already_generated = false;
1008 switch (size) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001009 case kDouble:
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001010 // Intentional fall-though.
Vladimir Markodb9d5232014-06-10 18:15:57 +01001011 case k64:
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001012 if (r_src.IsFloat()) {
Zheng Xu5667fdb2014-10-23 18:29:55 +08001013 // Note: If the register is retrieved by register allocator, it should never be a pair.
1014 // But some functions in mir2lir assume 64-bit registers are 32-bit register pairs.
1015 // TODO: Rework Mir2Lir::LoadArg() and Mir2Lir::LoadArgDirect().
1016 if (r_src.IsPair()) {
1017 r_src = As64BitFloatReg(r_src);
1018 }
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001019 DCHECK(!r_src.IsPair());
Vladimir Marko37573972014-06-16 10:32:25 +01001020 store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrd, r_base, displacement, r_src);
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001021 } else {
Vladimir Markodb9d5232014-06-10 18:15:57 +01001022 DCHECK(r_src.IsPair());
Vladimir Marko37573972014-06-16 10:32:25 +01001023 store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2StrdI8, r_base, displacement, r_src);
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001024 }
1025 already_generated = true;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001026 break;
1027 case kSingle:
buzbee091cc402014-03-31 10:14:40 -07001028 // Intentional fall-through.
buzbee695d13a2014-04-19 13:32:20 -07001029 case k32:
buzbee091cc402014-03-31 10:14:40 -07001030 // Intentional fall-through.
buzbee695d13a2014-04-19 13:32:20 -07001031 case kReference:
buzbee091cc402014-03-31 10:14:40 -07001032 if (r_src.IsFloat()) {
1033 DCHECK(r_src.IsSingle());
Vladimir Marko37573972014-06-16 10:32:25 +01001034 store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrs, r_base, displacement, r_src);
Vladimir Markodb9d5232014-06-10 18:15:57 +01001035 already_generated = true;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001036 break;
1037 }
buzbee091cc402014-03-31 10:14:40 -07001038 if (r_src.Low8() && (r_base == rs_r13sp) && (displacement <= 1020) && (displacement >= 0)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001039 short_form = true;
1040 encoded_disp >>= 2;
1041 opcode = kThumbStrSpRel;
buzbee2700f7e2014-03-07 09:46:20 -08001042 } else if (all_low && displacement < 128 && displacement >= 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001043 DCHECK_EQ((displacement & 0x3), 0);
1044 short_form = true;
1045 encoded_disp >>= 2;
1046 opcode = kThumbStrRRI5;
1047 } else if (thumb2Form) {
1048 short_form = true;
1049 opcode = kThumb2StrRRI12;
1050 }
1051 break;
1052 case kUnsignedHalf:
1053 case kSignedHalf:
buzbee2700f7e2014-03-07 09:46:20 -08001054 if (all_low && displacement < 64 && displacement >= 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001055 DCHECK_EQ((displacement & 0x1), 0);
1056 short_form = true;
1057 encoded_disp >>= 1;
1058 opcode = kThumbStrhRRI5;
1059 } else if (thumb2Form) {
1060 short_form = true;
1061 opcode = kThumb2StrhRRI12;
1062 }
1063 break;
1064 case kUnsignedByte:
1065 case kSignedByte:
buzbee2700f7e2014-03-07 09:46:20 -08001066 if (all_low && displacement < 32 && displacement >= 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001067 short_form = true;
1068 opcode = kThumbStrbRRI5;
1069 } else if (thumb2Form) {
1070 short_form = true;
1071 opcode = kThumb2StrbRRI12;
1072 }
1073 break;
1074 default:
1075 LOG(FATAL) << "Bad size: " << size;
1076 }
1077 if (!already_generated) {
1078 if (short_form) {
buzbee2700f7e2014-03-07 09:46:20 -08001079 store = NewLIR3(opcode, r_src.GetReg(), r_base.GetReg(), encoded_disp);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001080 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001081 RegStorage r_scratch = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001082 LoadConstant(r_scratch, encoded_disp);
Vladimir Markodb9d5232014-06-10 18:15:57 +01001083 DCHECK(!r_src.IsFloat());
1084 store = StoreBaseIndexed(r_base, r_scratch, r_src, 0, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001085 FreeTemp(r_scratch);
1086 }
1087 }
1088
1089 // TODO: In future, may need to differentiate Dalvik & spill accesses
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001090 if (mem_ref_type_ == ResourceMask::kDalvikReg) {
Ian Rogersb28c1c02014-11-08 11:21:21 -08001091 DCHECK_EQ(r_base, rs_rARM_SP);
buzbee2700f7e2014-03-07 09:46:20 -08001092 AnnotateDalvikRegAccess(store, displacement >> 2, false /* is_load */, r_src.Is64Bit());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001093 }
1094 return store;
1095}
1096
Andreas Gampede686762014-06-24 18:42:06 +00001097LIR* ArmMir2Lir::StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
Andreas Gampe3c12c512014-06-24 18:46:29 +00001098 OpSize size, VolatileKind is_volatile) {
1099 if (UNLIKELY(is_volatile == kVolatile)) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001100 // Ensure that prior accesses become visible to other threads first.
1101 GenMemBarrier(kAnyStore);
Andreas Gampe2689fba2014-06-23 13:23:04 -07001102 }
Andreas Gampe3c12c512014-06-24 18:46:29 +00001103
1104 LIR* store;
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001105 if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
1106 !cu_->compiler_driver->GetInstructionSetFeatures()->
1107 AsArmInstructionSetFeatures()->HasLpae()) {
Andreas Gampe3c12c512014-06-24 18:46:29 +00001108 // Only 64-bit store needs special handling.
1109 // If the cpu supports LPAE, aligned STRD is atomic - fall through to StoreBaseDisp().
1110 // Use STREXD for the atomic store. (Expect displacement > 0, don't optimize for == 0.)
1111 DCHECK(!r_src.IsFloat()); // See RegClassForFieldLoadSave().
1112 RegStorage r_ptr = AllocTemp();
1113 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
1114 LIR* fail_target = NewLIR0(kPseudoTargetLabel);
1115 // We have only 5 temporary registers available and if r_base, r_src and r_ptr already
1116 // take 4, we can't directly allocate 2 more for LDREXD temps. In that case clobber r_ptr
1117 // in LDREXD and recalculate it from r_base.
1118 RegStorage r_temp = AllocTemp();
Serguei Katkov9ee45192014-07-17 14:39:03 +07001119 RegStorage r_temp_high = AllocTemp(false); // We may not have another temp.
Andreas Gampe3c12c512014-06-24 18:46:29 +00001120 if (r_temp_high.Valid()) {
1121 NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_temp_high.GetReg(), r_ptr.GetReg());
1122 FreeTemp(r_temp_high);
1123 FreeTemp(r_temp);
1124 } else {
1125 // If we don't have another temp, clobber r_ptr in LDREXD and reload it.
1126 NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_ptr.GetReg(), r_ptr.GetReg());
1127 FreeTemp(r_temp); // May need the temp for kOpAdd.
1128 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
1129 }
1130 store = NewLIR4(kThumb2Strexd, r_temp.GetReg(), r_src.GetLowReg(), r_src.GetHighReg(),
1131 r_ptr.GetReg());
1132 OpCmpImmBranch(kCondNe, r_temp, 0, fail_target);
1133 FreeTemp(r_ptr);
1134 } else {
1135 // TODO: base this on target.
1136 if (size == kWord) {
1137 size = k32;
1138 }
1139
1140 store = StoreBaseDispBody(r_base, displacement, r_src, size);
1141 }
1142
1143 if (UNLIKELY(is_volatile == kVolatile)) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001144 // Preserve order with respect to any subsequent volatile loads.
1145 // We need StoreLoad, but that generally requires the most expensive barrier.
1146 GenMemBarrier(kAnyAny);
Andreas Gampe3c12c512014-06-24 18:46:29 +00001147 }
1148
1149 return store;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001150}
1151
buzbee2700f7e2014-03-07 09:46:20 -08001152LIR* ArmMir2Lir::OpFpRegCopy(RegStorage r_dest, RegStorage r_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001153 int opcode;
buzbee091cc402014-03-31 10:14:40 -07001154 DCHECK_EQ(r_dest.IsDouble(), r_src.IsDouble());
1155 if (r_dest.IsDouble()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001156 opcode = kThumb2Vmovd;
1157 } else {
buzbee091cc402014-03-31 10:14:40 -07001158 if (r_dest.IsSingle()) {
1159 opcode = r_src.IsSingle() ? kThumb2Vmovs : kThumb2Fmsr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001160 } else {
buzbee091cc402014-03-31 10:14:40 -07001161 DCHECK(r_src.IsSingle());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001162 opcode = kThumb2Fmrs;
1163 }
1164 }
buzbee2700f7e2014-03-07 09:46:20 -08001165 LIR* res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001166 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) {
1167 res->flags.is_nop = true;
1168 }
1169 return res;
1170}
1171
buzbee2700f7e2014-03-07 09:46:20 -08001172LIR* ArmMir2Lir::OpMem(OpKind op, RegStorage r_base, int disp) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001173 UNUSED(op, r_base, disp);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001174 LOG(FATAL) << "Unexpected use of OpMem for Arm";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001175 UNREACHABLE();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001176}
1177
Andreas Gampe98430592014-07-27 19:44:50 -07001178LIR* ArmMir2Lir::InvokeTrampoline(OpKind op, RegStorage r_tgt, QuickEntrypointEnum trampoline) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001179 UNUSED(trampoline); // The address of the trampoline is already loaded into r_tgt.
Andreas Gampe98430592014-07-27 19:44:50 -07001180 return OpReg(op, r_tgt);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001181}
1182
Serban Constantinescu63999682014-07-15 17:44:21 +01001183size_t ArmMir2Lir::GetInstructionOffset(LIR* lir) {
1184 uint64_t check_flags = GetTargetInstFlags(lir->opcode);
1185 DCHECK((check_flags & IS_LOAD) || (check_flags & IS_STORE));
1186 size_t offset = (check_flags & IS_TERTIARY_OP) ? lir->operands[2] : 0;
1187
1188 if (check_flags & SCALED_OFFSET_X2) {
1189 offset = offset * 2;
1190 } else if (check_flags & SCALED_OFFSET_X4) {
1191 offset = offset * 4;
1192 }
1193 return offset;
1194}
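
// Illustrative example (informal): for an encoding flagged SCALED_OFFSET_X4 with
// lir->operands[2] == 3 the reported memory offset is 12 bytes; unscaled encodings report
// operands[2] unchanged.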

}  // namespace art