/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codegen_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "arm_lir.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "dex/reg_storage_eq.h"

namespace art {

/* This file contains codegen for the Thumb ISA. */

static int32_t EncodeImmSingle(int32_t value) {
  int32_t res;
  int32_t bit_a = (value & 0x80000000) >> 31;
  int32_t not_bit_b = (value & 0x40000000) >> 30;
  int32_t bit_b = (value & 0x20000000) >> 29;
  int32_t b_smear = (value & 0x3e000000) >> 25;
  int32_t slice = (value & 0x01f80000) >> 19;
  int32_t zeroes = (value & 0x0007ffff);
  if (zeroes != 0)
    return -1;
  if (bit_b) {
    if ((not_bit_b != 0) || (b_smear != 0x1f))
      return -1;
  } else {
    if ((not_bit_b != 1) || (b_smear != 0x0))
      return -1;
  }
  res = (bit_a << 7) | (bit_b << 6) | slice;
  return res;
}
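
// For example, the bit pattern of +2.0f (0x40000000) passes the checks above and encodes to 0
// (LoadFPConstantValue() below relies on this), while any value whose low 19 mantissa bits are
// nonzero cannot be represented and yields -1.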

/*
 * Determine whether value can be encoded as a Thumb2 floating point
 * immediate. If not, return -1. If so, return encoded 8-bit value.
 */
static int32_t EncodeImmDouble(int64_t value) {
  int32_t res;
  int32_t bit_a = (value & INT64_C(0x8000000000000000)) >> 63;
  int32_t not_bit_b = (value & INT64_C(0x4000000000000000)) >> 62;
  int32_t bit_b = (value & INT64_C(0x2000000000000000)) >> 61;
  int32_t b_smear = (value & INT64_C(0x3fc0000000000000)) >> 54;
  int32_t slice = (value & INT64_C(0x003f000000000000)) >> 48;
  uint64_t zeroes = (value & INT64_C(0x0000ffffffffffff));
  if (zeroes != 0ull)
    return -1;
  if (bit_b) {
    if ((not_bit_b != 0) || (b_smear != 0xff))
      return -1;
  } else {
    if ((not_bit_b != 1) || (b_smear != 0x0))
      return -1;
  }
  res = (bit_a << 7) | (bit_b << 6) | slice;
  return res;
}

LIR* ArmMir2Lir::LoadFPConstantValue(int r_dest, int value) {
  DCHECK(RegStorage::IsSingle(r_dest));
  if (value == 0) {
    // TODO: we need better info about the target CPU. A vector exclusive or
    //       would probably be better here if we could rely on its existence.
    // Load an immediate +2.0 (which encodes to 0)
    NewLIR2(kThumb2Vmovs_IMM8, r_dest, 0);
    // +0.0 = +2.0 - +2.0
    return NewLIR3(kThumb2Vsubs, r_dest, r_dest, r_dest);
  } else {
    int encoded_imm = EncodeImmSingle(value);
    if (encoded_imm >= 0) {
      return NewLIR2(kThumb2Vmovs_IMM8, r_dest, encoded_imm);
    }
  }
  LIR* data_target = ScanLiteralPool(literal_list_, value, 0);
  if (data_target == NULL) {
    data_target = AddWordData(&literal_list_, value);
  }
  ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
  LIR* load_pc_rel = RawLIR(current_dalvik_offset_, kThumb2Vldrs,
                            r_dest, rs_r15pc.GetReg(), 0, 0, 0, data_target);
  AppendLIR(load_pc_rel);
  return load_pc_rel;
}

/*
 * Determine whether value can be encoded as a Thumb2 modified
 * immediate. If not, return -1. If so, return i:imm3:a:bcdefgh form.
 */
int ArmMir2Lir::ModifiedImmediate(uint32_t value) {
  uint32_t b0 = value & 0xff;

  /* Note: case of value==0 must use 0:000:0:0000000 encoding */
  if (value <= 0xFF)
    return b0;  // 0:000:a:bcdefgh
  if (value == ((b0 << 16) | b0))
    return (0x1 << 8) | b0; /* 0:001:a:bcdefgh */
  if (value == ((b0 << 24) | (b0 << 16) | (b0 << 8) | b0))
    return (0x3 << 8) | b0; /* 0:011:a:bcdefgh */
  b0 = (value >> 8) & 0xff;
  if (value == ((b0 << 24) | (b0 << 8)))
    return (0x2 << 8) | b0; /* 0:010:a:bcdefgh */
  /* Can we do it with rotation? */
  int z_leading = CLZ(value);
  int z_trailing = CTZ(value);
  /* A run of eight or fewer active bits? */
  if ((z_leading + z_trailing) < 24)
    return -1;  /* No - bail */
  /* Left-justify the constant, discarding msb (known to be 1). */
  value <<= z_leading + 1;
  /* Create bcdefgh */
  value >>= 25;
  /* Put it all together */
  return value | ((0x8 + z_leading) << 7); /* [01000..11111]:bcdefgh */
}
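
// Worked examples: ModifiedImmediate(0x00ab00ab) returns (0x1 << 8) | 0xab via the 00XY00XY
// pattern, while ModifiedImmediate(0x101) returns -1 because its active bits span nine
// positions, more than the eight-bit rotated pattern can hold.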

bool ArmMir2Lir::InexpensiveConstantInt(int32_t value) {
  return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
}

bool ArmMir2Lir::InexpensiveConstantInt(int32_t value, Instruction::Code opcode) {
  switch (opcode) {
    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
      if ((value >> 12) == (value >> 31)) {  // Signed 12-bit, RRI12 versions of ADD/SUB.
        return true;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE:
      return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(-value) >= 0);
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      return true;
    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      if ((value >> 16) == 0) {
        return true;  // movw, 16-bit unsigned.
      }
      FALLTHROUGH_INTENDED;
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::AND_INT_LIT16:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::OR_INT_LIT16:
    case Instruction::OR_INT_LIT8:
      return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
    case Instruction::XOR_INT_LIT16:
    case Instruction::XOR_INT_LIT8:
      return (ModifiedImmediate(value) >= 0);
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::MUL_INT_LIT8:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::DIV_INT_LIT8:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::REM_INT_LIT8:
    case Instruction::REM_INT_LIT16: {
      EasyMultiplyOp ops[2];
      return GetEasyMultiplyTwoOps(value, ops);
    }
    default:
      return false;
  }
}
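
// For illustration: an ADD_INT constant of 0x800 is cheap (it passes the signed 12-bit RRI12
// check above) and an AND_INT constant of 0x00ff00ff is cheap because the repeated-byte pattern
// is a valid modified immediate, while 0x12345678 is not cheap for either opcode.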

bool ArmMir2Lir::InexpensiveConstantFloat(int32_t value) {
  return EncodeImmSingle(value) >= 0;
}

bool ArmMir2Lir::InexpensiveConstantLong(int64_t value) {
  return InexpensiveConstantInt(High32Bits(value)) && InexpensiveConstantInt(Low32Bits(value));
}

bool ArmMir2Lir::InexpensiveConstantDouble(int64_t value) {
  return EncodeImmDouble(value) >= 0;
}

/*
 * Load an immediate using a shortcut if possible; otherwise
 * grab from the per-translation literal pool.
 *
 * No additional register clobbering operation is performed. Use this version when
 * 1) r_dest is freshly returned from AllocTemp or
 * 2) the codegen is under fixed register usage.
 */
LIR* ArmMir2Lir::LoadConstantNoClobber(RegStorage r_dest, int value) {
  LIR* res;
  int mod_imm;

  if (r_dest.IsFloat()) {
    return LoadFPConstantValue(r_dest.GetReg(), value);
  }

  /* See if the value can be constructed cheaply */
  if (r_dest.Low8() && (value >= 0) && (value <= 255)) {
    return NewLIR2(kThumbMovImm, r_dest.GetReg(), value);
  }
  /* Check modified immediate special cases */
  mod_imm = ModifiedImmediate(value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MovI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  mod_imm = ModifiedImmediate(~value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MvnI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  /* 16-bit immediate? */
  if ((value & 0xffff) == value) {
    res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), value);
    return res;
  }
  /* Do a low/high pair */
  res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), Low16Bits(value));
  NewLIR2(kThumb2MovImm16H, r_dest.GetReg(), High16Bits(value));
  return res;
}
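
// Rough cost ladder for LoadConstantNoClobber(): 0..255 in a low register is one 16-bit MOV,
// values encodable as a (possibly inverted) modified immediate use one MOV/MVN, plain 16-bit
// values use MOVW, and anything else (e.g. 0x12345678) takes a MOVW/MOVT pair.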

LIR* ArmMir2Lir::OpUnconditionalBranch(LIR* target) {
  LIR* res = NewLIR1(kThumbBUncond, 0 /* offset to be patched during assembly */);
  res->target = target;
  return res;
}

LIR* ArmMir2Lir::OpCondBranch(ConditionCode cc, LIR* target) {
  LIR* branch = NewLIR2(kThumbBCond, 0 /* offset to be patched */,
                        ArmConditionEncoding(cc));
  branch->target = target;
  return branch;
}

LIR* ArmMir2Lir::OpReg(OpKind op, RegStorage r_dest_src) {
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpBlx:
      opcode = kThumbBlxR;
      break;
    case kOpBx:
      opcode = kThumbBx;
      break;
    default:
      LOG(FATAL) << "Bad opcode " << op;
  }
  return NewLIR1(opcode, r_dest_src.GetReg());
}

LIR* ArmMir2Lir::OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2,
                               int shift) {
  bool thumb_form =
      ((shift == 0) && r_dest_src1.Low8() && r_src2.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdc:
      opcode = (thumb_form) ? kThumbAdcRR : kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = (thumb_form) ? kThumbAndRR : kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = (thumb_form) ? kThumbBicRR : kThumb2BicRRR;
      break;
    case kOpCmn:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbCmnRR : kThumb2CmnRR;
      break;
    case kOpCmp:
      if (thumb_form)
        opcode = kThumbCmpRR;
      else if ((shift == 0) && !r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbCmpHH;
      else if ((shift == 0) && r_dest_src1.Low8())
        opcode = kThumbCmpLH;
      else if (shift == 0)
        opcode = kThumbCmpHL;
      else
        opcode = kThumb2CmpRR;
      break;
    case kOpXor:
      opcode = (thumb_form) ? kThumbEorRR : kThumb2EorRRR;
      break;
    case kOpMov:
      DCHECK_EQ(shift, 0);
      if (r_dest_src1.Low8() && r_src2.Low8())
        opcode = kThumbMovRR;
      else if (!r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbMovRR_H2H;
      else if (r_dest_src1.Low8())
        opcode = kThumbMovRR_H2L;
      else
        opcode = kThumbMovRR_L2H;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbMul : kThumb2MulRRR;
      break;
    case kOpMvn:
      opcode = (thumb_form) ? kThumbMvn : kThumb2MnvRR;
      break;
    case kOpNeg:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbNeg : kThumb2NegRR;
      break;
    case kOpOr:
      opcode = (thumb_form) ? kThumbOrr : kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = (thumb_form) ? kThumbSbc : kThumb2SbcRRR;
      break;
    case kOpTst:
      opcode = (thumb_form) ? kThumbTst : kThumb2TstRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLslRR : kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLsrRR : kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbAsrRR : kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbRorRR : kThumb2RorRRR;
      break;
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRev:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRev;
      break;
    case kOpRevsh:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevshRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRevsh;
      break;
    case kOp2Byte:
      DCHECK_EQ(shift, 0);
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 8);
    case kOp2Short:
      DCHECK_EQ(shift, 0);
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    case kOp2Char:
      DCHECK_EQ(shift, 0);
      return NewLIR4(kThumb2Ubfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  if (EncodingMap[opcode].flags & IS_BINARY_OP) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), r_src2.GetReg());
  } else if (EncodingMap[opcode].flags & IS_TERTIARY_OP) {
    if (EncodingMap[opcode].field_loc[2].kind == kFmtShift) {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_src2.GetReg(), shift);
    } else {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg());
    }
  } else if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    LOG(FATAL) << "Unexpected encoding operand count";
    return NULL;
  }
}

LIR* ArmMir2Lir::OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) {
  return OpRegRegShift(op, r_dest_src1, r_src2, 0);
}

LIR* ArmMir2Lir::OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type) {
  UNUSED(r_dest, r_base, offset, move_type);
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

LIR* ArmMir2Lir::OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type) {
  UNUSED(r_base, offset, r_src, move_type);
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

LIR* ArmMir2Lir::OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) {
  UNUSED(op, cc, r_dest, r_src);
  LOG(FATAL) << "Unexpected use of OpCondRegReg for Arm";
  UNREACHABLE();
}

LIR* ArmMir2Lir::OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1,
                                  RegStorage r_src2, int shift) {
  ArmOpcode opcode = kThumbBkpt;
  bool thumb_form = (shift == 0) && r_dest.Low8() && r_src1.Low8() && r_src2.Low8();
  switch (op) {
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = kThumb2BicRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2MulRRR;
      break;
    case kOpDiv:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2SdivRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2RorRRR;
      break;
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    DCHECK(EncodingMap[opcode].flags & IS_TERTIARY_OP);
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg());
  }
}

LIR* ArmMir2Lir::OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2) {
  return OpRegRegRegShift(op, r_dest, r_src1, r_src2, 0);
}

LIR* ArmMir2Lir::OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  ArmOpcode opcode = kThumbBkpt;
  ArmOpcode alt_opcode = kThumbBkpt;
  bool all_low_regs = r_dest.Low8() && r_src1.Low8();
  int32_t mod_imm = ModifiedImmediate(value);

  switch (op) {
    case kOpLsl:
      if (all_low_regs)
        return NewLIR3(kThumbLslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpLsr:
      if (all_low_regs)
        return NewLIR3(kThumbLsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAsr:
      if (all_low_regs)
        return NewLIR3(kThumbAsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2AsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpRor:
      return NewLIR3(kThumb2RorRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAdd:
      if (r_dest.Low8() && (r_src1 == rs_r13sp) && (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddSpRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      } else if (r_dest.Low8() && (r_src1 == rs_r15pc) &&
                 (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddPcRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      }
      FALLTHROUGH_INTENDED;
    case kOpSub:
      if (all_low_regs && ((abs_value & 0x7) == abs_value)) {
        if (op == kOpAdd)
          opcode = (neg) ? kThumbSubRRI3 : kThumbAddRRI3;
        else
          opcode = (neg) ? kThumbAddRRI3 : kThumbSubRRI3;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          op = (op == kOpAdd) ? kOpSub : kOpAdd;
        }
      }
      if (mod_imm < 0 && (abs_value >> 12) == 0) {
        // This is deliberately used only if modified immediate encoding is inadequate since
        // we sometimes actually use the flags for small values but not necessarily low regs.
        if (op == kOpAdd)
          opcode = (neg) ? kThumb2SubRRI12 : kThumb2AddRRI12;
        else
          opcode = (neg) ? kThumb2AddRRI12 : kThumb2SubRRI12;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      if (op == kOpSub) {
        opcode = kThumb2SubRRI8M;
        alt_opcode = kThumb2SubRRR;
      } else {
        opcode = kThumb2AddRRI8M;
        alt_opcode = kThumb2AddRRR;
      }
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRI8M;
      alt_opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRI8M;
      alt_opcode = kThumb2AdcRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRI8M;
      alt_opcode = kThumb2SbcRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRI8M;
      alt_opcode = kThumb2OrrRRR;
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          opcode = kThumb2OrnRRI8M;
        }
      }
      break;
    case kOpAnd:
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          return NewLIR3(kThumb2BicRRI8M, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
        }
      }
      opcode = kThumb2AndRRI8M;
      alt_opcode = kThumb2AndRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRI8M;
      alt_opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      // TUNING: power of 2, shift & add
      mod_imm = -1;
      alt_opcode = kThumb2MulRRR;
      break;
    case kOpCmp: {
      LIR* res;
      if (mod_imm >= 0) {
        res = NewLIR2(kThumb2CmpRI8M, r_src1.GetReg(), mod_imm);
      } else {
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          res = NewLIR2(kThumb2CmnRI8M, r_src1.GetReg(), mod_imm);
        } else {
          RegStorage r_tmp = AllocTemp();
          res = LoadConstant(r_tmp, value);
          OpRegReg(kOpCmp, r_src1, r_tmp);
          FreeTemp(r_tmp);
        }
      }
      return res;
    }
    default:
      LOG(FATAL) << "Bad opcode: " << op;
  }

  if (mod_imm >= 0) {
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
  } else {
    RegStorage r_scratch = AllocTemp();
    LoadConstant(r_scratch, value);
    LIR* res;
    if (EncodingMap[alt_opcode].flags & IS_QUAD_OP)
      res = NewLIR4(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg(), 0);
    else
      res = NewLIR3(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg());
    FreeTemp(r_scratch);
    return res;
  }
}
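
// Summary of the immediate selection above: ADD/SUB of |value| <= 7 between low registers uses
// the 16-bit RRI3 form, constants encodable as a (possibly negated) modified immediate use the
// *RRI8M encodings, remaining ADD/SUB constants below 4096 use the 12-bit RRI12 form, and
// everything else loads the constant into a scratch register and emits the three-register
// alt_opcode.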

/* Handle Thumb-only variants here - otherwise punt to OpRegRegImm */
LIR* ArmMir2Lir::OpRegImm(OpKind op, RegStorage r_dest_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  bool short_form = (((abs_value & 0xff) == abs_value) && r_dest_src1.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdd:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbAddSpI7, value >> 2);
      } else if (short_form) {
        opcode = (neg) ? kThumbSubRI8 : kThumbAddRI8;
      }
      break;
    case kOpSub:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbSubSpI7, value >> 2);
      } else if (short_form) {
        opcode = (neg) ? kThumbAddRI8 : kThumbSubRI8;
      }
      break;
    case kOpCmp:
      if (!neg && short_form) {
        opcode = kThumbCmpRI8;
      } else {
        short_form = false;
      }
      break;
    default:
      /* Punt to OpRegRegImm - if bad case catch it there */
      short_form = false;
      break;
  }
  if (short_form) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), abs_value);
  } else {
    return OpRegRegImm(op, r_dest_src1, r_dest_src1, value);
  }
}

LIR* ArmMir2Lir::LoadConstantWide(RegStorage r_dest, int64_t value) {
  LIR* res = NULL;
  int32_t val_lo = Low32Bits(value);
  int32_t val_hi = High32Bits(value);
  if (r_dest.IsFloat()) {
    DCHECK(!r_dest.IsPair());
    if ((val_lo == 0) && (val_hi == 0)) {
      // TODO: we need better info about the target CPU. A vector exclusive or
      //       would probably be better here if we could rely on its existence.
      // Load an immediate +2.0 (which encodes to 0)
      NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), 0);
      // +0.0 = +2.0 - +2.0
      res = NewLIR3(kThumb2Vsubd, r_dest.GetReg(), r_dest.GetReg(), r_dest.GetReg());
    } else {
      int encoded_imm = EncodeImmDouble(value);
      if (encoded_imm >= 0) {
        res = NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), encoded_imm);
      }
    }
  } else {
    // NOTE: Arm32 assumption here.
    DCHECK(r_dest.IsPair());
    if ((InexpensiveConstantInt(val_lo) && (InexpensiveConstantInt(val_hi)))) {
      res = LoadConstantNoClobber(r_dest.GetLow(), val_lo);
      LoadConstantNoClobber(r_dest.GetHigh(), val_hi);
    }
  }
  if (res == NULL) {
    // No short form - load from the literal pool.
    LIR* data_target = ScanLiteralPoolWide(literal_list_, val_lo, val_hi);
    if (data_target == NULL) {
      data_target = AddWideData(&literal_list_, val_lo, val_hi);
    }
    ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
    if (r_dest.IsFloat()) {
      res = RawLIR(current_dalvik_offset_, kThumb2Vldrd,
                   r_dest.GetReg(), rs_r15pc.GetReg(), 0, 0, 0, data_target);
    } else {
      DCHECK(r_dest.IsPair());
      res = RawLIR(current_dalvik_offset_, kThumb2LdrdPcRel8,
                   r_dest.GetLowReg(), r_dest.GetHighReg(), rs_r15pc.GetReg(), 0, 0, data_target);
    }
    AppendLIR(res);
  }
  return res;
}

int ArmMir2Lir::EncodeShift(int code, int amount) {
  return ((amount & 0x1f) << 2) | code;
}
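
// EncodeShift() packs the 5-bit shift amount above the shift-type code in the low bits; for
// example, LoadBaseIndexed() below passes EncodeShift(kArmLsl, scale) to scale the index
// register as part of the address computation.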

LIR* ArmMir2Lir::LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest,
                                 int scale, OpSize size) {
  bool all_low_regs = r_base.Low8() && r_index.Low8() && r_dest.Low8();
  LIR* load;
  ArmOpcode opcode = kThumbBkpt;
  bool thumb_form = (all_low_regs && (scale == 0));
  RegStorage reg_ptr;

  if (r_dest.IsFloat()) {
    if (r_dest.IsSingle()) {
      DCHECK((size == k32) || (size == kSingle) || (size == kReference));
      opcode = kThumb2Vldrs;
      size = kSingle;
    } else {
      DCHECK(r_dest.IsDouble());
      DCHECK((size == k64) || (size == kDouble));
      opcode = kThumb2Vldrd;
      size = kDouble;
    }
  } else {
    if (size == kSingle)
      size = k32;
  }

  switch (size) {
    case kDouble:
    // Intentional fall-through.
    case kSingle:
      reg_ptr = AllocTemp();
      if (scale) {
        NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
                EncodeShift(kArmLsl, scale));
      } else {
        OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
      }
      load = NewLIR3(opcode, r_dest.GetReg(), reg_ptr.GetReg(), 0);
      FreeTemp(reg_ptr);
      return load;
    case k32:
    // Intentional fall-through.
    case kReference:
      opcode = (thumb_form) ? kThumbLdrRRR : kThumb2LdrRRR;
      break;
    case kUnsignedHalf:
      opcode = (thumb_form) ? kThumbLdrhRRR : kThumb2LdrhRRR;
      break;
    case kSignedHalf:
      opcode = (thumb_form) ? kThumbLdrshRRR : kThumb2LdrshRRR;
      break;
    case kUnsignedByte:
      opcode = (thumb_form) ? kThumbLdrbRRR : kThumb2LdrbRRR;
      break;
    case kSignedByte:
      opcode = (thumb_form) ? kThumbLdrsbRRR : kThumb2LdrsbRRR;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }
  if (thumb_form)
    load = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg());
  else
    load = NewLIR4(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);

  return load;
}

LIR* ArmMir2Lir::StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src,
                                  int scale, OpSize size) {
  bool all_low_regs = r_base.Low8() && r_index.Low8() && r_src.Low8();
  LIR* store = NULL;
  ArmOpcode opcode = kThumbBkpt;
  bool thumb_form = (all_low_regs && (scale == 0));
  RegStorage reg_ptr;

  if (r_src.IsFloat()) {
    if (r_src.IsSingle()) {
      DCHECK((size == k32) || (size == kSingle) || (size == kReference));
      opcode = kThumb2Vstrs;
      size = kSingle;
    } else {
      DCHECK(r_src.IsDouble());
      DCHECK((size == k64) || (size == kDouble));
      DCHECK_EQ((r_src.GetReg() & 0x1), 0);
      opcode = kThumb2Vstrd;
      size = kDouble;
    }
  } else {
    if (size == kSingle)
      size = k32;
  }

  switch (size) {
    case kDouble:
    // Intentional fall-through.
    case kSingle:
      reg_ptr = AllocTemp();
      if (scale) {
        NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
                EncodeShift(kArmLsl, scale));
      } else {
        OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
      }
      store = NewLIR3(opcode, r_src.GetReg(), reg_ptr.GetReg(), 0);
      FreeTemp(reg_ptr);
      return store;
    case k32:
    // Intentional fall-through.
    case kReference:
      opcode = (thumb_form) ? kThumbStrRRR : kThumb2StrRRR;
      break;
    case kUnsignedHalf:
    // Intentional fall-through.
    case kSignedHalf:
      opcode = (thumb_form) ? kThumbStrhRRR : kThumb2StrhRRR;
      break;
    case kUnsignedByte:
    // Intentional fall-through.
    case kSignedByte:
      opcode = (thumb_form) ? kThumbStrbRRR : kThumb2StrbRRR;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }
  if (thumb_form)
    store = NewLIR3(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg());
  else
    store = NewLIR4(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);

  return store;
}

// Helper function for LoadBaseDispBody()/StoreBaseDispBody().
LIR* ArmMir2Lir::LoadStoreUsingInsnWithOffsetImm8Shl2(ArmOpcode opcode, RegStorage r_base,
                                                      int displacement, RegStorage r_src_dest,
                                                      RegStorage r_work) {
  DCHECK_EQ(displacement & 3, 0);
  constexpr int kOffsetMask = 0xff << 2;
  int encoded_disp = (displacement & kOffsetMask) >> 2;  // Within range of the instruction.
  RegStorage r_ptr = r_base;
  if ((displacement & ~kOffsetMask) != 0) {
    r_ptr = r_work.Valid() ? r_work : AllocTemp();
    // Add displacement & ~kOffsetMask to base, it's a single instruction for up to +-256KiB.
    OpRegRegImm(kOpAdd, r_ptr, r_base, displacement & ~kOffsetMask);
  }
  LIR* lir = nullptr;
  if (!r_src_dest.IsPair()) {
    lir = NewLIR3(opcode, r_src_dest.GetReg(), r_ptr.GetReg(), encoded_disp);
  } else {
    lir = NewLIR4(opcode, r_src_dest.GetLowReg(), r_src_dest.GetHighReg(), r_ptr.GetReg(),
                  encoded_disp);
  }
  if ((displacement & ~kOffsetMask) != 0 && !r_work.Valid()) {
    FreeTemp(r_ptr);
  }
  return lir;
}
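
// Example: with displacement 0x404 the instruction encodes offset (0x004 >> 2) == 1 and the
// remaining 0x400 is added to the base register first, so an out-of-range VLDR/VSTR/LDRD/STRD
// offset costs one extra ADD.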

/*
 * Load value from base + displacement. Optionally perform null check
 * on base (which must have an associated s_reg and MIR). If not
 * performing null check, incoming MIR can be null.
 */
LIR* ArmMir2Lir::LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest,
                                  OpSize size) {
  LIR* load = nullptr;
  ArmOpcode opcode16 = kThumbBkpt;  // 16-bit Thumb opcode.
  ArmOpcode opcode32 = kThumbBkpt;  // 32-bit Thumb2 opcode.
  bool short_form = false;
  bool all_low = r_dest.Is32Bit() && r_base.Low8() && r_dest.Low8();
  int scale = 0;  // Used for opcode16 and some indexed loads.
  bool already_generated = false;
  switch (size) {
    case kDouble:
    // Intentional fall-through.
    case k64:
      if (r_dest.IsFloat()) {
        DCHECK(!r_dest.IsPair());
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrd, r_base, displacement, r_dest);
      } else {
        DCHECK(r_dest.IsPair());
        // Use the r_dest.GetLow() for the temporary pointer if needed.
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2LdrdI8, r_base, displacement, r_dest,
                                                    r_dest.GetLow());
      }
      already_generated = true;
      break;
    case kSingle:
    // Intentional fall-through.
    case k32:
    // Intentional fall-through.
    case kReference:
      if (r_dest.IsFloat()) {
        DCHECK(r_dest.IsSingle());
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrs, r_base, displacement, r_dest);
        already_generated = true;
        break;
      }
      DCHECK_EQ((displacement & 0x3), 0);
      scale = 2;
      if (r_dest.Low8() && (r_base == rs_rARM_PC) && (displacement <= 1020) &&
          (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbLdrPcRel;
      } else if (r_dest.Low8() && (r_base == rs_rARM_SP) && (displacement <= 1020) &&
                 (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbLdrSpRel;
      } else {
        short_form = all_low && (displacement >> (5 + scale)) == 0;
        opcode16 = kThumbLdrRRI5;
        opcode32 = kThumb2LdrRRI12;
      }
      break;
    case kUnsignedHalf:
      DCHECK_EQ((displacement & 0x1), 0);
      scale = 1;
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbLdrhRRI5;
      opcode32 = kThumb2LdrhRRI12;
      break;
    case kSignedHalf:
      DCHECK_EQ((displacement & 0x1), 0);
      scale = 1;
      DCHECK_EQ(opcode16, kThumbBkpt);  // Not available.
      opcode32 = kThumb2LdrshRRI12;
      break;
    case kUnsignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbLdrbRRI5;
      opcode32 = kThumb2LdrbRRI12;
      break;
    case kSignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      DCHECK_EQ(opcode16, kThumbBkpt);  // Not available.
      opcode32 = kThumb2LdrsbRRI12;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }

  if (!already_generated) {
    if (short_form) {
      load = NewLIR3(opcode16, r_dest.GetReg(), r_base.GetReg(), displacement >> scale);
    } else if ((displacement >> 12) == 0) {  // Thumb2 form.
      load = NewLIR3(opcode32, r_dest.GetReg(), r_base.GetReg(), displacement);
    } else if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) &&
        InexpensiveConstantInt(displacement & ~0x00000fff, Instruction::ADD_INT)) {
      // In this case, using LoadIndexed would emit 3 insns (movw+movt+ldr) but we can
      // actually do it in two because we know that the kOpAdd is a single insn. On the
      // other hand, we introduce an extra dependency, so this is not necessarily faster.
      if (opcode16 != kThumbBkpt && r_dest.Low8() &&
          InexpensiveConstantInt(displacement & ~(0x1f << scale), Instruction::ADD_INT)) {
        // We can use the 16-bit Thumb opcode for the load.
        OpRegRegImm(kOpAdd, r_dest, r_base, displacement & ~(0x1f << scale));
        load = NewLIR3(opcode16, r_dest.GetReg(), r_dest.GetReg(), (displacement >> scale) & 0x1f);
      } else {
        DCHECK_NE(opcode32, kThumbBkpt);
        OpRegRegImm(kOpAdd, r_dest, r_base, displacement & ~0x00000fff);
        load = NewLIR3(opcode32, r_dest.GetReg(), r_dest.GetReg(), displacement & 0x00000fff);
      }
    } else {
      if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) ||
          (scale != 0 && InexpensiveConstantInt(displacement, Instruction::CONST))) {
        scale = 0;  // Prefer unscaled indexing if the same number of insns.
      }
      RegStorage reg_offset = AllocTemp();
      LoadConstant(reg_offset, displacement >> scale);
      DCHECK(!r_dest.IsFloat());
      load = LoadBaseIndexed(r_base, reg_offset, r_dest, scale, size);
      FreeTemp(reg_offset);
    }
  }

  // TODO: in future may need to differentiate Dalvik accesses w/ spills
  if (mem_ref_type_ == ResourceMask::kDalvikReg) {
    DCHECK_EQ(r_base, rs_rARM_SP);
    AnnotateDalvikRegAccess(load, displacement >> 2, true /* is_load */, r_dest.Is64Bit());
  }
  return load;
}
1026
Vladimir Marko674744e2014-04-24 15:18:26 +01001027LIR* ArmMir2Lir::LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
Andreas Gampe3c12c512014-06-24 18:46:29 +00001028 OpSize size, VolatileKind is_volatile) {
buzbee695d13a2014-04-19 13:32:20 -07001029 // TODO: base this on target.
1030 if (size == kWord) {
1031 size = k32;
1032 }
Andreas Gampe3c12c512014-06-24 18:46:29 +00001033 LIR* load;
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001034 if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
1035 !cu_->compiler_driver->GetInstructionSetFeatures()->
Ian Rogersd582fa42014-11-05 23:46:43 -08001036 AsArmInstructionSetFeatures()->HasAtomicLdrdAndStrd()) {
Andreas Gampe3c12c512014-06-24 18:46:29 +00001037 // Only 64-bit load needs special handling.
1038 // If the cpu supports LPAE, aligned LDRD is atomic - fall through to LoadBaseDisp().
1039 DCHECK(!r_dest.IsFloat()); // See RegClassForFieldLoadSave().
1040 // Use LDREXD for the atomic load. (Expect displacement > 0, don't optimize for == 0.)
1041 RegStorage r_ptr = AllocTemp();
1042 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
1043 LIR* lir = NewLIR3(kThumb2Ldrexd, r_dest.GetLowReg(), r_dest.GetHighReg(), r_ptr.GetReg());
1044 FreeTemp(r_ptr);
1045 return lir;
1046 } else {
1047 load = LoadBaseDispBody(r_base, displacement, r_dest, size);
1048 }
1049
1050 if (UNLIKELY(is_volatile == kVolatile)) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001051 GenMemBarrier(kLoadAny);
Andreas Gampe3c12c512014-06-24 18:46:29 +00001052 }
1053
1054 return load;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001055}
1056
Brian Carlstrom7940e442013-07-12 13:46:57 -07001057
buzbee2700f7e2014-03-07 09:46:20 -08001058LIR* ArmMir2Lir::StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src,
1059 OpSize size) {
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001060 LIR* store = nullptr;
1061 ArmOpcode opcode16 = kThumbBkpt; // 16-bit Thumb opcode.
1062 ArmOpcode opcode32 = kThumbBkpt; // 32-bit Thumb2 opcode.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001063 bool short_form = false;
buzbee091cc402014-03-31 10:14:40 -07001064 bool all_low = r_src.Is32Bit() && r_base.Low8() && r_src.Low8();
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001065 int scale = 0; // Used for opcode16 and some indexed loads.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001066 bool already_generated = false;
1067 switch (size) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001068 case kDouble:
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001069 // Intentional fall-though.
Vladimir Markodb9d5232014-06-10 18:15:57 +01001070 case k64:
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001071 if (r_src.IsFloat()) {
Zheng Xu5667fdb2014-10-23 18:29:55 +08001072 // Note: If the register is retrieved by register allocator, it should never be a pair.
1073 // But some functions in mir2lir assume 64-bit registers are 32-bit register pairs.
1074 // TODO: Rework Mir2Lir::LoadArg() and Mir2Lir::LoadArgDirect().
1075 if (r_src.IsPair()) {
1076 r_src = As64BitFloatReg(r_src);
1077 }
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001078 DCHECK(!r_src.IsPair());
Vladimir Marko37573972014-06-16 10:32:25 +01001079 store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrd, r_base, displacement, r_src);
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001080 } else {
Vladimir Markodb9d5232014-06-10 18:15:57 +01001081 DCHECK(r_src.IsPair());
Vladimir Marko37573972014-06-16 10:32:25 +01001082 store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2StrdI8, r_base, displacement, r_src);
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001083 }
1084 already_generated = true;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001085 break;
1086 case kSingle:
buzbee091cc402014-03-31 10:14:40 -07001087 // Intentional fall-through.
buzbee695d13a2014-04-19 13:32:20 -07001088 case k32:
buzbee091cc402014-03-31 10:14:40 -07001089 // Intentional fall-through.
buzbee695d13a2014-04-19 13:32:20 -07001090 case kReference:
buzbee091cc402014-03-31 10:14:40 -07001091 if (r_src.IsFloat()) {
1092 DCHECK(r_src.IsSingle());
Vladimir Marko37573972014-06-16 10:32:25 +01001093 store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrs, r_base, displacement, r_src);
Vladimir Markodb9d5232014-06-10 18:15:57 +01001094 already_generated = true;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001095 break;
1096 }
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001097 DCHECK_EQ((displacement & 0x3), 0);
1098 scale = 2;
buzbee091cc402014-03-31 10:14:40 -07001099 if (r_src.Low8() && (r_base == rs_r13sp) && (displacement <= 1020) && (displacement >= 0)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001100 short_form = true;
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001101 opcode16 = kThumbStrSpRel;
1102 } else {
1103 short_form = all_low && (displacement >> (5 + scale)) == 0;
1104 opcode16 = kThumbStrRRI5;
1105 opcode32 = kThumb2StrRRI12;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001106 }
1107 break;
1108 case kUnsignedHalf:
1109 case kSignedHalf:
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001110 DCHECK_EQ((displacement & 0x1), 0);
1111 scale = 1;
1112 short_form = all_low && (displacement >> (5 + scale)) == 0;
1113 opcode16 = kThumbStrhRRI5;
1114 opcode32 = kThumb2StrhRRI12;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001115 break;
1116 case kUnsignedByte:
1117 case kSignedByte:
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001118 DCHECK_EQ(scale, 0); // Keep scale = 0.
1119 short_form = all_low && (displacement >> (5 + scale)) == 0;
1120 opcode16 = kThumbStrbRRI5;
1121 opcode32 = kThumb2StrbRRI12;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001122 break;
1123 default:
1124 LOG(FATAL) << "Bad size: " << size;
1125 }
1126 if (!already_generated) {
1127 if (short_form) {
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001128 store = NewLIR3(opcode16, r_src.GetReg(), r_base.GetReg(), displacement >> scale);
1129 } else if ((displacement >> 12) == 0) {
1130 store = NewLIR3(opcode32, r_src.GetReg(), r_base.GetReg(), displacement);
1131 } else if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) &&
1132 InexpensiveConstantInt(displacement & ~0x00000fff, Instruction::ADD_INT)) {
1133 // In this case, using StoreIndexed would emit 3 insns (movw+movt+str) but we can
1134 // actually do it in two because we know that the kOpAdd is a single insn. On the
1135 // other hand, we introduce an extra dependency, so this is not necessarily faster.
buzbee2700f7e2014-03-07 09:46:20 -08001136 RegStorage r_scratch = AllocTemp();
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001137 if (opcode16 != kThumbBkpt && r_src.Low8() && r_scratch.Low8() &&
1138 InexpensiveConstantInt(displacement & ~(0x1f << scale), Instruction::ADD_INT)) {
 1139        // We can use the 16-bit Thumb opcode for the store.
1140 OpRegRegImm(kOpAdd, r_scratch, r_base, displacement & ~(0x1f << scale));
1141 store = NewLIR3(opcode16, r_src.GetReg(), r_scratch.GetReg(),
1142 (displacement >> scale) & 0x1f);
1143 } else {
1144 DCHECK_NE(opcode32, kThumbBkpt);
1145 OpRegRegImm(kOpAdd, r_scratch, r_base, displacement & ~0x00000fff);
1146 store = NewLIR3(opcode32, r_src.GetReg(), r_scratch.GetReg(), displacement & 0x00000fff);
1147 }
1148 FreeTemp(r_scratch);
1149 } else {
1150 if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) ||
1151 (scale != 0 && InexpensiveConstantInt(displacement, Instruction::CONST))) {
1152 scale = 0; // Prefer unscaled indexing if the same number of insns.
1153 }
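      // Last resort: materialize (displacement >> scale) in a temp and emit a register-offset
      // store, i.e. [r_base, r_scratch, LSL #scale].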
1154 RegStorage r_scratch = AllocTemp();
1155 LoadConstant(r_scratch, displacement >> scale);
Vladimir Markodb9d5232014-06-10 18:15:57 +01001156 DCHECK(!r_src.IsFloat());
Vladimir Markoaed3ad72014-12-03 12:16:56 +00001157 store = StoreBaseIndexed(r_base, r_scratch, r_src, scale, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001158 FreeTemp(r_scratch);
1159 }
1160 }
1161
1162 // TODO: In future, may need to differentiate Dalvik & spill accesses
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001163 if (mem_ref_type_ == ResourceMask::kDalvikReg) {
Ian Rogersb28c1c02014-11-08 11:21:21 -08001164 DCHECK_EQ(r_base, rs_rARM_SP);
buzbee2700f7e2014-03-07 09:46:20 -08001165 AnnotateDalvikRegAccess(store, displacement >> 2, false /* is_load */, r_src.Is64Bit());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001166 }
1167 return store;
1168}
1169
Andreas Gampede686762014-06-24 18:42:06 +00001170LIR* ArmMir2Lir::StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
Andreas Gampe3c12c512014-06-24 18:46:29 +00001171 OpSize size, VolatileKind is_volatile) {
1172 if (UNLIKELY(is_volatile == kVolatile)) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001173 // Ensure that prior accesses become visible to other threads first.
1174 GenMemBarrier(kAnyStore);
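    // kAnyStore orders all prior loads and stores before the store below; on ARM this is
    // expected to come out as a DMB.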
Andreas Gampe2689fba2014-06-23 13:23:04 -07001175 }
Andreas Gampe3c12c512014-06-24 18:46:29 +00001176
1177 LIR* store;
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001178 if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
1179 !cu_->compiler_driver->GetInstructionSetFeatures()->
Ian Rogersd582fa42014-11-05 23:46:43 -08001180 AsArmInstructionSetFeatures()->HasAtomicLdrdAndStrd()) {
Andreas Gampe3c12c512014-06-24 18:46:29 +00001181 // Only 64-bit store needs special handling.
 1182    // If the CPU supports LPAE, an aligned STRD is already atomic - such targets fall through to StoreBaseDispBody().
1183 // Use STREXD for the atomic store. (Expect displacement > 0, don't optimize for == 0.)
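    // Rough shape of the emitted loop (sketch, assuming the second temp is available):
    //     add    r_ptr, r_base, #displacement
    //   retry:
    //     ldrexd r_temp, r_temp_high, [r_ptr]        @ claims the exclusive monitor, value unused
    //     strexd r_temp, r_src_lo, r_src_hi, [r_ptr]
    //     cmp    r_temp, #0                          @ 0 means the store succeeded
    //     bne    retry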
1184 DCHECK(!r_src.IsFloat()); // See RegClassForFieldLoadSave().
1185 RegStorage r_ptr = AllocTemp();
1186 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
1187 LIR* fail_target = NewLIR0(kPseudoTargetLabel);
1188 // We have only 5 temporary registers available and if r_base, r_src and r_ptr already
1189 // take 4, we can't directly allocate 2 more for LDREXD temps. In that case clobber r_ptr
1190 // in LDREXD and recalculate it from r_base.
1191 RegStorage r_temp = AllocTemp();
Serguei Katkov9ee45192014-07-17 14:39:03 +07001192 RegStorage r_temp_high = AllocTemp(false); // We may not have another temp.
Andreas Gampe3c12c512014-06-24 18:46:29 +00001193 if (r_temp_high.Valid()) {
1194 NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_temp_high.GetReg(), r_ptr.GetReg());
1195 FreeTemp(r_temp_high);
1196 FreeTemp(r_temp);
1197 } else {
1198 // If we don't have another temp, clobber r_ptr in LDREXD and reload it.
1199 NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_ptr.GetReg(), r_ptr.GetReg());
1200 FreeTemp(r_temp); // May need the temp for kOpAdd.
1201 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
1202 }
1203 store = NewLIR4(kThumb2Strexd, r_temp.GetReg(), r_src.GetLowReg(), r_src.GetHighReg(),
1204 r_ptr.GetReg());
1205 OpCmpImmBranch(kCondNe, r_temp, 0, fail_target);
1206 FreeTemp(r_ptr);
1207 } else {
1208 // TODO: base this on target.
1209 if (size == kWord) {
1210 size = k32;
1211 }
1212
1213 store = StoreBaseDispBody(r_base, displacement, r_src, size);
1214 }
1215
1216 if (UNLIKELY(is_volatile == kVolatile)) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001217 // Preserve order with respect to any subsequent volatile loads.
1218 // We need StoreLoad, but that generally requires the most expensive barrier.
1219 GenMemBarrier(kAnyAny);
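    // kAnyAny is the full barrier (a DMB on ARM), which is the closest available
    // approximation of StoreLoad.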
Andreas Gampe3c12c512014-06-24 18:46:29 +00001220 }
1221
1222 return store;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001223}
1224
buzbee2700f7e2014-03-07 09:46:20 -08001225LIR* ArmMir2Lir::OpFpRegCopy(RegStorage r_dest, RegStorage r_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001226 int opcode;
buzbee091cc402014-03-31 10:14:40 -07001227 DCHECK_EQ(r_dest.IsDouble(), r_src.IsDouble());
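  // Select the copy form: VMOV.F64 for double<->double, VMOV.F32 for single<->single, and
  // kThumb2Fmsr / kThumb2Fmrs for core-to-single and single-to-core moves respectively.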
1228 if (r_dest.IsDouble()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001229 opcode = kThumb2Vmovd;
1230 } else {
buzbee091cc402014-03-31 10:14:40 -07001231 if (r_dest.IsSingle()) {
1232 opcode = r_src.IsSingle() ? kThumb2Vmovs : kThumb2Fmsr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001233 } else {
buzbee091cc402014-03-31 10:14:40 -07001234 DCHECK(r_src.IsSingle());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001235 opcode = kThumb2Fmrs;
1236 }
1237 }
buzbee2700f7e2014-03-07 09:46:20 -08001238 LIR* res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001239 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) {
1240 res->flags.is_nop = true;
1241 }
1242 return res;
1243}
1244
buzbee2700f7e2014-03-07 09:46:20 -08001245LIR* ArmMir2Lir::OpMem(OpKind op, RegStorage r_base, int disp) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001246 UNUSED(op, r_base, disp);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001247 LOG(FATAL) << "Unexpected use of OpMem for Arm";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001248 UNREACHABLE();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001249}
1250
Andreas Gampe98430592014-07-27 19:44:50 -07001251LIR* ArmMir2Lir::InvokeTrampoline(OpKind op, RegStorage r_tgt, QuickEntrypointEnum trampoline) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001252 UNUSED(trampoline); // The address of the trampoline is already loaded into r_tgt.
Andreas Gampe98430592014-07-27 19:44:50 -07001253 return OpReg(op, r_tgt);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001254}
1255
Serban Constantinescu63999682014-07-15 17:44:21 +01001256size_t ArmMir2Lir::GetInstructionOffset(LIR* lir) {
1257 uint64_t check_flags = GetTargetInstFlags(lir->opcode);
1258 DCHECK((check_flags & IS_LOAD) || (check_flags & IS_STORE));
1259 size_t offset = (check_flags & IS_TERTIARY_OP) ? lir->operands[2] : 0;
1260
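  // Some load/store encodings keep their offset operand pre-scaled (the SCALED_OFFSET_X2/X4
  // flags), so convert it back to a byte offset here.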
1261 if (check_flags & SCALED_OFFSET_X2) {
1262 offset = offset * 2;
1263 } else if (check_flags & SCALED_OFFSET_X4) {
1264 offset = offset * 4;
1265 }
1266 return offset;
1267}
1268
Brian Carlstrom7940e442013-07-12 13:46:57 -07001269} // namespace art