/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* This file contains codegen for the Thumb2 ISA. */

#include "arch/instruction_set_features.h"
#include "arm_lir.h"
#include "codegen_arm.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "dex/reg_storage_eq.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array-inl.h"
#include "utils.h"

namespace art {

LIR* ArmMir2Lir::OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target) {
  OpRegReg(kOpCmp, src1, src2);
  return OpCondBranch(cond, target);
}

/*
 * Generate a Thumb2 IT instruction, which can nullify up to
 * four subsequent instructions based on a condition and its
 * inverse.  The condition applies to the first instruction, which
 * is executed if the condition is met.  The string "guide" consists
 * of 0 to 3 chars, and applies to the 2nd through 4th instruction.
 * A "T" means the instruction is executed if the condition is
 * met, and an "E" means the instruction is executed if the condition
 * is not met.
 */
LIR* ArmMir2Lir::OpIT(ConditionCode ccode, const char* guide) {
  int mask;
  int mask3 = 0;
  int mask2 = 0;
  int mask1 = 0;
  ArmConditionCode code = ArmConditionEncoding(ccode);
  int cond_bit = code & 1;
  int alt_bit = cond_bit ^ 1;

  switch (strlen(guide)) {
    case 3:
      mask1 = (guide[2] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 2:
      mask2 = (guide[1] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 1:
      mask3 = (guide[0] == 'T') ? cond_bit : alt_bit;
      break;
    case 0:
      break;
    default:
      LOG(FATAL) << "OAT: bad case in OpIT";
      UNREACHABLE();
  }
  mask = (mask3 << 3) | (mask2 << 2) | (mask1 << 1) |
         (1 << (3 - strlen(guide)));
  return NewLIR2(kThumb2It, code, mask);
}
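
// Illustrative note (editor's example): for OpIT(kCondEq, "E") the ARM encoding of EQ is
// 0b0000, so cond_bit == 0 and alt_bit == 1. The one-char guide gives mask3 = alt_bit = 1
// plus the terminating bit at position (3 - 1), i.e. mask = 0b1100, which is the Thumb-2
// ITE EQ encoding: the bits after the first-condition slot select Then/Else and a trailing
// 1 marks the end of the IT block. A guide of "" degenerates to a plain IT with mask = 0b1000.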

void ArmMir2Lir::UpdateIT(LIR* it, const char* new_guide) {
  int mask;
  int mask3 = 0;
  int mask2 = 0;
  int mask1 = 0;
  ArmConditionCode code = static_cast<ArmConditionCode>(it->operands[0]);
  int cond_bit = code & 1;
  int alt_bit = cond_bit ^ 1;

  switch (strlen(new_guide)) {
    case 3:
      mask1 = (new_guide[2] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 2:
      mask2 = (new_guide[1] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 1:
      mask3 = (new_guide[0] == 'T') ? cond_bit : alt_bit;
      break;
    case 0:
      break;
    default:
      LOG(FATAL) << "OAT: bad case in UpdateIT";
      UNREACHABLE();
  }
  mask = (mask3 << 3) | (mask2 << 2) | (mask1 << 1) |
         (1 << (3 - strlen(new_guide)));
  it->operands[1] = mask;
}

void ArmMir2Lir::OpEndIT(LIR* it) {
  // TODO: use the 'it' pointer to do some checks with the LIR, for example
  //       we could check that the number of instructions matches the mask
  //       in the IT instruction.
  CHECK(it != nullptr);
  GenBarrier();
}

/*
 * 64-bit 3-way compare function.
 *     mov   rX, #-1
 *     cmp   op1hi, op2hi
 *     blt   done
 *     bgt   flip
 *     sub   rX, op1lo, op2lo (treat as unsigned)
 *     beq   done
 *     ite   hi
 *     mov(hi)   rX, #-1
 *     mov(!hi)  rX, #1
 * flip:
 *     neg   rX
 * done:
 */
void ArmMir2Lir::GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) {
  LIR* target1;
  LIR* target2;
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  rl_src2 = LoadValueWide(rl_src2, kCoreReg);
  RegStorage t_reg = AllocTemp();
  LoadConstant(t_reg, -1);
  OpRegReg(kOpCmp, rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
  LIR* branch1 = OpCondBranch(kCondLt, NULL);
  LIR* branch2 = OpCondBranch(kCondGt, NULL);
  OpRegRegReg(kOpSub, t_reg, rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
  LIR* branch3 = OpCondBranch(kCondEq, NULL);

  LIR* it = OpIT(kCondHi, "E");
  NewLIR2(kThumb2MovI8M, t_reg.GetReg(), ModifiedImmediate(-1));
  LoadConstant(t_reg, 1);
  OpEndIT(it);

  target2 = NewLIR0(kPseudoTargetLabel);
  OpRegReg(kOpNeg, t_reg, t_reg);

  target1 = NewLIR0(kPseudoTargetLabel);

  RegLocation rl_temp = LocCReturn();  // Just using as template, will change
  rl_temp.reg.SetReg(t_reg.GetReg());
  StoreValue(rl_dest, rl_temp);
  FreeTemp(t_reg);

  branch1->target = target1;
  branch2->target = target2;
  branch3->target = branch1->target;
}

void ArmMir2Lir::GenFusedLongCmpImmBranch(BasicBlock* bb, RegLocation rl_src1,
                                          int64_t val, ConditionCode ccode) {
  int32_t val_lo = Low32Bits(val);
  int32_t val_hi = High32Bits(val);
  DCHECK_GE(ModifiedImmediate(val_lo), 0);
  DCHECK_GE(ModifiedImmediate(val_hi), 0);
  LIR* taken = &block_label_list_[bb->taken];
  LIR* not_taken = &block_label_list_[bb->fall_through];
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  RegStorage low_reg = rl_src1.reg.GetLow();
  RegStorage high_reg = rl_src1.reg.GetHigh();

  if (val == 0 && (ccode == kCondEq || ccode == kCondNe)) {
    RegStorage t_reg = AllocTemp();
    NewLIR4(kThumb2OrrRRRs, t_reg.GetReg(), low_reg.GetReg(), high_reg.GetReg(), 0);
    FreeTemp(t_reg);
    OpCondBranch(ccode, taken);
    return;
  }

  switch (ccode) {
    case kCondEq:
    case kCondNe:
      OpCmpImmBranch(kCondNe, high_reg, val_hi, (ccode == kCondEq) ? not_taken : taken);
      break;
    case kCondLt:
      OpCmpImmBranch(kCondLt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondGt, high_reg, val_hi, not_taken);
      ccode = kCondUlt;
      break;
    case kCondLe:
      OpCmpImmBranch(kCondLt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondGt, high_reg, val_hi, not_taken);
      ccode = kCondLs;
      break;
    case kCondGt:
      OpCmpImmBranch(kCondGt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondLt, high_reg, val_hi, not_taken);
      ccode = kCondHi;
      break;
    case kCondGe:
      OpCmpImmBranch(kCondGt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondLt, high_reg, val_hi, not_taken);
      ccode = kCondUge;
      break;
    default:
      LOG(FATAL) << "Unexpected ccode: " << ccode;
  }
  OpCmpImmBranch(ccode, low_reg, val_lo, taken);
}
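
// Editor's note: the switch above deliberately falls back to an *unsigned* condition
// (kCondUlt/kCondLs/kCondHi/kCondUge) for the final low-word compare. Example:
// src1 == 0x0000000080000000 and val == 1 with ccode == kCondGt. The high words are equal,
// so the low words decide; compared as signed 32-bit values 0x80000000 would look negative
// and "less than" 1, but as the low half of a positive 64-bit number it is larger, which is
// exactly what the unsigned kCondHi compare captures.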

void ArmMir2Lir::GenSelectConst32(RegStorage left_op, RegStorage right_op, ConditionCode code,
                                  int32_t true_val, int32_t false_val, RegStorage rs_dest,
                                  RegisterClass dest_reg_class) {
  UNUSED(dest_reg_class);
  // TODO: Generalize the IT below to accept more than one-instruction loads.
  DCHECK(InexpensiveConstantInt(true_val));
  DCHECK(InexpensiveConstantInt(false_val));

  if ((true_val == 0 && code == kCondEq) ||
      (false_val == 0 && code == kCondNe)) {
    OpRegRegReg(kOpSub, rs_dest, left_op, right_op);
    DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
    LIR* it = OpIT(kCondNe, "");
    LoadConstant(rs_dest, code == kCondEq ? false_val : true_val);
    OpEndIT(it);
    return;
  }

  OpRegReg(kOpCmp, left_op, right_op);  // Same?
  LIR* it = OpIT(code, "E");            // if-convert the test
  LoadConstant(rs_dest, true_val);      // .eq case - load true
  LoadConstant(rs_dest, false_val);     // .ne case - load false
  OpEndIT(it);
}
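
// Rough shape of the code emitted by the general path above (schematic only, register
// names are placeholders):
//     cmp   rLEFT, rRIGHT
//     ite   <code>
//     mov   rDEST, #true_val     ; executed when <code> holds
//     mov   rDEST, #false_val    ; executed otherwise
// The special case at the top instead folds the comparison into a flag-setting SUBS, so
// only the "other" constant needs a conditional load.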

void ArmMir2Lir::GenSelect(BasicBlock* bb, MIR* mir) {
  UNUSED(bb);
  RegLocation rl_result;
  RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
  RegLocation rl_dest = mir_graph_->GetDest(mir);
  // Avoid using float regs here.
  RegisterClass src_reg_class = rl_src.ref ? kRefReg : kCoreReg;
  RegisterClass result_reg_class = rl_dest.ref ? kRefReg : kCoreReg;
  rl_src = LoadValue(rl_src, src_reg_class);
  ConditionCode ccode = mir->meta.ccode;
  if (mir->ssa_rep->num_uses == 1) {
    // CONST case
    int true_val = mir->dalvikInsn.vB;
    int false_val = mir->dalvikInsn.vC;
    rl_result = EvalLoc(rl_dest, result_reg_class, true);
    // Change kCondNe to kCondEq for the special cases below.
    if (ccode == kCondNe) {
      ccode = kCondEq;
      std::swap(true_val, false_val);
    }
    bool cheap_false_val = InexpensiveConstantInt(false_val);
    if (cheap_false_val && ccode == kCondEq && (true_val == 0 || true_val == -1)) {
      OpRegRegImm(kOpSub, rl_result.reg, rl_src.reg, -true_val);
      DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
      LIR* it = OpIT(true_val == 0 ? kCondNe : kCondUge, "");
      LoadConstant(rl_result.reg, false_val);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    } else if (cheap_false_val && ccode == kCondEq && true_val == 1) {
      OpRegRegImm(kOpRsub, rl_result.reg, rl_src.reg, 1);
      DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
      LIR* it = OpIT(kCondLs, "");
      LoadConstant(rl_result.reg, false_val);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    } else if (cheap_false_val && InexpensiveConstantInt(true_val)) {
      OpRegImm(kOpCmp, rl_src.reg, 0);
      LIR* it = OpIT(ccode, "E");
      LoadConstant(rl_result.reg, true_val);
      LoadConstant(rl_result.reg, false_val);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    } else {
      // Unlikely case - could be tuned.
      RegStorage t_reg1 = AllocTypedTemp(false, result_reg_class);
      RegStorage t_reg2 = AllocTypedTemp(false, result_reg_class);
      LoadConstant(t_reg1, true_val);
      LoadConstant(t_reg2, false_val);
      OpRegImm(kOpCmp, rl_src.reg, 0);
      LIR* it = OpIT(ccode, "E");
      OpRegCopy(rl_result.reg, t_reg1);
      OpRegCopy(rl_result.reg, t_reg2);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    }
  } else {
    // MOVE case
    RegLocation rl_true = mir_graph_->reg_location_[mir->ssa_rep->uses[1]];
    RegLocation rl_false = mir_graph_->reg_location_[mir->ssa_rep->uses[2]];
    rl_true = LoadValue(rl_true, result_reg_class);
    rl_false = LoadValue(rl_false, result_reg_class);
    rl_result = EvalLoc(rl_dest, result_reg_class, true);
    OpRegImm(kOpCmp, rl_src.reg, 0);
    LIR* it = nullptr;
    if (rl_result.reg.GetReg() == rl_true.reg.GetReg()) {  // Is the "true" case already in place?
      it = OpIT(NegateComparison(ccode), "");
      OpRegCopy(rl_result.reg, rl_false.reg);
    } else if (rl_result.reg.GetReg() == rl_false.reg.GetReg()) {  // False case in place?
      it = OpIT(ccode, "");
      OpRegCopy(rl_result.reg, rl_true.reg);
    } else {  // Normal - select between the two.
      it = OpIT(ccode, "E");
      OpRegCopy(rl_result.reg, rl_true.reg);
      OpRegCopy(rl_result.reg, rl_false.reg);
    }
    OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
  }
  StoreValue(rl_dest, rl_result);
}

void ArmMir2Lir::GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir) {
  RegLocation rl_src1 = mir_graph_->GetSrcWide(mir, 0);
  RegLocation rl_src2 = mir_graph_->GetSrcWide(mir, 2);
  // Normalize such that if either operand is constant, src2 will be constant.
  ConditionCode ccode = mir->meta.ccode;
  if (rl_src1.is_const) {
    std::swap(rl_src1, rl_src2);
    ccode = FlipComparisonOrder(ccode);
  }
  if (rl_src2.is_const) {
    rl_src2 = UpdateLocWide(rl_src2);
    // Do special compare/branch against simple const operand if not already in registers.
    int64_t val = mir_graph_->ConstantValueWide(rl_src2);
    if ((rl_src2.location != kLocPhysReg) &&
        ((ModifiedImmediate(Low32Bits(val)) >= 0) && (ModifiedImmediate(High32Bits(val)) >= 0))) {
      GenFusedLongCmpImmBranch(bb, rl_src1, val, ccode);
      return;
    }
  }
  LIR* taken = &block_label_list_[bb->taken];
  LIR* not_taken = &block_label_list_[bb->fall_through];
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  rl_src2 = LoadValueWide(rl_src2, kCoreReg);
  OpRegReg(kOpCmp, rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
  switch (ccode) {
    case kCondEq:
      OpCondBranch(kCondNe, not_taken);
      break;
    case kCondNe:
      OpCondBranch(kCondNe, taken);
      break;
    case kCondLt:
      OpCondBranch(kCondLt, taken);
      OpCondBranch(kCondGt, not_taken);
      ccode = kCondUlt;
      break;
    case kCondLe:
      OpCondBranch(kCondLt, taken);
      OpCondBranch(kCondGt, not_taken);
      ccode = kCondLs;
      break;
    case kCondGt:
      OpCondBranch(kCondGt, taken);
      OpCondBranch(kCondLt, not_taken);
      ccode = kCondHi;
      break;
    case kCondGe:
      OpCondBranch(kCondGt, taken);
      OpCondBranch(kCondLt, not_taken);
      ccode = kCondUge;
      break;
    default:
      LOG(FATAL) << "Unexpected ccode: " << ccode;
  }
  OpRegReg(kOpCmp, rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
  OpCondBranch(ccode, taken);
}

/*
 * Generate a register comparison to an immediate and branch.  Caller
 * is responsible for setting branch target field.
 */
LIR* ArmMir2Lir::OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, LIR* target) {
  LIR* branch = nullptr;
  ArmConditionCode arm_cond = ArmConditionEncoding(cond);
  /*
   * A common use of OpCmpImmBranch is for null checks, and using the Thumb 16-bit
   * compare-and-branch if zero is ideal if it will reach.  However, because null checks
   * branch forward to a slow path, they will frequently not reach - and thus have to
   * be converted to a long form during assembly (which will trigger another assembly
   * pass).  Here we estimate the branch distance for checks, and if large directly
   * generate the long form in an attempt to avoid an extra assembly pass.
   * TODO: consider interspersing slowpaths in code following unconditional branches.
   */
  bool skip = ((target != NULL) && (target->opcode == kPseudoThrowTarget));
  skip &= ((mir_graph_->GetNumDalvikInsns() - current_dalvik_offset_) > 64);
  if (!skip && reg.Low8() && (check_value == 0)) {
    if (arm_cond == kArmCondEq || arm_cond == kArmCondNe) {
      branch = NewLIR2((arm_cond == kArmCondEq) ? kThumb2Cbz : kThumb2Cbnz,
                       reg.GetReg(), 0);
    } else if (arm_cond == kArmCondLs) {
      // kArmCondLs is an unsigned less or equal. A comparison r <= 0 is then the same as cbz.
      // This case happens for a bounds check of array[0].
      branch = NewLIR2(kThumb2Cbz, reg.GetReg(), 0);
    }
  }

  if (branch == nullptr) {
    OpRegImm(kOpCmp, reg, check_value);
    branch = NewLIR2(kThumbBCond, 0, arm_cond);
  }

  branch->target = target;
  return branch;
}
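
// Editor's note: CBZ/CBNZ (kThumb2Cbz/kThumb2Cbnz) are 16-bit encodings that can only test
// a low register (r0-r7) against zero and only branch forward a short distance (0-126
// bytes), which is why the code above falls back to CMP + B<cond> whenever the register is
// not Low8(), the compared value is non-zero, or the branch is expected to need the long
// form anyway.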

LIR* ArmMir2Lir::OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src) {
  LIR* res;
  int opcode;
  // If src or dest is a pair, we'll be using low reg.
  if (r_dest.IsPair()) {
    r_dest = r_dest.GetLow();
  }
  if (r_src.IsPair()) {
    r_src = r_src.GetLow();
  }
  if (r_dest.IsFloat() || r_src.IsFloat())
    return OpFpRegCopy(r_dest, r_src);
  if (r_dest.Low8() && r_src.Low8())
    opcode = kThumbMovRR;
  else if (!r_dest.Low8() && !r_src.Low8())
    opcode = kThumbMovRR_H2H;
  else if (r_dest.Low8())
    opcode = kThumbMovRR_H2L;
  else
    opcode = kThumbMovRR_L2H;
  res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
  if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) {
    res->flags.is_nop = true;
  }
  return res;
}

void ArmMir2Lir::OpRegCopy(RegStorage r_dest, RegStorage r_src) {
  if (r_dest != r_src) {
    LIR* res = OpRegCopyNoInsert(r_dest, r_src);
    AppendLIR(res);
  }
}

void ArmMir2Lir::OpRegCopyWide(RegStorage r_dest, RegStorage r_src) {
  if (r_dest != r_src) {
    bool dest_fp = r_dest.IsFloat();
    bool src_fp = r_src.IsFloat();
    DCHECK(r_dest.Is64Bit());
    DCHECK(r_src.Is64Bit());
    // Note: a register handed out by the register allocator should never be a pair,
    // but some functions in mir_to_lir assume 64-bit registers are 32-bit register pairs.
    // TODO: Rework Mir2Lir::LoadArg() and Mir2Lir::LoadArgDirect().
    if (dest_fp && r_dest.IsPair()) {
      r_dest = As64BitFloatReg(r_dest);
    }
    if (src_fp && r_src.IsPair()) {
      r_src = As64BitFloatReg(r_src);
    }
    if (dest_fp) {
      if (src_fp) {
        OpRegCopy(r_dest, r_src);
      } else {
        NewLIR3(kThumb2Fmdrr, r_dest.GetReg(), r_src.GetLowReg(), r_src.GetHighReg());
      }
    } else {
      if (src_fp) {
        NewLIR3(kThumb2Fmrrd, r_dest.GetLowReg(), r_dest.GetHighReg(), r_src.GetReg());
      } else {
        // Handle overlap
        if (r_src.GetHighReg() == r_dest.GetLowReg()) {
          DCHECK_NE(r_src.GetLowReg(), r_dest.GetHighReg());
          OpRegCopy(r_dest.GetHigh(), r_src.GetHigh());
          OpRegCopy(r_dest.GetLow(), r_src.GetLow());
        } else {
          OpRegCopy(r_dest.GetLow(), r_src.GetLow());
          OpRegCopy(r_dest.GetHigh(), r_src.GetHigh());
        }
      }
    }
  }
}

// Table of magic divisors
struct MagicTable {
  uint32_t magic;
  uint32_t shift;
  DividePattern pattern;
};

static const MagicTable magic_table[] = {
  {0, 0, DivideNone},        // 0
  {0, 0, DivideNone},        // 1
  {0, 0, DivideNone},        // 2
  {0x55555556, 0, Divide3},  // 3
  {0, 0, DivideNone},        // 4
  {0x66666667, 1, Divide5},  // 5
  {0x2AAAAAAB, 0, Divide3},  // 6
  {0x92492493, 2, Divide7},  // 7
  {0, 0, DivideNone},        // 8
  {0x38E38E39, 1, Divide5},  // 9
  {0x66666667, 2, Divide5},  // 10
  {0x2E8BA2E9, 1, Divide5},  // 11
  {0x2AAAAAAB, 1, Divide5},  // 12
  {0x4EC4EC4F, 2, Divide5},  // 13
  {0x92492493, 3, Divide7},  // 14
  {0x88888889, 3, Divide7},  // 15
};
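
// Editor's note on the table values: each non-trivial entry follows the Hacker's Delight
// construction magic = ceil(2^(32 + shift) / divisor). For example, ceil(2^32 / 3) ==
// 0x55555556 with shift 0 and ceil(2^33 / 5) == 0x66666667 with shift 1. Entries whose
// magic has the top bit set (7, 14, 15) behave as negative signed multipliers under SMULL,
// which is why the Divide7 pattern adds the dividend back into the high word before
// shifting.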

// Integer division by constant via reciprocal multiply (Hacker's Delight, 10-4)
bool ArmMir2Lir::SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div,
                                    RegLocation rl_src, RegLocation rl_dest, int lit) {
  UNUSED(dalvik_opcode);
  if ((lit < 0) || (lit >= static_cast<int>(sizeof(magic_table)/sizeof(magic_table[0])))) {
    return false;
  }
  DividePattern pattern = magic_table[lit].pattern;
  if (pattern == DivideNone) {
    return false;
  }

  RegStorage r_magic = AllocTemp();
  LoadConstant(r_magic, magic_table[lit].magic);
  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  RegStorage r_hi = AllocTemp();
  RegStorage r_lo = AllocTemp();

  // rl_dest and rl_src might overlap.
  // Reuse r_hi to save the div result for the remainder case.
  RegStorage r_div_result = is_div ? rl_result.reg : r_hi;

  NewLIR4(kThumb2Smull, r_lo.GetReg(), r_hi.GetReg(), r_magic.GetReg(), rl_src.reg.GetReg());
  switch (pattern) {
    case Divide3:
      OpRegRegRegShift(kOpSub, r_div_result, r_hi, rl_src.reg, EncodeShift(kArmAsr, 31));
      break;
    case Divide5:
      OpRegRegImm(kOpAsr, r_lo, rl_src.reg, 31);
      OpRegRegRegShift(kOpRsub, r_div_result, r_lo, r_hi,
                       EncodeShift(kArmAsr, magic_table[lit].shift));
      break;
    case Divide7:
      OpRegReg(kOpAdd, r_hi, rl_src.reg);
      OpRegRegImm(kOpAsr, r_lo, rl_src.reg, 31);
      OpRegRegRegShift(kOpRsub, r_div_result, r_lo, r_hi,
                       EncodeShift(kArmAsr, magic_table[lit].shift));
      break;
    default:
      LOG(FATAL) << "Unexpected pattern: " << pattern;
  }

  if (!is_div) {
    // div_result = src / lit
    // tmp1 = div_result * lit
    // dest = src - tmp1
    RegStorage tmp1 = r_lo;
    EasyMultiplyOp ops[2];

    bool canEasyMultiply = GetEasyMultiplyTwoOps(lit, ops);
    DCHECK_NE(canEasyMultiply, false);

    GenEasyMultiplyTwoOps(tmp1, r_div_result, ops);
    OpRegRegReg(kOpSub, rl_result.reg, rl_src.reg, tmp1);
  }

  StoreValue(rl_dest, rl_result);
  return true;
}
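
// Illustrative Thumb-2 sequence for lit == 3, is_div == true (schematic register names):
//     smull r_lo, r_hi, r_magic, r_src    // r_hi = (r_src * 0x55555556) >> 32
//     sub   r_dst, r_hi, r_src, asr #31   // subtract the sign word to round toward zero
// For a remainder, the quotient is rebuilt with GenEasyMultiplyTwoOps and subtracted from
// the original dividend.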

// Try to convert *lit to 1 RegRegRegShift/RegRegShift form.
bool ArmMir2Lir::GetEasyMultiplyOp(int lit, ArmMir2Lir::EasyMultiplyOp* op) {
  if (lit == 0) {
    // Special case for *divide-by-zero*. The ops won't actually be used to generate code, as
    // GenArithOpIntLit will directly generate exception-throwing code, and multiply-by-zero will
    // have been optimized away earlier.
    op->op = kOpInvalid;
    op->shift = 0;
    return true;
  }

  if (IsPowerOfTwo(lit)) {
    op->op = kOpLsl;
    op->shift = CTZ(lit);
    return true;
  }

  if (IsPowerOfTwo(lit - 1)) {
    op->op = kOpAdd;
    op->shift = CTZ(lit - 1);
    return true;
  }

  if (IsPowerOfTwo(lit + 1)) {
    op->op = kOpRsub;
    op->shift = CTZ(lit + 1);
    return true;
  }

  op->op = kOpInvalid;
  op->shift = 0;
  return false;
}

// Try to convert *lit to 1~2 RegRegRegShift/RegRegShift forms.
bool ArmMir2Lir::GetEasyMultiplyTwoOps(int lit, EasyMultiplyOp* ops) {
  if (GetEasyMultiplyOp(lit, &ops[0])) {
    ops[1].op = kOpInvalid;
    ops[1].shift = 0;
    return true;
  }

  int lit1 = lit;
  uint32_t shift = CTZ(lit1);
  if (GetEasyMultiplyOp(lit1 >> shift, &ops[0])) {
    ops[1].op = kOpLsl;
    ops[1].shift = shift;
    return true;
  }

  lit1 = lit - 1;
  shift = CTZ(lit1);
  if (GetEasyMultiplyOp(lit1 >> shift, &ops[0])) {
    ops[1].op = kOpAdd;
    ops[1].shift = shift;
    return true;
  }

  lit1 = lit + 1;
  shift = CTZ(lit1);
  if (GetEasyMultiplyOp(lit1 >> shift, &ops[0])) {
    ops[1].op = kOpRsub;
    ops[1].shift = shift;
    return true;
  }

  return false;
}
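
// Worked example (editor's illustration): lit == 10 is not a power of two and neither is
// 9 or 11, so the single-op attempt fails. CTZ(10) == 1 and 10 >> 1 == 5 == 4 + 1, so the
// result is ops[0] = {kOpAdd, shift 2} and ops[1] = {kOpLsl, shift 1}, i.e.
// x * 10 == ((x << 2) + x) << 1.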

// Generate instructions to do multiply.
// An additional temporary register is required
// if we need to generate 2 instructions and src/dest overlap.
void ArmMir2Lir::GenEasyMultiplyTwoOps(RegStorage r_dest, RegStorage r_src, EasyMultiplyOp* ops) {
  // tmp1 = (src << shift1) + [ src | -src | 0 ]
  // dest = (tmp1 << shift2) + [ src | -src | 0 ]

  RegStorage r_tmp1;
  if (ops[1].op == kOpInvalid) {
    r_tmp1 = r_dest;
  } else if (r_dest.GetReg() != r_src.GetReg()) {
    r_tmp1 = r_dest;
  } else {
    r_tmp1 = AllocTemp();
  }

  switch (ops[0].op) {
    case kOpLsl:
      OpRegRegImm(kOpLsl, r_tmp1, r_src, ops[0].shift);
      break;
    case kOpAdd:
      OpRegRegRegShift(kOpAdd, r_tmp1, r_src, r_src, EncodeShift(kArmLsl, ops[0].shift));
      break;
    case kOpRsub:
      OpRegRegRegShift(kOpRsub, r_tmp1, r_src, r_src, EncodeShift(kArmLsl, ops[0].shift));
      break;
    default:
      DCHECK_EQ(ops[0].op, kOpInvalid);
      break;
  }

  switch (ops[1].op) {
    case kOpInvalid:
      return;
    case kOpLsl:
      OpRegRegImm(kOpLsl, r_dest, r_tmp1, ops[1].shift);
      break;
    case kOpAdd:
      OpRegRegRegShift(kOpAdd, r_dest, r_src, r_tmp1, EncodeShift(kArmLsl, ops[1].shift));
      break;
    case kOpRsub:
      OpRegRegRegShift(kOpRsub, r_dest, r_src, r_tmp1, EncodeShift(kArmLsl, ops[1].shift));
      break;
    default:
      LOG(FATAL) << "Unexpected opcode passed to GenEasyMultiplyTwoOps";
      break;
  }
}

bool ArmMir2Lir::EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) {
  EasyMultiplyOp ops[2];

  if (!GetEasyMultiplyTwoOps(lit, ops)) {
    return false;
  }

  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);

  GenEasyMultiplyTwoOps(rl_result.reg, rl_src.reg, ops);
  StoreValue(rl_dest, rl_result);
  return true;
}

RegLocation ArmMir2Lir::GenDivRem(RegLocation rl_dest, RegLocation rl_src1,
                                  RegLocation rl_src2, bool is_div, int flags) {
  UNUSED(rl_dest, rl_src1, rl_src2, is_div, flags);
  LOG(FATAL) << "Unexpected use of GenDivRem for Arm";
  UNREACHABLE();
}

RegLocation ArmMir2Lir::GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit,
                                     bool is_div) {
  UNUSED(rl_dest, rl_src1, lit, is_div);
  LOG(FATAL) << "Unexpected use of GenDivRemLit for Arm";
  UNREACHABLE();
}

RegLocation ArmMir2Lir::GenDivRemLit(RegLocation rl_dest, RegStorage reg1, int lit, bool is_div) {
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);

  // Put the literal in a temp.
  RegStorage lit_temp = AllocTemp();
  LoadConstant(lit_temp, lit);
  // Use the generic case for div/rem with arg2 in a register.
  // TODO: The literal temp can be freed earlier during a modulus to reduce reg pressure.
  rl_result = GenDivRem(rl_result, reg1, lit_temp, is_div);
  FreeTemp(lit_temp);

  return rl_result;
}

RegLocation ArmMir2Lir::GenDivRem(RegLocation rl_dest, RegStorage reg1, RegStorage reg2,
                                  bool is_div) {
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (is_div) {
    // Simple case, use sdiv instruction.
    OpRegRegReg(kOpDiv, rl_result.reg, reg1, reg2);
  } else {
    // Remainder case, use the following code:
    // temp = reg1 / reg2      - integer division
    // temp = temp * reg2
    // dest = reg1 - temp

    RegStorage temp = AllocTemp();
    OpRegRegReg(kOpDiv, temp, reg1, reg2);
    OpRegReg(kOpMul, temp, reg2);
    OpRegRegReg(kOpSub, rl_result.reg, reg1, temp);
    FreeTemp(temp);
  }

  return rl_result;
}
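
// Editor's note: kOpDiv here maps to the hardware SDIV instruction, so this path is
// presumably only taken on targets that support it. ARM has no hardware remainder, so the
// modulus is formed as reg1 - (reg1 / reg2) * reg2 with a separate MUL and SUB; a single
// MLS could fuse the last two steps (compare GenMaddMsubInt below).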

bool ArmMir2Lir::GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long) {
  DCHECK_EQ(cu_->instruction_set, kThumb2);
  if (is_long) {
    return false;
  }
  RegLocation rl_src1 = info->args[0];
  RegLocation rl_src2 = info->args[1];
  rl_src1 = LoadValue(rl_src1, kCoreReg);
  rl_src2 = LoadValue(rl_src2, kCoreReg);
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  OpRegReg(kOpCmp, rl_src1.reg, rl_src2.reg);
  LIR* it = OpIT((is_min) ? kCondGt : kCondLt, "E");
  OpRegReg(kOpMov, rl_result.reg, rl_src2.reg);
  OpRegReg(kOpMov, rl_result.reg, rl_src1.reg);
  OpEndIT(it);
  StoreValue(rl_dest, rl_result);
  return true;
}
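
// Schematic of the emitted code for min(a, b) (editor's illustration, placeholder names):
//     cmp   rA, rB
//     ite   gt
//     movgt rRES, rB   ; a > b  -> take b
//     movle rRES, rA   ; a <= b -> take a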

bool ArmMir2Lir::GenInlinedPeek(CallInfo* info, OpSize size) {
  RegLocation rl_src_address = info->args[0];  // long address
  rl_src_address = NarrowRegLoc(rl_src_address);  // ignore high half in info->args[1]
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_address = LoadValue(rl_src_address, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (size == k64) {
    // Fake unaligned LDRD by two unaligned LDR instructions on ARMv7 with SCTLR.A set to 0.
    if (rl_address.reg.GetReg() != rl_result.reg.GetLowReg()) {
      Load32Disp(rl_address.reg, 0, rl_result.reg.GetLow());
      Load32Disp(rl_address.reg, 4, rl_result.reg.GetHigh());
    } else {
      Load32Disp(rl_address.reg, 4, rl_result.reg.GetHigh());
      Load32Disp(rl_address.reg, 0, rl_result.reg.GetLow());
    }
    StoreValueWide(rl_dest, rl_result);
  } else {
    DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);
    // Unaligned load with LDR and LDRSH is allowed on ARMv7 with SCTLR.A set to 0.
    LoadBaseDisp(rl_address.reg, 0, rl_result.reg, size, kNotVolatile);
    StoreValue(rl_dest, rl_result);
  }
  return true;
}

bool ArmMir2Lir::GenInlinedPoke(CallInfo* info, OpSize size) {
  RegLocation rl_src_address = info->args[0];  // long address
  rl_src_address = NarrowRegLoc(rl_src_address);  // ignore high half in info->args[1]
  RegLocation rl_src_value = info->args[2];  // [size] value
  RegLocation rl_address = LoadValue(rl_src_address, kCoreReg);
  if (size == k64) {
    // Fake unaligned STRD by two unaligned STR instructions on ARMv7 with SCTLR.A set to 0.
    RegLocation rl_value = LoadValueWide(rl_src_value, kCoreReg);
    StoreBaseDisp(rl_address.reg, 0, rl_value.reg.GetLow(), k32, kNotVolatile);
    StoreBaseDisp(rl_address.reg, 4, rl_value.reg.GetHigh(), k32, kNotVolatile);
  } else {
    DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);
    // Unaligned store with STR and STRSH is allowed on ARMv7 with SCTLR.A set to 0.
    RegLocation rl_value = LoadValue(rl_src_value, kCoreReg);
    StoreBaseDisp(rl_address.reg, 0, rl_value.reg, size, kNotVolatile);
  }
  return true;
}

// Generate a CAS with memory_order_seq_cst semantics.
bool ArmMir2Lir::GenInlinedCas(CallInfo* info, bool is_long, bool is_object) {
  DCHECK_EQ(cu_->instruction_set, kThumb2);
  // Unused - RegLocation rl_src_unsafe = info->args[0];
  RegLocation rl_src_obj = info->args[1];  // Object - known non-null
  RegLocation rl_src_offset = info->args[2];  // long low
  rl_src_offset = NarrowRegLoc(rl_src_offset);  // ignore high half in info->args[3]
  RegLocation rl_src_expected = info->args[4];  // int, long or Object
  // If is_long, high half is in info->args[5]
  RegLocation rl_src_new_value = info->args[is_long ? 6 : 5];  // int, long or Object
  // If is_long, high half is in info->args[7]
  RegLocation rl_dest = InlineTarget(info);  // boolean place for result

  // We have only 5 temporary registers available and actually only 4 if the InlineTarget
  // above locked one of the temps. For a straightforward CAS64 we need 7 registers:
  // r_ptr (1), new_value (2), expected(2) and ldrexd result (2). If neither expected nor
  // new_value is in a non-temp core register we shall reload them in the ldrex/strex loop
  // into the same temps, reducing the number of required temps down to 5. We shall work
  // around the potentially locked temp by using LR for r_ptr, unconditionally.
  // TODO: Pass information about the need for more temps to the stack frame generation
  // code so that we can rely on being able to allocate enough temps.
  DCHECK(!GetRegInfo(rs_rARM_LR)->IsTemp());
  MarkTemp(rs_rARM_LR);
  FreeTemp(rs_rARM_LR);
  LockTemp(rs_rARM_LR);
  bool load_early = true;
  if (is_long) {
    RegStorage expected_reg = rl_src_expected.reg.IsPair() ? rl_src_expected.reg.GetLow() :
        rl_src_expected.reg;
    RegStorage new_val_reg = rl_src_new_value.reg.IsPair() ? rl_src_new_value.reg.GetLow() :
        rl_src_new_value.reg;
    bool expected_is_core_reg = rl_src_expected.location == kLocPhysReg && !expected_reg.IsFloat();
    bool new_value_is_core_reg = rl_src_new_value.location == kLocPhysReg && !new_val_reg.IsFloat();
    bool expected_is_good_reg = expected_is_core_reg && !IsTemp(expected_reg);
    bool new_value_is_good_reg = new_value_is_core_reg && !IsTemp(new_val_reg);

    if (!expected_is_good_reg && !new_value_is_good_reg) {
      // None of expected/new_value is non-temp reg, need to load both late
      load_early = false;
      // Make sure they are not in the temp regs and the load will not be skipped.
      if (expected_is_core_reg) {
        FlushRegWide(rl_src_expected.reg);
        ClobberSReg(rl_src_expected.s_reg_low);
        ClobberSReg(GetSRegHi(rl_src_expected.s_reg_low));
        rl_src_expected.location = kLocDalvikFrame;
      }
      if (new_value_is_core_reg) {
        FlushRegWide(rl_src_new_value.reg);
        ClobberSReg(rl_src_new_value.s_reg_low);
        ClobberSReg(GetSRegHi(rl_src_new_value.s_reg_low));
        rl_src_new_value.location = kLocDalvikFrame;
      }
    }
  }

  // Prevent reordering with prior memory operations.
  GenMemBarrier(kAnyStore);

  RegLocation rl_object = LoadValue(rl_src_obj, kRefReg);
  RegLocation rl_new_value;
  if (!is_long) {
    rl_new_value = LoadValue(rl_src_new_value, LocToRegClass(rl_src_new_value));
  } else if (load_early) {
    rl_new_value = LoadValueWide(rl_src_new_value, kCoreReg);
  }

  if (is_object && !mir_graph_->IsConstantNullRef(rl_new_value)) {
    // Mark card for object assuming new value is stored.
    MarkGCCard(0, rl_new_value.reg, rl_object.reg);
  }

  RegLocation rl_offset = LoadValue(rl_src_offset, kCoreReg);

  RegStorage r_ptr = rs_rARM_LR;
  OpRegRegReg(kOpAdd, r_ptr, rl_object.reg, rl_offset.reg);

  // Free now unneeded rl_object and rl_offset to give more temps.
  ClobberSReg(rl_object.s_reg_low);
  FreeTemp(rl_object.reg);
  ClobberSReg(rl_offset.s_reg_low);
  FreeTemp(rl_offset.reg);

  RegLocation rl_expected;
  if (!is_long) {
    rl_expected = LoadValue(rl_src_expected, LocToRegClass(rl_src_new_value));
  } else if (load_early) {
    rl_expected = LoadValueWide(rl_src_expected, kCoreReg);
  } else {
    // NOTE: partially defined rl_expected & rl_new_value - but we just want the regs.
    RegStorage low_reg = AllocTemp();
    RegStorage high_reg = AllocTemp();
    rl_new_value.reg = RegStorage::MakeRegPair(low_reg, high_reg);
    rl_expected = rl_new_value;
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  RegStorage r_tmp = AllocTemp();
  LIR* target = NewLIR0(kPseudoTargetLabel);

  LIR* it = nullptr;
  if (is_long) {
    RegStorage r_tmp_high = AllocTemp();
    if (!load_early) {
      LoadValueDirectWide(rl_src_expected, rl_expected.reg);
    }
    NewLIR3(kThumb2Ldrexd, r_tmp.GetReg(), r_tmp_high.GetReg(), r_ptr.GetReg());
    OpRegReg(kOpSub, r_tmp, rl_expected.reg.GetLow());
    OpRegReg(kOpSub, r_tmp_high, rl_expected.reg.GetHigh());
    if (!load_early) {
      LoadValueDirectWide(rl_src_new_value, rl_new_value.reg);
    }
    // Make sure we use ORR that sets the ccode
    if (r_tmp.Low8() && r_tmp_high.Low8()) {
      NewLIR2(kThumbOrr, r_tmp.GetReg(), r_tmp_high.GetReg());
    } else {
      NewLIR4(kThumb2OrrRRRs, r_tmp.GetReg(), r_tmp.GetReg(), r_tmp_high.GetReg(), 0);
    }
    FreeTemp(r_tmp_high);  // Now unneeded

    DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
    it = OpIT(kCondEq, "T");
    NewLIR4(kThumb2Strexd /* eq */, r_tmp.GetReg(), rl_new_value.reg.GetLowReg(),
            rl_new_value.reg.GetHighReg(), r_ptr.GetReg());
  } else {
    NewLIR3(kThumb2Ldrex, r_tmp.GetReg(), r_ptr.GetReg(), 0);
    OpRegReg(kOpSub, r_tmp, rl_expected.reg);
    DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
    it = OpIT(kCondEq, "T");
    NewLIR4(kThumb2Strex /* eq */, r_tmp.GetReg(), rl_new_value.reg.GetReg(), r_ptr.GetReg(), 0);
  }

  // Still one conditional left from OpIT(kCondEq, "T") from either branch
  OpRegImm(kOpCmp /* eq */, r_tmp, 1);
  OpEndIT(it);

  OpCondBranch(kCondEq, target);

  if (!load_early) {
    FreeTemp(rl_expected.reg);  // Now unneeded.
  }

  // Prevent reordering with subsequent memory operations.
  GenMemBarrier(kLoadAny);

  // result := (tmp1 != 0) ? 0 : 1;
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  OpRegRegImm(kOpRsub, rl_result.reg, r_tmp, 1);
  DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
  it = OpIT(kCondUlt, "");
  LoadConstant(rl_result.reg, 0);  /* cc */
  FreeTemp(r_tmp);  // Now unneeded.
  OpEndIT(it);  // Barrier to terminate OpIT.

  StoreValue(rl_dest, rl_result);

  // Now, restore lr to its non-temp status.
  Clobber(rs_rARM_LR);
  UnmarkTemp(rs_rARM_LR);
  return true;
}
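
// Editor's sketch of the 32-bit retry loop emitted above (schematic assembly):
//   retry:
//     ldrex   r_tmp, [r_ptr]
//     subs    r_tmp, r_tmp, r_expected     ; 0 iff the current value matches
//     itt     eq
//     strexeq r_tmp, r_new_value, [r_ptr]  ; r_tmp := 0 on success, 1 on failure
//     cmpeq   r_tmp, #1                    ; only evaluated when the store was attempted
//     beq     retry                        ; retry if the exclusive store failed
// The trailing rsbs/it/mov sequence then turns "r_tmp == 0" into the boolean result.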

bool ArmMir2Lir::GenInlinedArrayCopyCharArray(CallInfo* info) {
  constexpr int kLargeArrayThreshold = 256;

  RegLocation rl_src = info->args[0];
  RegLocation rl_src_pos = info->args[1];
  RegLocation rl_dst = info->args[2];
  RegLocation rl_dst_pos = info->args[3];
  RegLocation rl_length = info->args[4];
  // Compile time check, handle exception by non-inline method to reduce related meta-data.
  if ((rl_src_pos.is_const && (mir_graph_->ConstantValue(rl_src_pos) < 0)) ||
      (rl_dst_pos.is_const && (mir_graph_->ConstantValue(rl_dst_pos) < 0)) ||
      (rl_length.is_const && (mir_graph_->ConstantValue(rl_length) < 0))) {
    return false;
  }

  ClobberCallerSave();
  LockCallTemps();  // Prepare for explicit register usage.
  LockTemp(rs_r12);
  RegStorage rs_src = rs_r0;
  RegStorage rs_dst = rs_r1;
  LoadValueDirectFixed(rl_src, rs_src);
  LoadValueDirectFixed(rl_dst, rs_dst);

  // Handle null pointer exception in slow-path.
  LIR* src_check_branch = OpCmpImmBranch(kCondEq, rs_src, 0, nullptr);
  LIR* dst_check_branch = OpCmpImmBranch(kCondEq, rs_dst, 0, nullptr);
  // Handle potential overlapping in slow-path.
  LIR* src_dst_same = OpCmpBranch(kCondEq, rs_src, rs_dst, nullptr);
  // Handle exception or big length in slow-path.
  RegStorage rs_length = rs_r2;
  LoadValueDirectFixed(rl_length, rs_length);
  LIR* len_neg_or_too_big = OpCmpImmBranch(kCondHi, rs_length, kLargeArrayThreshold, nullptr);
  // Src bounds check.
  RegStorage rs_pos = rs_r3;
  RegStorage rs_arr_length = rs_r12;
  LoadValueDirectFixed(rl_src_pos, rs_pos);
  LIR* src_pos_negative = OpCmpImmBranch(kCondLt, rs_pos, 0, nullptr);
  Load32Disp(rs_src, mirror::Array::LengthOffset().Int32Value(), rs_arr_length);
  OpRegReg(kOpSub, rs_arr_length, rs_pos);
  LIR* src_bad_len = OpCmpBranch(kCondLt, rs_arr_length, rs_length, nullptr);
  // Dst bounds check.
  LoadValueDirectFixed(rl_dst_pos, rs_pos);
  LIR* dst_pos_negative = OpCmpImmBranch(kCondLt, rs_pos, 0, nullptr);
  Load32Disp(rs_dst, mirror::Array::LengthOffset().Int32Value(), rs_arr_length);
  OpRegReg(kOpSub, rs_arr_length, rs_pos);
  LIR* dst_bad_len = OpCmpBranch(kCondLt, rs_arr_length, rs_length, nullptr);

  // Everything is checked now.
  OpRegImm(kOpAdd, rs_dst, mirror::Array::DataOffset(2).Int32Value());
  OpRegReg(kOpAdd, rs_dst, rs_pos);
  OpRegReg(kOpAdd, rs_dst, rs_pos);
  OpRegImm(kOpAdd, rs_src, mirror::Array::DataOffset(2).Int32Value());
  LoadValueDirectFixed(rl_src_pos, rs_pos);
  OpRegReg(kOpAdd, rs_src, rs_pos);
  OpRegReg(kOpAdd, rs_src, rs_pos);

  RegStorage rs_tmp = rs_pos;
  OpRegRegImm(kOpLsl, rs_length, rs_length, 1);

  // Copy one element.
  OpRegRegImm(kOpAnd, rs_tmp, rs_length, 2);
  LIR* jmp_to_begin_loop = OpCmpImmBranch(kCondEq, rs_tmp, 0, nullptr);
  OpRegImm(kOpSub, rs_length, 2);
  LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, kSignedHalf);
  StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, kSignedHalf);

  // Copy two elements.
  LIR* begin_loop = NewLIR0(kPseudoTargetLabel);
  LIR* jmp_to_ret = OpCmpImmBranch(kCondEq, rs_length, 0, nullptr);
  OpRegImm(kOpSub, rs_length, 4);
  LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32);
  StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32);
  OpUnconditionalBranch(begin_loop);

  LIR* check_failed = NewLIR0(kPseudoTargetLabel);
  LIR* launchpad_branch = OpUnconditionalBranch(nullptr);
  LIR* return_point = NewLIR0(kPseudoTargetLabel);

  src_check_branch->target = check_failed;
  dst_check_branch->target = check_failed;
  src_dst_same->target = check_failed;
  len_neg_or_too_big->target = check_failed;
  src_pos_negative->target = check_failed;
  src_bad_len->target = check_failed;
  dst_pos_negative->target = check_failed;
  dst_bad_len->target = check_failed;
  jmp_to_begin_loop->target = begin_loop;
  jmp_to_ret->target = return_point;

  AddIntrinsicSlowPath(info, launchpad_branch, return_point);
  ClobberCallerSave();  // We must clobber everything because slow path will return here

  return true;
}
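
// Editor's note: the fast path above works backwards from the end of the requested range:
// rs_length is turned into a byte count (length << 1), one trailing char is copied with a
// halfword load/store when the element count is odd, and the rest is moved two chars (one
// 32-bit word) at a time until the count reaches zero. Null arrays, overlapping src/dst,
// bad positions/bounds, and lengths above 256 elements all divert to the slow path.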

LIR* ArmMir2Lir::OpPcRelLoad(RegStorage reg, LIR* target) {
  return RawLIR(current_dalvik_offset_, kThumb2LdrPcRel12, reg.GetReg(), 0, 0, 0, 0, target);
}

LIR* ArmMir2Lir::OpVldm(RegStorage r_base, int count) {
  return NewLIR3(kThumb2Vldms, r_base.GetReg(), rs_fr0.GetReg(), count);
}

LIR* ArmMir2Lir::OpVstm(RegStorage r_base, int count) {
  return NewLIR3(kThumb2Vstms, r_base.GetReg(), rs_fr0.GetReg(), count);
}

void ArmMir2Lir::GenMaddMsubInt(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2,
                                RegLocation rl_src3, bool is_sub) {
  rl_src1 = LoadValue(rl_src1, kCoreReg);
  rl_src2 = LoadValue(rl_src2, kCoreReg);
  rl_src3 = LoadValue(rl_src3, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  NewLIR4(is_sub ? kThumb2Mls : kThumb2Mla, rl_result.reg.GetReg(), rl_src1.reg.GetReg(),
          rl_src2.reg.GetReg(), rl_src3.reg.GetReg());
  StoreValue(rl_dest, rl_result);
}
1098
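// lit is expected to have exactly two bits set (first_bit and second_bit), so
//   dest = src * ((1 << first_bit) + (1 << second_bit))
//        = (src + (src << (second_bit - first_bit))) << first_bit.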
Brian Carlstrom7940e442013-07-12 13:46:57 -07001099void ArmMir2Lir::GenMultiplyByTwoBitMultiplier(RegLocation rl_src,
1100 RegLocation rl_result, int lit,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001101 int first_bit, int second_bit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001102 UNUSED(lit);
Ian Rogerse2143c02014-03-28 08:47:16 -07001103 OpRegRegRegShift(kOpAdd, rl_result.reg, rl_src.reg, rl_src.reg,
Brian Carlstrom7940e442013-07-12 13:46:57 -07001104 EncodeShift(kArmLsl, second_bit - first_bit));
1105 if (first_bit != 0) {
buzbee2700f7e2014-03-07 09:46:20 -08001106 OpRegRegImm(kOpLsl, rl_result.reg, rl_result.reg, first_bit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001107 }
1108}
1109
Mingyao Yange643a172014-04-08 11:02:52 -07001110void ArmMir2Lir::GenDivZeroCheckWide(RegStorage reg) {
buzbee2700f7e2014-03-07 09:46:20 -08001111 DCHECK(reg.IsPair()); // TODO: support k64BitSolo.
1112 RegStorage t_reg = AllocTemp();
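  // ORR the low and high words together, setting flags; the result is zero iff the full 64-bit
  // value is zero.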
1113 NewLIR4(kThumb2OrrRRRs, t_reg.GetReg(), reg.GetLowReg(), reg.GetHighReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001114 FreeTemp(t_reg);
Mingyao Yange643a172014-04-08 11:02:52 -07001115 GenDivZeroCheck(kCondEq);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001116}
1117
1118// Test suspend flag, return target of taken suspend branch
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001119LIR* ArmMir2Lir::OpTestSuspend(LIR* target) {
Wei Jin04f4d8a2014-05-29 18:04:29 -07001120#ifdef ARM_R4_SUSPEND_FLAG
buzbee091cc402014-03-31 10:14:40 -07001121 NewLIR2(kThumbSubRI8, rs_rARM_SUSPEND.GetReg(), 1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001122 return OpCondBranch((target == nullptr) ? kCondEq : kCondNe, target);
Wei Jin04f4d8a2014-05-29 18:04:29 -07001123#else
1124 RegStorage t_reg = AllocTemp();
1125 LoadBaseDisp(rs_rARM_SELF, Thread::ThreadFlagsOffset<4>().Int32Value(),
Ian Rogers8ba17f62014-10-27 18:48:49 -07001126 t_reg, kUnsignedHalf, kNotVolatile);
Wei Jin04f4d8a2014-05-29 18:04:29 -07001127 LIR* cmp_branch = OpCmpImmBranch((target == nullptr) ? kCondNe : kCondEq, t_reg,
1128 0, target);
1129 FreeTemp(t_reg);
1130 return cmp_branch;
1131#endif
Brian Carlstrom7940e442013-07-12 13:46:57 -07001132}
1133
1134// Decrement register and branch on condition
buzbee2700f7e2014-03-07 09:46:20 -08001135LIR* ArmMir2Lir::OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001136 // Combine sub & test using sub setflags encoding here
Vladimir Markodbb8c492014-02-28 17:36:39 +00001137 OpRegRegImm(kOpSub, reg, reg, 1); // For value == 1, this should set flags.
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001138 DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001139 return OpCondBranch(c_code, target);
1140}
1141
Andreas Gampeb14329f2014-05-15 11:16:06 -07001142bool ArmMir2Lir::GenMemBarrier(MemBarrierKind barrier_kind) {
Elliott Hughes8366ca02014-11-17 12:02:05 -08001143 if (!cu_->GetInstructionSetFeatures()->IsSmp()) {
1144 return false;
1145 }
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001146 // Start by treating the last LIR as the barrier; if it is not already a matching dmb, we will generate one.
1147 LIR* barrier = last_lir_insn_;
1148
Brian Carlstrom7940e442013-07-12 13:46:57 -07001149 int dmb_flavor;
1150 // TODO: revisit Arm barrier kinds
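  // kISH orders all memory accesses within the inner shareable domain; kISHST orders only
  // store->store, which is sufficient for kStoreStore.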
1151 switch (barrier_kind) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001152 case kAnyStore: dmb_flavor = kISH; break;
1153 case kLoadAny: dmb_flavor = kISH; break;
Ian Rogersb122a4b2013-11-19 18:00:50 -08001154 case kStoreStore: dmb_flavor = kISHST; break;
Hans Boehm48f5c472014-06-27 14:50:10 -07001155 case kAnyAny: dmb_flavor = kISH; break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001156 default:
1157 LOG(FATAL) << "Unexpected MemBarrierKind: " << barrier_kind;
1158 dmb_flavor = kSY; // quiet gcc.
1159 break;
1160 }
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001161
Andreas Gampeb14329f2014-05-15 11:16:06 -07001162 bool ret = false;
1163
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001164 // If the same barrier already exists, don't generate another.
1165   if (barrier == nullptr || barrier->opcode != kThumb2Dmb ||
1166       barrier->operands[0] != dmb_flavor) {
1167 barrier = NewLIR1(kThumb2Dmb, dmb_flavor);
Andreas Gampeb14329f2014-05-15 11:16:06 -07001168 ret = true;
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001169 }
1170
1171 // At this point we must have a memory barrier. Mark it as a scheduling barrier as well.
1172 DCHECK(!barrier->flags.use_def_invalid);
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001173 barrier->u.m.def_mask = &kEncodeAll;
Andreas Gampeb14329f2014-05-15 11:16:06 -07001174 return ret;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001175}
1176
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001177void ArmMir2Lir::GenNegLong(RegLocation rl_dest, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001178 rl_src = LoadValueWide(rl_src, kCoreReg);
1179 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001180 RegStorage z_reg = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001181 LoadConstantNoClobber(z_reg, 0);
1182   // Check for destructive overlap: if result.lo aliases src.hi, copy src.hi to a temp first.
buzbee2700f7e2014-03-07 09:46:20 -08001183 if (rl_result.reg.GetLowReg() == rl_src.reg.GetHighReg()) {
1184 RegStorage t_reg = AllocTemp();
Vladimir Marko2f340a82014-12-01 16:48:48 +00001185 OpRegCopy(t_reg, rl_result.reg.GetLow());
buzbee2700f7e2014-03-07 09:46:20 -08001186 OpRegRegReg(kOpSub, rl_result.reg.GetLow(), z_reg, rl_src.reg.GetLow());
1187 OpRegRegReg(kOpSbc, rl_result.reg.GetHigh(), z_reg, t_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001188 FreeTemp(t_reg);
1189 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001190 OpRegRegReg(kOpSub, rl_result.reg.GetLow(), z_reg, rl_src.reg.GetLow());
1191 OpRegRegReg(kOpSbc, rl_result.reg.GetHigh(), z_reg, rl_src.reg.GetHigh());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001192 }
1193 FreeTemp(z_reg);
1194 StoreValueWide(rl_dest, rl_result);
1195}
1196
Mark Mendelle02d48f2014-01-15 11:19:23 -08001197void ArmMir2Lir::GenMulLong(Instruction::Code opcode, RegLocation rl_dest,
1198 RegLocation rl_src1, RegLocation rl_src2) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001199 UNUSED(opcode);
1200 /*
1201 * tmp1 = src1.hi * src2.lo; // src1.hi is no longer needed
1202 * dest = src1.lo * src2.lo;
1203 * tmp1 += src1.lo * src2.hi;
1204 * dest.hi += tmp1;
1205 *
1206 * To pull off inline multiply, we have a worst-case requirement of 7 temporary
1207 * registers. Normally for Arm, we get 5. We can get to 6 by including
1208 * lr in the temp set. The only problematic case is all operands and result are
1209 * distinct, and none have been promoted. In that case, we can succeed by aggressively
1210 * freeing operand temp registers after they are no longer needed. All other cases
1211 * can proceed normally. We'll just punt on the case of the result having a misaligned
1212 * overlap with either operand and send that case to a runtime handler.
1213 */
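  // The identity behind the sequence above (keeping only the low 64 bits of the product):
  //   (hi1 * 2^32 + lo1) * (hi2 * 2^32 + lo2)
  //     = lo1 * lo2 + 2^32 * (hi1 * lo2 + lo1 * hi2) + 2^64 * (hi1 * hi2)
  // The hi1 * hi2 term overflows out, so umull produces the 64-bit lo1 * lo2 and the two cross
  // terms are folded into the high word with mul/mla.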
1214 RegLocation rl_result;
1215 if (PartiallyIntersects(rl_src1, rl_dest) || (PartiallyIntersects(rl_src2, rl_dest))) {
1216 FlushAllRegs();
1217 CallRuntimeHelperRegLocationRegLocation(kQuickLmul, rl_src1, rl_src2, false);
1218 rl_result = GetReturnWide(kCoreReg);
Zheng Xud7f8e022014-03-13 13:40:30 +00001219 StoreValueWide(rl_dest, rl_result);
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001220 return;
1221 }
1222
1223 rl_src1 = LoadValueWide(rl_src1, kCoreReg);
1224 rl_src2 = LoadValueWide(rl_src2, kCoreReg);
1225
1226 int reg_status = 0;
1227 RegStorage res_lo;
1228 RegStorage res_hi;
1229 bool dest_promoted = rl_dest.location == kLocPhysReg && rl_dest.reg.Valid() &&
1230 !IsTemp(rl_dest.reg.GetLow()) && !IsTemp(rl_dest.reg.GetHigh());
1231 bool src1_promoted = !IsTemp(rl_src1.reg.GetLow()) && !IsTemp(rl_src1.reg.GetHigh());
1232 bool src2_promoted = !IsTemp(rl_src2.reg.GetLow()) && !IsTemp(rl_src2.reg.GetHigh());
1233 // Check if rl_dest is *not* either operand and we have enough temp registers.
1234 if ((rl_dest.s_reg_low != rl_src1.s_reg_low && rl_dest.s_reg_low != rl_src2.s_reg_low) &&
1235 (dest_promoted || src1_promoted || src2_promoted)) {
1236 // In this case, we do not need to manually allocate temp registers for result.
1237 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1238 res_lo = rl_result.reg.GetLow();
1239 res_hi = rl_result.reg.GetHigh();
1240 } else {
1241 res_lo = AllocTemp();
1242 if ((rl_src1.s_reg_low == rl_src2.s_reg_low) || src1_promoted || src2_promoted) {
1243 // In this case, we have enough temp registers to be allocated for result.
1244 res_hi = AllocTemp();
1245 reg_status = 1;
1246 } else {
1247 // In this case, all temps are now allocated.
1248 // res_hi will be allocated after we can free src1_hi.
1249 reg_status = 2;
1250 }
1251 }
1252
1253 // Temporarily add LR to the temp pool, and assign it to tmp1
1254 MarkTemp(rs_rARM_LR);
1255 FreeTemp(rs_rARM_LR);
1256 RegStorage tmp1 = rs_rARM_LR;
1257 LockTemp(rs_rARM_LR);
1258
1259 if (rl_src1.reg == rl_src2.reg) {
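    // Squaring: the two cross terms are identical, so tmp1 = lo * hi is added into the high word
    // twice via a single add with LSL #1.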
1260 DCHECK(res_hi.Valid());
1261 DCHECK(res_lo.Valid());
1262 NewLIR3(kThumb2MulRRR, tmp1.GetReg(), rl_src1.reg.GetLowReg(), rl_src1.reg.GetHighReg());
1263 NewLIR4(kThumb2Umull, res_lo.GetReg(), res_hi.GetReg(), rl_src1.reg.GetLowReg(),
1264 rl_src1.reg.GetLowReg());
1265 OpRegRegRegShift(kOpAdd, res_hi, res_hi, tmp1, EncodeShift(kArmLsl, 1));
1266 } else {
1267 NewLIR3(kThumb2MulRRR, tmp1.GetReg(), rl_src2.reg.GetLowReg(), rl_src1.reg.GetHighReg());
1268 if (reg_status == 2) {
1269 DCHECK(!res_hi.Valid());
1270 DCHECK_NE(rl_src1.reg.GetLowReg(), rl_src2.reg.GetLowReg());
1271 DCHECK_NE(rl_src1.reg.GetHighReg(), rl_src2.reg.GetHighReg());
1272 // Will force free src1_hi, so must clobber.
1273 Clobber(rl_src1.reg);
1274 FreeTemp(rl_src1.reg.GetHigh());
1275 res_hi = AllocTemp();
1276 }
1277 DCHECK(res_hi.Valid());
1278 DCHECK(res_lo.Valid());
1279 NewLIR4(kThumb2Umull, res_lo.GetReg(), res_hi.GetReg(), rl_src2.reg.GetLowReg(),
1280 rl_src1.reg.GetLowReg());
1281 NewLIR4(kThumb2Mla, tmp1.GetReg(), rl_src1.reg.GetLowReg(), rl_src2.reg.GetHighReg(),
1282 tmp1.GetReg());
1283 NewLIR4(kThumb2AddRRR, res_hi.GetReg(), tmp1.GetReg(), res_hi.GetReg(), 0);
1284 if (reg_status == 2) {
1285 FreeTemp(rl_src1.reg.GetLow());
1286 }
1287 }
1288
1289 // Now, restore lr to its non-temp status.
1290 FreeTemp(tmp1);
1291 Clobber(rs_rARM_LR);
1292 UnmarkTemp(rs_rARM_LR);
1293
1294 if (reg_status != 0) {
1295 // We had manually allocated registers for rl_result.
1296 // Now construct a RegLocation.
1297 rl_result = GetReturnWide(kCoreReg); // Just using as a template.
1298 rl_result.reg = RegStorage::MakeRegPair(res_lo, res_hi);
1299 }
1300
1301 StoreValueWide(rl_dest, rl_result);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001302}
1303
Andreas Gampec76c6142014-08-04 16:30:03 -07001304void ArmMir2Lir::GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001305 RegLocation rl_src2, int flags) {
Andreas Gampec76c6142014-08-04 16:30:03 -07001306 switch (opcode) {
1307 case Instruction::MUL_LONG:
1308 case Instruction::MUL_LONG_2ADDR:
1309 GenMulLong(opcode, rl_dest, rl_src1, rl_src2);
1310 return;
1311 case Instruction::NEG_LONG:
1312 GenNegLong(rl_dest, rl_src2);
1313 return;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001314
Andreas Gampec76c6142014-08-04 16:30:03 -07001315 default:
1316 break;
1317 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001318
Andreas Gampec76c6142014-08-04 16:30:03 -07001319 // Fallback for all other ops.
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001320 Mir2Lir::GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001321}
1322
1323/*
1324 * Generate array load
1325 */
1326void ArmMir2Lir::GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
Ian Rogersa9a82542013-10-04 11:17:26 -07001327 RegLocation rl_index, RegLocation rl_dest, int scale) {
buzbee091cc402014-03-31 10:14:40 -07001328 RegisterClass reg_class = RegClassBySize(size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001329 int len_offset = mirror::Array::LengthOffset().Int32Value();
1330 int data_offset;
1331 RegLocation rl_result;
1332 bool constant_index = rl_index.is_const;
buzbeea0cd2d72014-06-01 09:33:49 -07001333 rl_array = LoadValue(rl_array, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001334 if (!constant_index) {
1335 rl_index = LoadValue(rl_index, kCoreReg);
1336 }
1337
1338 if (rl_dest.wide) {
1339 data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Int32Value();
1340 } else {
1341 data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Int32Value();
1342 }
1343
1344 // If index is constant, just fold it into the data offset
1345 if (constant_index) {
1346 data_offset += mir_graph_->ConstantValue(rl_index) << scale;
1347 }
1348
1349 /* null object? */
buzbee2700f7e2014-03-07 09:46:20 -08001350 GenNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001351
1352 bool needs_range_check = (!(opt_flags & MIR_IGNORE_RANGE_CHECK));
buzbee2700f7e2014-03-07 09:46:20 -08001353 RegStorage reg_len;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001354 if (needs_range_check) {
1355 reg_len = AllocTemp();
1356 /* Get len */
buzbee695d13a2014-04-19 13:32:20 -07001357 Load32Disp(rl_array.reg, len_offset, reg_len);
Dave Allisonb373e092014-02-20 16:06:36 -08001358 MarkPossibleNullPointerException(opt_flags);
1359 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001360 ForceImplicitNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001361 }
1362 if (rl_dest.wide || rl_dest.fp || constant_index) {
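    // Wide, FP, and constant-index loads use a base + displacement load (computing an element
    // address first when the index is not constant); the remaining cases use the reg + reg
    // indexed load in the else branch.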
buzbee2700f7e2014-03-07 09:46:20 -08001363 RegStorage reg_ptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001364 if (constant_index) {
buzbee2700f7e2014-03-07 09:46:20 -08001365 reg_ptr = rl_array.reg; // NOTE: must not alter reg_ptr in constant case.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001366 } else {
1367 // No special indexed operation, lea + load w/ displacement
buzbeea0cd2d72014-06-01 09:33:49 -07001368 reg_ptr = AllocTempRef();
Ian Rogerse2143c02014-03-28 08:47:16 -07001369 OpRegRegRegShift(kOpAdd, reg_ptr, rl_array.reg, rl_index.reg, EncodeShift(kArmLsl, scale));
buzbee091cc402014-03-31 10:14:40 -07001370 FreeTemp(rl_index.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001371 }
1372 rl_result = EvalLoc(rl_dest, reg_class, true);
1373
1374 if (needs_range_check) {
1375 if (constant_index) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001376 GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001377 } else {
Mingyao Yang80365d92014-04-18 12:10:58 -07001378 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001379 }
1380 FreeTemp(reg_len);
1381 }
Andreas Gampe3c12c512014-06-24 18:46:29 +00001382 LoadBaseDisp(reg_ptr, data_offset, rl_result.reg, size, kNotVolatile);
Vladimir Marko455759b2014-05-06 20:49:36 +01001383 if (!constant_index) {
1384 FreeTemp(reg_ptr);
1385 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001386 if (rl_dest.wide) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001387 StoreValueWide(rl_dest, rl_result);
1388 } else {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001389 StoreValue(rl_dest, rl_result);
1390 }
1391 } else {
1392 // Offset base, then use indexed load
buzbeea0cd2d72014-06-01 09:33:49 -07001393 RegStorage reg_ptr = AllocTempRef();
buzbee2700f7e2014-03-07 09:46:20 -08001394 OpRegRegImm(kOpAdd, reg_ptr, rl_array.reg, data_offset);
buzbee091cc402014-03-31 10:14:40 -07001395 FreeTemp(rl_array.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001396 rl_result = EvalLoc(rl_dest, reg_class, true);
1397
1398 if (needs_range_check) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001399 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001400 FreeTemp(reg_len);
1401 }
buzbee2700f7e2014-03-07 09:46:20 -08001402 LoadBaseIndexed(reg_ptr, rl_index.reg, rl_result.reg, scale, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001403 FreeTemp(reg_ptr);
1404 StoreValue(rl_dest, rl_result);
1405 }
1406}
1407
1408/*
1409 * Generate array store
1410 *
1411 */
1412void ArmMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
Ian Rogersa9a82542013-10-04 11:17:26 -07001413 RegLocation rl_index, RegLocation rl_src, int scale, bool card_mark) {
buzbee091cc402014-03-31 10:14:40 -07001414 RegisterClass reg_class = RegClassBySize(size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001415 int len_offset = mirror::Array::LengthOffset().Int32Value();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001416 bool constant_index = rl_index.is_const;
1417
Ian Rogersa9a82542013-10-04 11:17:26 -07001418 int data_offset;
buzbee695d13a2014-04-19 13:32:20 -07001419 if (size == k64 || size == kDouble) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001420 data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Int32Value();
1421 } else {
1422 data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Int32Value();
1423 }
1424
1425 // If index is constant, just fold it into the data offset.
1426 if (constant_index) {
1427 data_offset += mir_graph_->ConstantValue(rl_index) << scale;
1428 }
1429
buzbeea0cd2d72014-06-01 09:33:49 -07001430 rl_array = LoadValue(rl_array, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001431 if (!constant_index) {
1432 rl_index = LoadValue(rl_index, kCoreReg);
1433 }
1434
buzbee2700f7e2014-03-07 09:46:20 -08001435 RegStorage reg_ptr;
Ian Rogers773aab12013-10-14 13:50:10 -07001436 bool allocated_reg_ptr_temp = false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001437 if (constant_index) {
buzbee2700f7e2014-03-07 09:46:20 -08001438 reg_ptr = rl_array.reg;
buzbee091cc402014-03-31 10:14:40 -07001439 } else if (IsTemp(rl_array.reg) && !card_mark) {
1440 Clobber(rl_array.reg);
buzbee2700f7e2014-03-07 09:46:20 -08001441 reg_ptr = rl_array.reg;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001442 } else {
Ian Rogers773aab12013-10-14 13:50:10 -07001443 allocated_reg_ptr_temp = true;
buzbeea0cd2d72014-06-01 09:33:49 -07001444 reg_ptr = AllocTempRef();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001445 }
1446
1447 /* null object? */
buzbee2700f7e2014-03-07 09:46:20 -08001448 GenNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001449
1450 bool needs_range_check = (!(opt_flags & MIR_IGNORE_RANGE_CHECK));
buzbee2700f7e2014-03-07 09:46:20 -08001451 RegStorage reg_len;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001452 if (needs_range_check) {
1453 reg_len = AllocTemp();
Brian Carlstrom7934ac22013-07-26 10:54:15 -07001454 // NOTE: max live temps(4) here.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001455 /* Get len */
buzbee695d13a2014-04-19 13:32:20 -07001456 Load32Disp(rl_array.reg, len_offset, reg_len);
Dave Allisonb373e092014-02-20 16:06:36 -08001457 MarkPossibleNullPointerException(opt_flags);
1458 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001459 ForceImplicitNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001460 }
1461 /* at this point, reg_ptr points to array, 2 live temps */
1462 if (rl_src.wide || rl_src.fp || constant_index) {
1463 if (rl_src.wide) {
1464 rl_src = LoadValueWide(rl_src, reg_class);
1465 } else {
1466 rl_src = LoadValue(rl_src, reg_class);
1467 }
1468 if (!constant_index) {
Ian Rogerse2143c02014-03-28 08:47:16 -07001469 OpRegRegRegShift(kOpAdd, reg_ptr, rl_array.reg, rl_index.reg, EncodeShift(kArmLsl, scale));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001470 }
1471 if (needs_range_check) {
1472 if (constant_index) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001473 GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001474 } else {
Mingyao Yang80365d92014-04-18 12:10:58 -07001475 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001476 }
1477 FreeTemp(reg_len);
1478 }
1479
Andreas Gampe3c12c512014-06-24 18:46:29 +00001480 StoreBaseDisp(reg_ptr, data_offset, rl_src.reg, size, kNotVolatile);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001481 } else {
1482 /* reg_ptr -> array data */
buzbee2700f7e2014-03-07 09:46:20 -08001483 OpRegRegImm(kOpAdd, reg_ptr, rl_array.reg, data_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001484 rl_src = LoadValue(rl_src, reg_class);
1485 if (needs_range_check) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001486 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001487 FreeTemp(reg_len);
1488 }
buzbee2700f7e2014-03-07 09:46:20 -08001489 StoreBaseIndexed(reg_ptr, rl_index.reg, rl_src.reg, scale, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001490 }
Ian Rogers773aab12013-10-14 13:50:10 -07001491 if (allocated_reg_ptr_temp) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001492 FreeTemp(reg_ptr);
1493 }
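  // Reference stores need a write barrier: mark the card covering rl_array so the GC can find the
  // updated reference.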
Ian Rogersa9a82542013-10-04 11:17:26 -07001494 if (card_mark) {
Vladimir Marko743b98c2014-11-24 19:45:41 +00001495 MarkGCCard(opt_flags, rl_src.reg, rl_array.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001496 }
1497}
1498
Ian Rogersa9a82542013-10-04 11:17:26 -07001499
Brian Carlstrom7940e442013-07-12 13:46:57 -07001500void ArmMir2Lir::GenShiftImmOpLong(Instruction::Code opcode,
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001501 RegLocation rl_dest, RegLocation rl_src, RegLocation rl_shift,
1502 int flags) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001503 UNUSED(flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001504 rl_src = LoadValueWide(rl_src, kCoreReg);
1505 // Per spec, we only care about low 6 bits of shift amount.
1506 int shift_amount = mir_graph_->ConstantValue(rl_shift) & 0x3f;
1507 if (shift_amount == 0) {
1508 StoreValueWide(rl_dest, rl_src);
1509 return;
1510 }
Alexei Zavjalovd8c3e362014-10-08 15:51:59 +07001511 if (PartiallyIntersects(rl_src, rl_dest)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001512 GenShiftOpLong(opcode, rl_dest, rl_src, rl_shift);
1513 return;
1514 }
1515 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
Brian Carlstromdf629502013-07-17 22:39:56 -07001516 switch (opcode) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001517 case Instruction::SHL_LONG:
1518 case Instruction::SHL_LONG_2ADDR:
1519 if (shift_amount == 1) {
buzbee2700f7e2014-03-07 09:46:20 -08001520 OpRegRegReg(kOpAdd, rl_result.reg.GetLow(), rl_src.reg.GetLow(), rl_src.reg.GetLow());
1521 OpRegRegReg(kOpAdc, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), rl_src.reg.GetHigh());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001522 } else if (shift_amount == 32) {
buzbee2700f7e2014-03-07 09:46:20 -08001523 OpRegCopy(rl_result.reg.GetHigh(), rl_src.reg);
1524 LoadConstant(rl_result.reg.GetLow(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001525 } else if (shift_amount > 31) {
buzbee2700f7e2014-03-07 09:46:20 -08001526 OpRegRegImm(kOpLsl, rl_result.reg.GetHigh(), rl_src.reg.GetLow(), shift_amount - 32);
1527 LoadConstant(rl_result.reg.GetLow(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001528 } else {
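        // 0 < shift_amount < 32:
        //   result.hi = (src.hi << n) | (src.lo >> (32 - n));  result.lo = src.lo << n.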
buzbee2700f7e2014-03-07 09:46:20 -08001529 OpRegRegImm(kOpLsl, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), shift_amount);
Ian Rogerse2143c02014-03-28 08:47:16 -07001530 OpRegRegRegShift(kOpOr, rl_result.reg.GetHigh(), rl_result.reg.GetHigh(), rl_src.reg.GetLow(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001531 EncodeShift(kArmLsr, 32 - shift_amount));
buzbee2700f7e2014-03-07 09:46:20 -08001532 OpRegRegImm(kOpLsl, rl_result.reg.GetLow(), rl_src.reg.GetLow(), shift_amount);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001533 }
1534 break;
1535 case Instruction::SHR_LONG:
1536 case Instruction::SHR_LONG_2ADDR:
1537 if (shift_amount == 32) {
buzbee2700f7e2014-03-07 09:46:20 -08001538 OpRegCopy(rl_result.reg.GetLow(), rl_src.reg.GetHigh());
1539 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001540 } else if (shift_amount > 31) {
buzbee2700f7e2014-03-07 09:46:20 -08001541 OpRegRegImm(kOpAsr, rl_result.reg.GetLow(), rl_src.reg.GetHigh(), shift_amount - 32);
1542 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001543 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001544 RegStorage t_reg = AllocTemp();
1545 OpRegRegImm(kOpLsr, t_reg, rl_src.reg.GetLow(), shift_amount);
Ian Rogerse2143c02014-03-28 08:47:16 -07001546 OpRegRegRegShift(kOpOr, rl_result.reg.GetLow(), t_reg, rl_src.reg.GetHigh(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001547 EncodeShift(kArmLsl, 32 - shift_amount));
1548 FreeTemp(t_reg);
buzbee2700f7e2014-03-07 09:46:20 -08001549 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), shift_amount);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001550 }
1551 break;
1552 case Instruction::USHR_LONG:
1553 case Instruction::USHR_LONG_2ADDR:
1554 if (shift_amount == 32) {
buzbee2700f7e2014-03-07 09:46:20 -08001555 OpRegCopy(rl_result.reg.GetLow(), rl_src.reg.GetHigh());
1556 LoadConstant(rl_result.reg.GetHigh(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001557 } else if (shift_amount > 31) {
buzbee2700f7e2014-03-07 09:46:20 -08001558 OpRegRegImm(kOpLsr, rl_result.reg.GetLow(), rl_src.reg.GetHigh(), shift_amount - 32);
1559 LoadConstant(rl_result.reg.GetHigh(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001560 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001561 RegStorage t_reg = AllocTemp();
1562 OpRegRegImm(kOpLsr, t_reg, rl_src.reg.GetLow(), shift_amount);
Ian Rogerse2143c02014-03-28 08:47:16 -07001563 OpRegRegRegShift(kOpOr, rl_result.reg.GetLow(), t_reg, rl_src.reg.GetHigh(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001564 EncodeShift(kArmLsl, 32 - shift_amount));
1565 FreeTemp(t_reg);
buzbee2700f7e2014-03-07 09:46:20 -08001566 OpRegRegImm(kOpLsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), shift_amount);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001567 }
1568 break;
1569 default:
1570 LOG(FATAL) << "Unexpected case";
1571 }
1572 StoreValueWide(rl_dest, rl_result);
1573}
1574
1575void ArmMir2Lir::GenArithImmOpLong(Instruction::Code opcode,
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001576 RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2,
1577 int flags) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001578 if ((opcode == Instruction::SUB_LONG_2ADDR) || (opcode == Instruction::SUB_LONG)) {
1579 if (!rl_src2.is_const) {
1580 // Don't bother with special handling for subtract from immediate.
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001581 GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001582 return;
1583 }
1584 } else {
1585 // Normalize
1586 if (!rl_src2.is_const) {
1587 DCHECK(rl_src1.is_const);
Vladimir Marko58af1f92013-12-19 13:31:15 +00001588 std::swap(rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001589 }
1590 }
Alexei Zavjalovd8c3e362014-10-08 15:51:59 +07001591 if (PartiallyIntersects(rl_src1, rl_dest)) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001592 GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001593 return;
1594 }
1595 DCHECK(rl_src2.is_const);
1596 int64_t val = mir_graph_->ConstantValueWide(rl_src2);
1597 uint32_t val_lo = Low32Bits(val);
1598 uint32_t val_hi = High32Bits(val);
1599 int32_t mod_imm_lo = ModifiedImmediate(val_lo);
1600 int32_t mod_imm_hi = ModifiedImmediate(val_hi);
1601
1602 // Only a subset of add/sub immediate instructions set carry - so bail if we don't fit
Brian Carlstromdf629502013-07-17 22:39:56 -07001603 switch (opcode) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001604 case Instruction::ADD_LONG:
1605 case Instruction::ADD_LONG_2ADDR:
1606 case Instruction::SUB_LONG:
1607 case Instruction::SUB_LONG_2ADDR:
1608 if ((mod_imm_lo < 0) || (mod_imm_hi < 0)) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001609 GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001610 return;
1611 }
1612 break;
1613 default:
1614 break;
1615 }
1616 rl_src1 = LoadValueWide(rl_src1, kCoreReg);
1617 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1618 // NOTE: once we've done the EvalLoc on dest, we can no longer bail.
1619 switch (opcode) {
1620 case Instruction::ADD_LONG:
1621 case Instruction::ADD_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001622 NewLIR3(kThumb2AddRRI8M, rl_result.reg.GetLowReg(), rl_src1.reg.GetLowReg(), mod_imm_lo);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001623 NewLIR3(kThumb2AdcRRI8M, rl_result.reg.GetHighReg(), rl_src1.reg.GetHighReg(), mod_imm_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001624 break;
1625 case Instruction::OR_LONG:
1626 case Instruction::OR_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001627 if ((val_lo != 0) || (rl_result.reg.GetLowReg() != rl_src1.reg.GetLowReg())) {
1628 OpRegRegImm(kOpOr, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), val_lo);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001629 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001630 if ((val_hi != 0) || (rl_result.reg.GetHighReg() != rl_src1.reg.GetHighReg())) {
buzbee2700f7e2014-03-07 09:46:20 -08001631 OpRegRegImm(kOpOr, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001632 }
1633 break;
1634 case Instruction::XOR_LONG:
1635 case Instruction::XOR_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001636 OpRegRegImm(kOpXor, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), val_lo);
1637 OpRegRegImm(kOpXor, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001638 break;
1639 case Instruction::AND_LONG:
1640 case Instruction::AND_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001641 if ((val_lo != 0xffffffff) || (rl_result.reg.GetLowReg() != rl_src1.reg.GetLowReg())) {
1642 OpRegRegImm(kOpAnd, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), val_lo);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001643 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001644 if ((val_hi != 0xffffffff) || (rl_result.reg.GetHighReg() != rl_src1.reg.GetHighReg())) {
buzbee2700f7e2014-03-07 09:46:20 -08001645 OpRegRegImm(kOpAnd, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001646 }
1647 break;
1648 case Instruction::SUB_LONG_2ADDR:
1649 case Instruction::SUB_LONG:
buzbee2700f7e2014-03-07 09:46:20 -08001650 NewLIR3(kThumb2SubRRI8M, rl_result.reg.GetLowReg(), rl_src1.reg.GetLowReg(), mod_imm_lo);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001651 NewLIR3(kThumb2SbcRRI8M, rl_result.reg.GetHighReg(), rl_src1.reg.GetHighReg(), mod_imm_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001652 break;
1653 default:
1654 LOG(FATAL) << "Unexpected opcode " << opcode;
1655 }
1656 StoreValueWide(rl_dest, rl_result);
1657}
1658
Andreas Gamped500b532015-01-16 22:09:55 -08001659bool ArmMir2Lir::HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
1660 RegLocation rl_src, RegLocation rl_dest, int lit) {
1661 if (lit < 2) {
1662 return false;
1663 }
1664
1665   // ARM either does not support a division instruction, or it is potentially expensive. Look
1666   // for more special cases.
1667 if (!IsPowerOfTwo(lit)) {
1668 return SmallLiteralDivRem(dalvik_opcode, is_div, rl_src, rl_dest, lit);
1669 }
1670
1671 return Mir2Lir::HandleEasyDivRem(dalvik_opcode, is_div, rl_src, rl_dest, lit);
1672}
1673
Brian Carlstrom7940e442013-07-12 13:46:57 -07001674} // namespace art