blob: ee0225738a9f4094c637a4724786848d213ff781 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "codegen_x86.h"
18#include "dex/quick/mir_to_lir-inl.h"
Mark Mendell67c39c42014-01-31 17:28:00 -080019#include "dex/dataflow_iterator-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070020#include "x86_lir.h"
21
22namespace art {
23
24/* This file contains codegen for the X86 ISA */
25
// Copy between two registers where at least one side is a floating point
// (xmm) register. Chooses movsd for double<->double, movss for float<->float,
// and movd for gpr<->xmm transfers. The LIR is built with RawLIR (not
// appended here — presumably the caller inserts it; compare
// OpRegCopyNoInsert/AppendLIR usage elsewhere in this file), and a self-copy
// is marked as a nop.
LIR* X86Mir2Lir::OpFpRegCopy(RegStorage r_dest, RegStorage r_src) {
  int opcode;
  /* must be both DOUBLE or both not DOUBLE */
  DCHECK(r_dest.IsFloat() || r_src.IsFloat());
  DCHECK_EQ(r_dest.IsDouble(), r_src.IsDouble());
  if (r_dest.IsDouble()) {
    opcode = kX86MovsdRR;  // double <- double
  } else {
    if (r_dest.IsSingle()) {
      if (r_src.IsSingle()) {
        opcode = kX86MovssRR;  // float <- float
      } else {  // Fpr <- Gpr
        opcode = kX86MovdxrRR;
      }
    } else {  // Gpr <- Fpr
      DCHECK(r_src.IsSingle()) << "Raw: 0x" << std::hex << r_src.GetRawBits();
      opcode = kX86MovdrxRR;
    }
  }
  DCHECK_NE((EncodingMap[opcode].flags & IS_BINARY_OP), 0ULL);
  LIR* res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
  if (r_dest == r_src) {
    res->flags.is_nop = true;  // Self-copy: keep the LIR node but emit nothing.
  }
  return res;
}
52
// Every 32-bit integer can be encoded directly as an x86 immediate, so
// materializing one never needs the literal pool.
bool X86Mir2Lir::InexpensiveConstantInt(int32_t value) {
  return true;
}
56
// Float constants are never reported as inexpensive on x86: there is no
// immediate form that targets an xmm register.
bool X86Mir2Lir::InexpensiveConstantFloat(int32_t value) {
  return false;
}
60
// 64-bit integer constants are inexpensive: each 32-bit half can be loaded
// with an immediate move (see LoadConstantWide's core-register path).
bool X86Mir2Lir::InexpensiveConstantLong(int64_t value) {
  return true;
}
64
// Only the all-zero bit pattern (+0.0) is cheap for doubles — it can be
// materialized with a single xorps (see LoadConstantWide). Any other value
// takes the general LoadConstantWide path.
bool X86Mir2Lir::InexpensiveConstantDouble(int64_t value) {
  return value == 0;
}
68
69/*
70 * Load a immediate using a shortcut if possible; otherwise
71 * grab from the per-translation literal pool. If target is
72 * a high register, build constant into a low register and copy.
73 *
74 * No additional register clobbering operation performed. Use this version when
75 * 1) r_dest is freshly returned from AllocTemp or
76 * 2) The codegen is under fixed register usage
77 */
LIR* X86Mir2Lir::LoadConstantNoClobber(RegStorage r_dest, int value) {
  RegStorage r_dest_save = r_dest;
  if (r_dest.IsFloat()) {
    if (value == 0) {
      // Zero an xmm register directly with xorps.
      return NewLIR2(kX86XorpsRR, r_dest.GetReg(), r_dest.GetReg());
    }
    // No immediate form targets an xmm register: build the value in a core
    // temp, then transfer it with movd below.
    r_dest = AllocTemp();
  }

  LIR *res;
  if (value == 0) {
    // xor reg,reg is the standard short encoding for zeroing a core register.
    res = NewLIR2(kX86Xor32RR, r_dest.GetReg(), r_dest.GetReg());
  } else {
    // Note, there is no byte immediate form of a 32 bit immediate move.
    if (r_dest.Is64Bit()) {
      res = NewLIR2(kX86Mov64RI, r_dest.GetReg(), value);
    } else {
      res = NewLIR2(kX86Mov32RI, r_dest.GetReg(), value);
    }
  }

  if (r_dest_save.IsFloat()) {
    // Move the materialized value from the core temp into the xmm target.
    NewLIR2(kX86MovdxrRR, r_dest_save.GetReg(), r_dest.GetReg());
    FreeTemp(r_dest);
  }

  return res;
}
106
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700107LIR* X86Mir2Lir::OpUnconditionalBranch(LIR* target) {
Brian Carlstromdf629502013-07-17 22:39:56 -0700108 LIR* res = NewLIR1(kX86Jmp8, 0 /* offset to be patched during assembly*/);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700109 res->target = target;
110 return res;
111}
112
// Emit a conditional branch (short jcc) to |target|. The 8-bit displacement
// is a placeholder that is patched during assembly.
LIR* X86Mir2Lir::OpCondBranch(ConditionCode cc, LIR* target) {
  LIR* branch = NewLIR2(kX86Jcc8, 0 /* offset to be patched */,
                        X86ConditionEncoding(cc));
  branch->target = target;
  return branch;
}
119
buzbee2700f7e2014-03-07 09:46:20 -0800120LIR* X86Mir2Lir::OpReg(OpKind op, RegStorage r_dest_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700121 X86OpCode opcode = kX86Bkpt;
122 switch (op) {
123 case kOpNeg: opcode = kX86Neg32R; break;
124 case kOpNot: opcode = kX86Not32R; break;
Vladimir Markoa8b4caf2013-10-24 15:08:57 +0100125 case kOpRev: opcode = kX86Bswap32R; break;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700126 case kOpBlx: opcode = kX86CallR; break;
127 default:
128 LOG(FATAL) << "Bad case in OpReg " << op;
129 }
buzbee2700f7e2014-03-07 09:46:20 -0800130 return NewLIR1(opcode, r_dest_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700131}
132
// Apply |op| to core register |r_dest_src1| with an immediate operand.
// Only add/sub are supported for 64-bit registers. 32-bit ops use the
// short sign-extended imm8 encoding when the value fits; constant-foldable
// ops (truncations, neg) are rewritten into a plain immediate move.
LIR* X86Mir2Lir::OpRegImm(OpKind op, RegStorage r_dest_src1, int value) {
  X86OpCode opcode = kX86Bkpt;
  bool byte_imm = IS_SIMM8(value);  // Fits a sign-extended 8-bit immediate?
  DCHECK(!r_dest_src1.IsFloat());
  if (r_dest_src1.Is64Bit()) {
    switch (op) {
      case kOpAdd: opcode = byte_imm ? kX86Add64RI8 : kX86Add64RI; break;
      case kOpSub: opcode = byte_imm ? kX86Sub64RI8 : kX86Sub64RI; break;
      default:
        LOG(FATAL) << "Bad case in OpRegImm (64-bit) " << op;
    }
  } else {
    switch (op) {
      case kOpLsl: opcode = kX86Sal32RI; break;
      case kOpLsr: opcode = kX86Shr32RI; break;
      case kOpAsr: opcode = kX86Sar32RI; break;
      case kOpAdd: opcode = byte_imm ? kX86Add32RI8 : kX86Add32RI; break;
      case kOpOr:  opcode = byte_imm ? kX86Or32RI8 : kX86Or32RI; break;
      case kOpAdc: opcode = byte_imm ? kX86Adc32RI8 : kX86Adc32RI; break;
      // case kOpSbb: opcode = kX86Sbb32RI; break;
      case kOpAnd: opcode = byte_imm ? kX86And32RI8 : kX86And32RI; break;
      case kOpSub: opcode = byte_imm ? kX86Sub32RI8 : kX86Sub32RI; break;
      case kOpXor: opcode = byte_imm ? kX86Xor32RI8 : kX86Xor32RI; break;
      case kOpCmp: opcode = byte_imm ? kX86Cmp32RI8 : kX86Cmp32RI; break;
      case kOpMov:
        /*
         * Moving the constant zero into register can be specialized as an xor of the register.
         * However, that sets eflags while the move does not. For that reason here, always do
         * the move and if caller is flexible, they should be calling LoadConstantNoClobber instead.
         */
        opcode = kX86Mov32RI;
        break;
      case kOpMul:
        // imul has a three-operand form (dst, src, imm); emit and return here.
        opcode = byte_imm ? kX86Imul32RRI8 : kX86Imul32RRI;
        return NewLIR3(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), value);
      case kOp2Byte:
        // Truncating a known constant folds at compile time into a plain move.
        opcode = kX86Mov32RI;
        value = static_cast<int8_t>(value);
        break;
      case kOp2Short:
        opcode = kX86Mov32RI;
        value = static_cast<int16_t>(value);
        break;
      case kOp2Char:
        opcode = kX86Mov32RI;
        value = static_cast<uint16_t>(value);
        break;
      case kOpNeg:
        // Negating a known constant: move the negated value instead.
        opcode = kX86Mov32RI;
        value = -value;
        break;
      default:
        LOG(FATAL) << "Bad case in OpRegImm " << op;
    }
  }
  return NewLIR2(opcode, r_dest_src1.GetReg(), value);
}
190
// Apply a two-address operation: r_dest_src1 = r_dest_src1 op r_src2.
// Ops with no direct x86 form (mvn/neg/rev/revsh) are synthesized as a copy
// followed by the in-place operation. Shift-by-register requires the count
// in CL (checked below). kOp2Byte falls back to a shl/sar pair when the
// source register has no byte-addressable encoding.
LIR* X86Mir2Lir::OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) {
  X86OpCode opcode = kX86Nop;
  bool src2_must_be_cx = false;  // Set for shift ops: count must live in CX.
  switch (op) {
    // X86 unary opcodes
    case kOpMvn:
      OpRegCopy(r_dest_src1, r_src2);
      return OpReg(kOpNot, r_dest_src1);
    case kOpNeg:
      OpRegCopy(r_dest_src1, r_src2);
      return OpReg(kOpNeg, r_dest_src1);
    case kOpRev:
      OpRegCopy(r_dest_src1, r_src2);
      return OpReg(kOpRev, r_dest_src1);
    case kOpRevsh:
      // Byte-swap, then arithmetic shift right 16 to sign-extend the result.
      OpRegCopy(r_dest_src1, r_src2);
      OpReg(kOpRev, r_dest_src1);
      return OpRegImm(kOpAsr, r_dest_src1, 16);
    // X86 binary opcodes
    case kOpSub: opcode = kX86Sub32RR; break;
    case kOpSbc: opcode = kX86Sbb32RR; break;
    case kOpLsl: opcode = kX86Sal32RC; src2_must_be_cx = true; break;
    case kOpLsr: opcode = kX86Shr32RC; src2_must_be_cx = true; break;
    case kOpAsr: opcode = kX86Sar32RC; src2_must_be_cx = true; break;
    case kOpMov: opcode = kX86Mov32RR; break;
    case kOpCmp: opcode = kX86Cmp32RR; break;
    case kOpAdd: opcode = kX86Add32RR; break;
    case kOpAdc: opcode = kX86Adc32RR; break;
    case kOpAnd: opcode = kX86And32RR; break;
    case kOpOr:  opcode = kX86Or32RR; break;
    case kOpXor: opcode = kX86Xor32RR; break;
    case kOp2Byte:
      // TODO: there are several instances of this check. A utility function perhaps?
      // TODO: Similar to Arm's reg < 8 check. Perhaps add attribute checks to RegStorage?
      // Use shifts instead of a byte operand if the source can't be byte accessed.
      if (r_src2.GetRegNum() >= rs_rX86_SP.GetRegNum()) {
        NewLIR2(kX86Mov32RR, r_dest_src1.GetReg(), r_src2.GetReg());
        NewLIR2(kX86Sal32RI, r_dest_src1.GetReg(), 24);
        return NewLIR2(kX86Sar32RI, r_dest_src1.GetReg(), 24);
      } else {
        opcode = kX86Movsx8RR;
      }
      break;
    case kOp2Short: opcode = kX86Movsx16RR; break;
    case kOp2Char: opcode = kX86Movzx16RR; break;
    case kOpMul: opcode = kX86Imul32RR; break;
    default:
      LOG(FATAL) << "Bad case in OpRegReg " << op;
      break;
  }
  CHECK(!src2_must_be_cx || r_src2.GetReg() == rs_rCX.GetReg());
  return NewLIR2(opcode, r_dest_src1.GetReg(), r_src2.GetReg());
}
244
// Load from [r_base + offset] into r_dest using the encoding selected by
// |move_type|. The CHECKs enforce that GP move types target a core register
// and FP/vector move types target an xmm register. 64-bit GP and lo/hi
// half-of-64 FP moves are not supported on this path.
LIR* X86Mir2Lir::OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type) {
  DCHECK(!r_base.IsFloat());
  X86OpCode opcode = kX86Nop;
  // For a register pair the low half's number is used as the destination.
  int dest = r_dest.IsPair() ? r_dest.GetLowReg() : r_dest.GetReg();
  switch (move_type) {
    case kMov8GP:
      CHECK(!r_dest.IsFloat());
      opcode = kX86Mov8RM;
      break;
    case kMov16GP:
      CHECK(!r_dest.IsFloat());
      opcode = kX86Mov16RM;
      break;
    case kMov32GP:
      CHECK(!r_dest.IsFloat());
      opcode = kX86Mov32RM;
      break;
    case kMov32FP:
      CHECK(r_dest.IsFloat());
      opcode = kX86MovssRM;
      break;
    case kMov64FP:
      CHECK(r_dest.IsFloat());
      opcode = kX86MovsdRM;
      break;
    case kMovU128FP:
      CHECK(r_dest.IsFloat());
      opcode = kX86MovupsRM;  // Unaligned 128-bit load.
      break;
    case kMovA128FP:
      CHECK(r_dest.IsFloat());
      opcode = kX86MovapsRM;  // Aligned 128-bit load.
      break;
    case kMovLo128FP:
      CHECK(r_dest.IsFloat());
      opcode = kX86MovlpsRM;
      break;
    case kMovHi128FP:
      CHECK(r_dest.IsFloat());
      opcode = kX86MovhpsRM;
      break;
    case kMov64GP:
    case kMovLo64FP:
    case kMovHi64FP:
    default:
      LOG(FATAL) << "Bad case in OpMovRegMem";
      break;
  }

  return NewLIR3(opcode, dest, r_base.GetReg(), offset);
}
296
// Store r_src to [r_base + offset] using the encoding selected by
// |move_type|. Mirror image of OpMovRegMem: GP move types require a core
// source, FP/vector move types require an xmm source; 64-bit GP and
// lo/hi half-of-64 FP moves are unsupported here.
LIR* X86Mir2Lir::OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type) {
  DCHECK(!r_base.IsFloat());
  // For a register pair the low half's number is used as the source.
  int src = r_src.IsPair() ? r_src.GetLowReg() : r_src.GetReg();

  X86OpCode opcode = kX86Nop;
  switch (move_type) {
    case kMov8GP:
      CHECK(!r_src.IsFloat());
      opcode = kX86Mov8MR;
      break;
    case kMov16GP:
      CHECK(!r_src.IsFloat());
      opcode = kX86Mov16MR;
      break;
    case kMov32GP:
      CHECK(!r_src.IsFloat());
      opcode = kX86Mov32MR;
      break;
    case kMov32FP:
      CHECK(r_src.IsFloat());
      opcode = kX86MovssMR;
      break;
    case kMov64FP:
      CHECK(r_src.IsFloat());
      opcode = kX86MovsdMR;
      break;
    case kMovU128FP:
      CHECK(r_src.IsFloat());
      opcode = kX86MovupsMR;  // Unaligned 128-bit store.
      break;
    case kMovA128FP:
      CHECK(r_src.IsFloat());
      opcode = kX86MovapsMR;  // Aligned 128-bit store.
      break;
    case kMovLo128FP:
      CHECK(r_src.IsFloat());
      opcode = kX86MovlpsMR;
      break;
    case kMovHi128FP:
      CHECK(r_src.IsFloat());
      opcode = kX86MovhpsMR;
      break;
    case kMov64GP:
    case kMovLo64FP:
    case kMovHi64FP:
    default:
      LOG(FATAL) << "Bad case in OpMovMemReg";
      break;
  }

  return NewLIR3(opcode, r_base.GetReg(), offset, src);
}
349
buzbee2700f7e2014-03-07 09:46:20 -0800350LIR* X86Mir2Lir::OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) {
Razvan A Lupusorubd288c22013-12-20 17:27:23 -0800351 // The only conditional reg to reg operation supported is Cmov
352 DCHECK_EQ(op, kOpCmov);
buzbee2700f7e2014-03-07 09:46:20 -0800353 return NewLIR3(kX86Cmov32RRC, r_dest.GetReg(), r_src.GetReg(), X86ConditionEncoding(cc));
Razvan A Lupusorubd288c22013-12-20 17:27:23 -0800354}
355
// r_dest = r_dest op [r_base + offset] (the move/extend variants simply
// load). Accesses through the stack pointer are annotated as Dalvik
// register accesses.
LIR* X86Mir2Lir::OpRegMem(OpKind op, RegStorage r_dest, RegStorage r_base, int offset) {
  X86OpCode opcode = kX86Nop;
  switch (op) {
    // X86 binary opcodes
    case kOpSub: opcode = kX86Sub32RM; break;
    case kOpMov: opcode = kX86Mov32RM; break;
    case kOpCmp: opcode = kX86Cmp32RM; break;
    case kOpAdd: opcode = kX86Add32RM; break;
    case kOpAnd: opcode = kX86And32RM; break;
    case kOpOr:  opcode = kX86Or32RM; break;
    case kOpXor: opcode = kX86Xor32RM; break;
    case kOp2Byte: opcode = kX86Movsx8RM; break;
    case kOp2Short: opcode = kX86Movsx16RM; break;
    case kOp2Char: opcode = kX86Movzx16RM; break;
    case kOpMul:
    default:
      LOG(FATAL) << "Bad case in OpRegMem " << op;
      break;
  }
  LIR *l = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), offset);
  if (r_base == rs_rX86_SP) {
    AnnotateDalvikRegAccess(l, offset >> 2, true /* is_load */, false /* is_64bit */);
  }
  return l;
}
381
// [sp + home(rl_dest)] = [sp + home(rl_dest)] op r_value. The destination
// Dalvik register must currently live in its memory home (not a physical
// register). The instruction is unconditionally annotated as both a load
// and a store of the slot.
LIR* X86Mir2Lir::OpMemReg(OpKind op, RegLocation rl_dest, int r_value) {
  DCHECK_NE(rl_dest.location, kLocPhysReg);
  int displacement = SRegOffset(rl_dest.s_reg_low);
  X86OpCode opcode = kX86Nop;
  switch (op) {
    case kOpSub: opcode = kX86Sub32MR; break;
    case kOpMov: opcode = kX86Mov32MR; break;
    case kOpCmp: opcode = kX86Cmp32MR; break;
    case kOpAdd: opcode = kX86Add32MR; break;
    case kOpAnd: opcode = kX86And32MR; break;
    case kOpOr:  opcode = kX86Or32MR; break;
    case kOpXor: opcode = kX86Xor32MR; break;
    case kOpLsl: opcode = kX86Sal32MC; break;  // Shift count in CL (MC forms).
    case kOpLsr: opcode = kX86Shr32MC; break;
    case kOpAsr: opcode = kX86Sar32MC; break;
    default:
      LOG(FATAL) << "Bad case in OpMemReg " << op;
      break;
  }
  LIR *l = NewLIR3(opcode, rs_rX86_SP.GetReg(), displacement, r_value);
  AnnotateDalvikRegAccess(l, displacement >> 2, true /* is_load */, false /* is_64bit */);
  AnnotateDalvikRegAccess(l, displacement >> 2, false /* is_load */, false /* is_64bit */);
  return l;
}
406
// r_dest = r_dest op [sp + home(rl_value)]. The source Dalvik register must
// currently live in its memory home. Annotated as a load of that register.
LIR* X86Mir2Lir::OpRegMem(OpKind op, RegStorage r_dest, RegLocation rl_value) {
  DCHECK_NE(rl_value.location, kLocPhysReg);
  int displacement = SRegOffset(rl_value.s_reg_low);
  X86OpCode opcode = kX86Nop;
  switch (op) {
    case kOpSub: opcode = kX86Sub32RM; break;
    case kOpMov: opcode = kX86Mov32RM; break;
    case kOpCmp: opcode = kX86Cmp32RM; break;
    case kOpAdd: opcode = kX86Add32RM; break;
    case kOpAnd: opcode = kX86And32RM; break;
    case kOpOr:  opcode = kX86Or32RM; break;
    case kOpXor: opcode = kX86Xor32RM; break;
    case kOpMul: opcode = kX86Imul32RM; break;
    default:
      LOG(FATAL) << "Bad case in OpRegMem " << op;
      break;
  }
  LIR *l = NewLIR3(opcode, r_dest.GetReg(), rs_rX86_SP.GetReg(), displacement);
  AnnotateDalvikRegAccess(l, displacement >> 2, true /* is_load */, false /* is_64bit */);
  return l;
}
428
// Three-operand form r_dest = r_src1 op r_src2 on a two-address ISA.
// When the destination aliases neither source, add is implemented with lea
// (or a copy+shift for r+r); other ops copy src1 then apply the two-address
// form. Aliasing cases are handled so the un-copied source is never
// clobbered before it is read.
LIR* X86Mir2Lir::OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1,
                             RegStorage r_src2) {
  if (r_dest != r_src1 && r_dest != r_src2) {
    if (op == kOpAdd) {  // lea special case, except can't encode rbp as base
      if (r_src1 == r_src2) {
        // r + r is the same as r << 1.
        OpRegCopy(r_dest, r_src1);
        return OpRegImm(kOpLsl, r_dest, 1);
      } else if (r_src1 != rs_rBP) {
        return NewLIR5(kX86Lea32RA, r_dest.GetReg(), r_src1.GetReg() /* base */,
                       r_src2.GetReg() /* index */, 0 /* scale */, 0 /* disp */);
      } else {
        // Swap the operands so rbp becomes the index rather than the base.
        return NewLIR5(kX86Lea32RA, r_dest.GetReg(), r_src2.GetReg() /* base */,
                       r_src1.GetReg() /* index */, 0 /* scale */, 0 /* disp */);
      }
    } else {
      OpRegCopy(r_dest, r_src1);
      return OpRegReg(op, r_dest, r_src2);
    }
  } else if (r_dest == r_src1) {
    return OpRegReg(op, r_dest, r_src2);
  } else {  // r_dest == r_src2
    switch (op) {
      case kOpSub:  // non-commutative
        // a - b with dest == b: negate then add.
        OpReg(kOpNeg, r_dest);
        op = kOpAdd;
        break;
      case kOpSbc:
      case kOpLsl: case kOpLsr: case kOpAsr: case kOpRor: {
        // No algebraic rewrite possible; compute in a temp, then copy over.
        RegStorage t_reg = AllocTemp();
        OpRegCopy(t_reg, r_src1);
        OpRegReg(op, t_reg, r_src2);
        LIR* res = OpRegCopyNoInsert(r_dest, t_reg);
        AppendLIR(res);
        FreeTemp(t_reg);
        return res;
      }
      case kOpAdd:  // commutative
      case kOpOr:
      case kOpAdc:
      case kOpAnd:
      case kOpXor:
        break;
      default:
        LOG(FATAL) << "Bad case in OpRegRegReg " << op;
    }
    return OpRegReg(op, r_dest, r_src1);
  }
}
477
// r_dest = r_src op value. Special cases: multiply uses the three-operand
// imul; AND with 0xFF/0xFFFF becomes a zero-extending move; add with a
// distinct destination uses lea. Everything else copies then defers to
// OpRegImm.
LIR* X86Mir2Lir::OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src, int value) {
  if (op == kOpMul) {
    X86OpCode opcode = IS_SIMM8(value) ? kX86Imul32RRI8 : kX86Imul32RRI;
    return NewLIR3(opcode, r_dest.GetReg(), r_src.GetReg(), value);
  } else if (op == kOpAnd) {
    // The 8-bit movzx requires a byte-addressable source register (Low4).
    if (value == 0xFF && r_src.Low4()) {
      return NewLIR2(kX86Movzx8RR, r_dest.GetReg(), r_src.GetReg());
    } else if (value == 0xFFFF) {
      return NewLIR2(kX86Movzx16RR, r_dest.GetReg(), r_src.GetReg());
    }
  }
  if (r_dest != r_src) {
    // Deliberately disabled (note the `false &&`) pending the LEA fix below.
    if (false && op == kOpLsl && value >= 0 && value <= 3) {  // lea shift special case
      // TODO: fix bug in LEA encoding when disp == 0
      return NewLIR5(kX86Lea32RA, r_dest.GetReg(), r5sib_no_base /* base */,
                     r_src.GetReg() /* index */, value /* scale */, 0 /* disp */);
    } else if (op == kOpAdd) {  // lea add special case
      return NewLIR5(kX86Lea32RA, r_dest.GetReg(), r_src.GetReg() /* base */,
                     rs_rX86_SP.GetReg()/*r4sib_no_index*/ /* index */, 0 /* scale */, value /* disp */);
    }
    OpRegCopy(r_dest, r_src);
  }
  return OpRegImm(op, r_dest, value);
}
502
Ian Rogersdd7624d2014-03-14 17:43:00 -0700503LIR* X86Mir2Lir::OpThreadMem(OpKind op, ThreadOffset<4> thread_offset) {
Andreas Gampe2f244e92014-05-08 03:35:25 -0700504 DCHECK_EQ(kX86, cu_->instruction_set);
505 X86OpCode opcode = kX86Bkpt;
506 switch (op) {
507 case kOpBlx: opcode = kX86CallT; break;
508 case kOpBx: opcode = kX86JmpT; break;
509 default:
510 LOG(FATAL) << "Bad opcode: " << op;
511 break;
512 }
513 return NewLIR1(opcode, thread_offset.Int32Value());
514}
515
516LIR* X86Mir2Lir::OpThreadMem(OpKind op, ThreadOffset<8> thread_offset) {
517 DCHECK_EQ(kX86_64, cu_->instruction_set);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700518 X86OpCode opcode = kX86Bkpt;
519 switch (op) {
520 case kOpBlx: opcode = kX86CallT; break;
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700521 case kOpBx: opcode = kX86JmpT; break;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700522 default:
523 LOG(FATAL) << "Bad opcode: " << op;
524 break;
525 }
Ian Rogers468532e2013-08-05 10:56:33 -0700526 return NewLIR1(opcode, thread_offset.Int32Value());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700527}
528
buzbee2700f7e2014-03-07 09:46:20 -0800529LIR* X86Mir2Lir::OpMem(OpKind op, RegStorage r_base, int disp) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700530 X86OpCode opcode = kX86Bkpt;
531 switch (op) {
532 case kOpBlx: opcode = kX86CallM; break;
533 default:
534 LOG(FATAL) << "Bad opcode: " << op;
535 break;
536 }
buzbee2700f7e2014-03-07 09:46:20 -0800537 return NewLIR2(opcode, r_base.GetReg(), disp);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700538}
539
// Materialize a 64-bit constant into r_dest. Core pairs get two immediate
// moves. FP destinations: zero is a single xorps; otherwise, when the method
// address is tracked (base_of_code_), the value is loaded from the literal
// pool via a fixed-up pc-relative load; failing that it is assembled in the
// xmm register from its two 32-bit halves with punpckldq.
LIR* X86Mir2Lir::LoadConstantWide(RegStorage r_dest, int64_t value) {
  int32_t val_lo = Low32Bits(value);
  int32_t val_hi = High32Bits(value);
  int32_t low_reg_val = r_dest.IsPair() ? r_dest.GetLowReg() : r_dest.GetReg();
  LIR *res;
  bool is_fp = r_dest.IsFloat();
  // TODO: clean this up once we fully recognize 64-bit storage containers.
  if (is_fp) {
    if (value == 0) {
      return NewLIR2(kX86XorpsRR, low_reg_val, low_reg_val);
    } else if (base_of_code_ != nullptr) {
      // We will load the value from the literal area.
      LIR* data_target = ScanLiteralPoolWide(literal_list_, val_lo, val_hi);
      if (data_target == NULL) {
        data_target = AddWideData(&literal_list_, val_lo, val_hi);
      }

      // Address the start of the method
      RegLocation rl_method = mir_graph_->GetRegLocation(base_of_code_->s_reg_low);
      rl_method = LoadValue(rl_method, kCoreReg);

      // Load the proper value from the literal area.
      // We don't know the proper offset for the value, so pick one that will force
      // 4 byte offset. We will fix this up in the assembler later to have the right
      // value.
      res = LoadBaseDisp(rl_method.reg, 256 /* bogus */, RegStorage::FloatSolo64(low_reg_val),
                         kDouble);
      res->target = data_target;
      res->flags.fixup = kFixupLoad;  // Assembler rewrites the displacement.
      SetMemRefType(res, true, kLiteral);
      store_method_addr_used_ = true;
    } else {
      // No method-address anchor: build the value directly in the xmm reg.
      if (val_lo == 0) {
        res = NewLIR2(kX86XorpsRR, low_reg_val, low_reg_val);
      } else {
        res = LoadConstantNoClobber(RegStorage::Solo32(low_reg_val), val_lo);
      }
      if (val_hi != 0) {
        // Materialize the high half in a temp, then interleave it in.
        RegStorage r_dest_hi = AllocTempDouble();
        LoadConstantNoClobber(r_dest_hi, val_hi);
        NewLIR2(kX86PunpckldqRR, low_reg_val, r_dest_hi.GetReg());
        FreeTemp(r_dest_hi);
      }
    }
  } else {
    // Core register pair: load each 32-bit half independently.
    res = LoadConstantNoClobber(r_dest.GetLow(), val_lo);
    LoadConstantNoClobber(r_dest.GetHigh(), val_hi);
  }
  return res;
}
590
buzbee2700f7e2014-03-07 09:46:20 -0800591LIR* X86Mir2Lir::LoadBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale,
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100592 int displacement, RegStorage r_dest, OpSize size) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700593 LIR *load = NULL;
594 LIR *load2 = NULL;
buzbee2700f7e2014-03-07 09:46:20 -0800595 bool is_array = r_index.Valid();
buzbee091cc402014-03-31 10:14:40 -0700596 bool pair = r_dest.IsPair();
597 bool is64bit = ((size == k64) || (size == kDouble));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700598 X86OpCode opcode = kX86Nop;
599 switch (size) {
buzbee695d13a2014-04-19 13:32:20 -0700600 case k64:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700601 case kDouble:
buzbee091cc402014-03-31 10:14:40 -0700602 if (r_dest.IsFloat()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700603 opcode = is_array ? kX86MovsdRA : kX86MovsdRM;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700604 } else {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700605 opcode = is_array ? kX86Mov32RA : kX86Mov32RM;
606 }
607 // TODO: double store is to unaligned address
608 DCHECK_EQ((displacement & 0x3), 0);
609 break;
Dmitry Petrochenko9ee801f2014-05-12 11:31:37 +0700610 case kWord:
611 if (Gen64Bit()) {
612 opcode = is_array ? kX86Mov64RA : kX86Mov64RM;
613 CHECK_EQ(is_array, false);
614 CHECK_EQ(r_dest.IsFloat(), false);
615 break;
616 } // else fall-through to k32 case
buzbee695d13a2014-04-19 13:32:20 -0700617 case k32:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700618 case kSingle:
buzbee695d13a2014-04-19 13:32:20 -0700619 case kReference: // TODO: update for reference decompression on 64-bit targets.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700620 opcode = is_array ? kX86Mov32RA : kX86Mov32RM;
buzbee091cc402014-03-31 10:14:40 -0700621 if (r_dest.IsFloat()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700622 opcode = is_array ? kX86MovssRA : kX86MovssRM;
buzbee091cc402014-03-31 10:14:40 -0700623 DCHECK(r_dest.IsFloat());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700624 }
625 DCHECK_EQ((displacement & 0x3), 0);
626 break;
627 case kUnsignedHalf:
628 opcode = is_array ? kX86Movzx16RA : kX86Movzx16RM;
629 DCHECK_EQ((displacement & 0x1), 0);
630 break;
631 case kSignedHalf:
632 opcode = is_array ? kX86Movsx16RA : kX86Movsx16RM;
633 DCHECK_EQ((displacement & 0x1), 0);
634 break;
635 case kUnsignedByte:
636 opcode = is_array ? kX86Movzx8RA : kX86Movzx8RM;
637 break;
638 case kSignedByte:
639 opcode = is_array ? kX86Movsx8RA : kX86Movsx8RM;
640 break;
641 default:
642 LOG(FATAL) << "Bad case in LoadBaseIndexedDispBody";
643 }
644
645 if (!is_array) {
646 if (!pair) {
buzbee2700f7e2014-03-07 09:46:20 -0800647 load = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), displacement + LOWORD_OFFSET);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700648 } else {
buzbee091cc402014-03-31 10:14:40 -0700649 DCHECK(!r_dest.IsFloat()); // Make sure we're not still using a pair here.
650 if (r_base == r_dest.GetLow()) {
651 load2 = NewLIR3(opcode, r_dest.GetHighReg(), r_base.GetReg(),
Brian Carlstrom7940e442013-07-12 13:46:57 -0700652 displacement + HIWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700653 load = NewLIR3(opcode, r_dest.GetLowReg(), r_base.GetReg(), displacement + LOWORD_OFFSET);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700654 } else {
buzbee091cc402014-03-31 10:14:40 -0700655 load = NewLIR3(opcode, r_dest.GetLowReg(), r_base.GetReg(), displacement + LOWORD_OFFSET);
656 load2 = NewLIR3(opcode, r_dest.GetHighReg(), r_base.GetReg(),
Brian Carlstrom7940e442013-07-12 13:46:57 -0700657 displacement + HIWORD_OFFSET);
658 }
659 }
buzbee2700f7e2014-03-07 09:46:20 -0800660 if (r_base == rs_rX86_SP) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700661 AnnotateDalvikRegAccess(load, (displacement + (pair ? LOWORD_OFFSET : 0)) >> 2,
662 true /* is_load */, is64bit);
663 if (pair) {
664 AnnotateDalvikRegAccess(load2, (displacement + HIWORD_OFFSET) >> 2,
665 true /* is_load */, is64bit);
666 }
667 }
668 } else {
669 if (!pair) {
buzbee2700f7e2014-03-07 09:46:20 -0800670 load = NewLIR5(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg(), scale,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700671 displacement + LOWORD_OFFSET);
672 } else {
buzbee091cc402014-03-31 10:14:40 -0700673 DCHECK(!r_dest.IsFloat()); // Make sure we're not still using a pair here.
674 if (r_base == r_dest.GetLow()) {
675 if (r_dest.GetHigh() == r_index) {
Mark Mendellae427c32014-01-24 09:17:22 -0800676 // We can't use either register for the first load.
buzbee2700f7e2014-03-07 09:46:20 -0800677 RegStorage temp = AllocTemp();
678 load2 = NewLIR5(opcode, temp.GetReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800679 displacement + HIWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700680 load = NewLIR5(opcode, r_dest.GetLowReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800681 displacement + LOWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700682 OpRegCopy(r_dest.GetHigh(), temp);
Mark Mendellae427c32014-01-24 09:17:22 -0800683 FreeTemp(temp);
684 } else {
buzbee091cc402014-03-31 10:14:40 -0700685 load2 = NewLIR5(opcode, r_dest.GetHighReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800686 displacement + HIWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700687 load = NewLIR5(opcode, r_dest.GetLowReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800688 displacement + LOWORD_OFFSET);
689 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700690 } else {
buzbee091cc402014-03-31 10:14:40 -0700691 if (r_dest.GetLow() == r_index) {
Mark Mendellae427c32014-01-24 09:17:22 -0800692 // We can't use either register for the first load.
buzbee2700f7e2014-03-07 09:46:20 -0800693 RegStorage temp = AllocTemp();
694 load = NewLIR5(opcode, temp.GetReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800695 displacement + LOWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700696 load2 = NewLIR5(opcode, r_dest.GetHighReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800697 displacement + HIWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700698 OpRegCopy(r_dest.GetLow(), temp);
Mark Mendellae427c32014-01-24 09:17:22 -0800699 FreeTemp(temp);
700 } else {
buzbee091cc402014-03-31 10:14:40 -0700701 load = NewLIR5(opcode, r_dest.GetLowReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800702 displacement + LOWORD_OFFSET);
buzbee091cc402014-03-31 10:14:40 -0700703 load2 = NewLIR5(opcode, r_dest.GetHighReg(), r_base.GetReg(), r_index.GetReg(), scale,
Mark Mendellae427c32014-01-24 09:17:22 -0800704 displacement + HIWORD_OFFSET);
705 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700706 }
707 }
708 }
709
710 return load;
711}
712
713/* Load value from base + scaled index. */
buzbee2700f7e2014-03-07 09:46:20 -0800714LIR* X86Mir2Lir::LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest,
715 int scale, OpSize size) {
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100716 return LoadBaseIndexedDisp(r_base, r_index, scale, 0, r_dest, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700717}
718
Vladimir Marko674744e2014-04-24 15:18:26 +0100719LIR* X86Mir2Lir::LoadBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_dest,
720 OpSize size) {
721 // LoadBaseDisp() will emit correct insn for atomic load on x86
722 // assuming r_dest is correctly prepared using RegClassForFieldLoadStore().
723 return LoadBaseDisp(r_base, displacement, r_dest, size);
724}
725
buzbee091cc402014-03-31 10:14:40 -0700726LIR* X86Mir2Lir::LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100727 OpSize size) {
buzbee091cc402014-03-31 10:14:40 -0700728 return LoadBaseIndexedDisp(r_base, RegStorage::InvalidReg(), 0, displacement, r_dest,
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100729 size);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700730}
731
/*
 * Store r_src (a single register or a core-register pair) to
 * [r_base + (r_index << scale) + displacement].  When r_index is invalid this
 * degenerates to a plain base+displacement store.  Returns the LIR of the
 * first store emitted (the low word, for pairs) so callers can annotate or
 * patch it.
 */
LIR* X86Mir2Lir::StoreBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale,
                                      int displacement, RegStorage r_src, OpSize size) {
  LIR *store = NULL;
  LIR *store2 = NULL;  // Second store for 64-bit values held in a register pair.
  bool is_array = r_index.Valid();  // Valid index register => indexed (array) form.
  bool pair = r_src.IsPair();
  bool is64bit = (size == k64) || (size == kDouble);
  // Select the x86 opcode: "AR" variants take base+index+scale+disp (array),
  // "MR" variants take base+disp (memory) addressing.
  X86OpCode opcode = kX86Nop;
  switch (size) {
    case k64:
    case kDouble:
      if (r_src.IsFloat()) {
        // 64-bit XMM store handles the whole value in one instruction.
        opcode = is_array ? kX86MovsdAR : kX86MovsdMR;
      } else {
        if (Gen64Bit()) {
          opcode = is_array ? kX86Mov64AR : kX86Mov64MR;
        } else {
          // TODO(64): pair = true;
          opcode = is_array ? kX86Mov32AR : kX86Mov32MR;
        }
      }
      // TODO: double store is to unaligned address
      DCHECK_EQ((displacement & 0x3), 0);
      break;
    case kWord:
      // kWord is pointer-sized: 64-bit mov only on 64-bit targets.
      if (Gen64Bit()) {
        opcode = is_array ? kX86Mov64AR : kX86Mov64MR;
        CHECK_EQ(is_array, false);
        CHECK_EQ(r_src.IsFloat(), false);
        break;
      }  // else fall-through to k32 case
    case k32:
    case kSingle:
    case kReference:
      opcode = is_array ? kX86Mov32AR : kX86Mov32MR;
      if (r_src.IsFloat()) {
        // Single-precision value living in an XMM register.
        opcode = is_array ? kX86MovssAR : kX86MovssMR;
        DCHECK(r_src.IsSingle());
      }
      DCHECK_EQ((displacement & 0x3), 0);
      break;
    case kUnsignedHalf:
    case kSignedHalf:
      opcode = is_array ? kX86Mov16AR : kX86Mov16MR;
      DCHECK_EQ((displacement & 0x1), 0);
      break;
    case kUnsignedByte:
    case kSignedByte:
      opcode = is_array ? kX86Mov8AR : kX86Mov8MR;
      break;
    default:
      LOG(FATAL) << "Bad case in StoreBaseIndexedDispBody";
  }

  if (!is_array) {
    // Base + displacement addressing.
    if (!pair) {
      store = NewLIR3(opcode, r_base.GetReg(), displacement + LOWORD_OFFSET, r_src.GetReg());
    } else {
      DCHECK(!r_src.IsFloat());  // Make sure we're not still using a pair here.
      store = NewLIR3(opcode, r_base.GetReg(), displacement + LOWORD_OFFSET, r_src.GetLowReg());
      store2 = NewLIR3(opcode, r_base.GetReg(), displacement + HIWORD_OFFSET, r_src.GetHighReg());
    }
    // Stores through the stack pointer are Dalvik vreg accesses; record them
    // for GC maps / verification.
    if (r_base == rs_rX86_SP) {
      AnnotateDalvikRegAccess(store, (displacement + (pair ? LOWORD_OFFSET : 0)) >> 2,
                              false /* is_load */, is64bit);
      if (pair) {
        AnnotateDalvikRegAccess(store2, (displacement + HIWORD_OFFSET) >> 2,
                                false /* is_load */, is64bit);
      }
    }
  } else {
    // Base + (index << scale) + displacement addressing.
    if (!pair) {
      store = NewLIR5(opcode, r_base.GetReg(), r_index.GetReg(), scale,
                      displacement + LOWORD_OFFSET, r_src.GetReg());
    } else {
      DCHECK(!r_src.IsFloat());  // Make sure we're not still using a pair here.
      store = NewLIR5(opcode, r_base.GetReg(), r_index.GetReg(), scale,
                      displacement + LOWORD_OFFSET, r_src.GetLowReg());
      store2 = NewLIR5(opcode, r_base.GetReg(), r_index.GetReg(), scale,
                       displacement + HIWORD_OFFSET, r_src.GetHighReg());
    }
  }
  return store;
}
816
817/* store value base base + scaled index. */
buzbee2700f7e2014-03-07 09:46:20 -0800818LIR* X86Mir2Lir::StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700819 int scale, OpSize size) {
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100820 return StoreBaseIndexedDisp(r_base, r_index, scale, 0, r_src, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700821}
822
Vladimir Marko674744e2014-04-24 15:18:26 +0100823LIR* X86Mir2Lir::StoreBaseDispVolatile(RegStorage r_base, int displacement,
824 RegStorage r_src, OpSize size) {
825 // StoreBaseDisp() will emit correct insn for atomic store on x86
826 // assuming r_dest is correctly prepared using RegClassForFieldLoadStore().
827 return StoreBaseDisp(r_base, displacement, r_src, size);
828}
829
buzbee2700f7e2014-03-07 09:46:20 -0800830LIR* X86Mir2Lir::StoreBaseDisp(RegStorage r_base, int displacement,
831 RegStorage r_src, OpSize size) {
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100832 return StoreBaseIndexedDisp(r_base, RegStorage::InvalidReg(), 0, displacement, r_src, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700833}
834
buzbee2700f7e2014-03-07 09:46:20 -0800835LIR* X86Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
Mark Mendell766e9292014-01-27 07:55:47 -0800836 int offset, int check_value, LIR* target) {
buzbee2700f7e2014-03-07 09:46:20 -0800837 NewLIR3(IS_SIMM8(check_value) ? kX86Cmp32MI8 : kX86Cmp32MI, base_reg.GetReg(), offset,
Mark Mendell766e9292014-01-27 07:55:47 -0800838 check_value);
839 LIR* branch = OpCondBranch(cond, target);
840 return branch;
841}
842
Mark Mendell67c39c42014-01-31 17:28:00 -0800843void X86Mir2Lir::AnalyzeMIR() {
844 // Assume we don't need a pointer to the base of the code.
845 cu_->NewTimingSplit("X86 MIR Analysis");
846 store_method_addr_ = false;
847
848 // Walk the MIR looking for interesting items.
849 PreOrderDfsIterator iter(mir_graph_);
850 BasicBlock* curr_bb = iter.Next();
851 while (curr_bb != NULL) {
852 AnalyzeBB(curr_bb);
853 curr_bb = iter.Next();
854 }
855
856 // Did we need a pointer to the method code?
857 if (store_method_addr_) {
858 base_of_code_ = mir_graph_->GetNewCompilerTemp(kCompilerTempVR, false);
859 } else {
860 base_of_code_ = nullptr;
861 }
862}
863
864void X86Mir2Lir::AnalyzeBB(BasicBlock * bb) {
865 if (bb->block_type == kDead) {
866 // Ignore dead blocks
867 return;
868 }
869
870 for (MIR *mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
871 int opcode = mir->dalvikInsn.opcode;
872 if (opcode >= kMirOpFirst) {
873 AnalyzeExtendedMIR(opcode, bb, mir);
874 } else {
875 AnalyzeMIR(opcode, bb, mir);
876 }
877 }
878}
879
880
881void X86Mir2Lir::AnalyzeExtendedMIR(int opcode, BasicBlock * bb, MIR *mir) {
882 switch (opcode) {
883 // Instructions referencing doubles.
884 case kMirOpFusedCmplDouble:
885 case kMirOpFusedCmpgDouble:
886 AnalyzeFPInstruction(opcode, bb, mir);
887 break;
Mark Mendelld65c51a2014-04-29 16:55:20 -0400888 case kMirOpConstVector:
889 store_method_addr_ = true;
890 break;
Mark Mendell67c39c42014-01-31 17:28:00 -0800891 default:
892 // Ignore the rest.
893 break;
894 }
895}
896
897void X86Mir2Lir::AnalyzeMIR(int opcode, BasicBlock * bb, MIR *mir) {
898 // Looking for
899 // - Do we need a pointer to the code (used for packed switches and double lits)?
900
901 switch (opcode) {
902 // Instructions referencing doubles.
903 case Instruction::CMPL_DOUBLE:
904 case Instruction::CMPG_DOUBLE:
905 case Instruction::NEG_DOUBLE:
906 case Instruction::ADD_DOUBLE:
907 case Instruction::SUB_DOUBLE:
908 case Instruction::MUL_DOUBLE:
909 case Instruction::DIV_DOUBLE:
910 case Instruction::REM_DOUBLE:
911 case Instruction::ADD_DOUBLE_2ADDR:
912 case Instruction::SUB_DOUBLE_2ADDR:
913 case Instruction::MUL_DOUBLE_2ADDR:
914 case Instruction::DIV_DOUBLE_2ADDR:
915 case Instruction::REM_DOUBLE_2ADDR:
916 AnalyzeFPInstruction(opcode, bb, mir);
917 break;
Mark Mendell55d0eac2014-02-06 11:02:52 -0800918
Mark Mendell67c39c42014-01-31 17:28:00 -0800919 // Packed switches and array fills need a pointer to the base of the method.
920 case Instruction::FILL_ARRAY_DATA:
921 case Instruction::PACKED_SWITCH:
922 store_method_addr_ = true;
923 break;
924 default:
925 // Other instructions are not interesting yet.
926 break;
927 }
928}
929
930void X86Mir2Lir::AnalyzeFPInstruction(int opcode, BasicBlock * bb, MIR *mir) {
931 // Look at all the uses, and see if they are double constants.
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700932 uint64_t attrs = MIRGraph::GetDataFlowAttributes(static_cast<Instruction::Code>(opcode));
Mark Mendell67c39c42014-01-31 17:28:00 -0800933 int next_sreg = 0;
934 if (attrs & DF_UA) {
935 if (attrs & DF_A_WIDE) {
936 AnalyzeDoubleUse(mir_graph_->GetSrcWide(mir, next_sreg));
937 next_sreg += 2;
938 } else {
939 next_sreg++;
940 }
941 }
942 if (attrs & DF_UB) {
943 if (attrs & DF_B_WIDE) {
944 AnalyzeDoubleUse(mir_graph_->GetSrcWide(mir, next_sreg));
945 next_sreg += 2;
946 } else {
947 next_sreg++;
948 }
949 }
950 if (attrs & DF_UC) {
951 if (attrs & DF_C_WIDE) {
952 AnalyzeDoubleUse(mir_graph_->GetSrcWide(mir, next_sreg));
953 }
954 }
955}
956
957void X86Mir2Lir::AnalyzeDoubleUse(RegLocation use) {
958 // If this is a double literal, we will want it in the literal pool.
959 if (use.is_const) {
960 store_method_addr_ = true;
961 }
962}
963
buzbee30adc732014-05-09 15:10:18 -0700964RegLocation X86Mir2Lir::UpdateLocTyped(RegLocation loc, int reg_class) {
965 loc = UpdateLoc(loc);
966 if ((loc.location == kLocPhysReg) && (loc.fp != loc.reg.IsFloat())) {
967 if (GetRegInfo(loc.reg)->IsTemp()) {
968 Clobber(loc.reg);
969 FreeTemp(loc.reg);
970 loc.reg = RegStorage::InvalidReg();
971 loc.location = kLocDalvikFrame;
972 }
973 }
974 return loc;
975}
976
977RegLocation X86Mir2Lir::UpdateLocWideTyped(RegLocation loc, int reg_class) {
978 loc = UpdateLocWide(loc);
979 if ((loc.location == kLocPhysReg) && (loc.fp != loc.reg.IsFloat())) {
980 if (GetRegInfo(loc.reg)->IsTemp()) {
981 Clobber(loc.reg);
982 FreeTemp(loc.reg);
983 loc.reg = RegStorage::InvalidReg();
984 loc.location = kLocDalvikFrame;
985 }
986 }
987 return loc;
988}
989
Brian Carlstrom7940e442013-07-12 13:46:57 -0700990} // namespace art