blob: edb3b23493df266cdf597ad15898a5553508032c [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_internals.h"
18#include "dex/dataflow_iterator-inl.h"
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080019#include "dex/quick/dex_file_method_inliner.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070020#include "mir_to_lir-inl.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070021#include "thread-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070022
23namespace art {
24
buzbeea0cd2d72014-06-01 09:33:49 -070025RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
26 RegisterClass res;
27 switch (shorty_type) {
28 case 'L':
29 res = kRefReg;
30 break;
31 case 'F':
32 // Expected fallthrough.
33 case 'D':
34 res = kFPReg;
35 break;
36 default:
37 res = kCoreReg;
38 }
39 return res;
40}
41
42RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
43 RegisterClass res;
44 if (loc.fp) {
45 DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
46 res = kFPReg;
47 } else if (loc.ref) {
48 res = kRefReg;
49 } else {
50 res = kCoreReg;
51 }
52 return res;
53}
54
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080055void Mir2Lir::LockArg(int in_position, bool wide) {
buzbee2700f7e2014-03-07 09:46:20 -080056 RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
57 RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
58 RegStorage::InvalidReg();
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080059
buzbee2700f7e2014-03-07 09:46:20 -080060 if (reg_arg_low.Valid()) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080061 LockTemp(reg_arg_low);
62 }
buzbeeb5860fb2014-06-21 15:31:01 -070063 if (reg_arg_high.Valid() && reg_arg_low.NotExactlyEquals(reg_arg_high)) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080064 LockTemp(reg_arg_high);
65 }
66}
67
buzbee33ae5582014-06-12 14:56:32 -070068// TODO: simplify when 32-bit targets go hard-float.
// Materialize method argument |in_position| in a register of |reg_class|,
// loading from the caller's out area on the stack when the argument was not
// mapped to a physical register. Returns the register (a pair for wide values
// on 32-bit targets) holding the value; the returned register always matches
// |reg_class|.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  // Incoming args are Dalvik vregs in the caller's out area; tag the memory
  // references accordingly for the resource-mask machinery.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86-64, it moves the stack pointer in order to push return.
     * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    // 64-bit targets: the argument occupies a single register or stack slot,
    // even when wide; no register pairs are involved.
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      // Not mapped to a physical register: load it from the stack into a
      // fresh temp of the requested class.
      RegStorage new_reg =
          wide ?  AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  // 32-bit targets: a wide argument may be split between registers and memory.
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_regs, k64, kNotVolatile);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetPtrReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}
161
// TODO: simplify when 32-bit targets go hard-float.
// Load method argument |in_position| directly into the already-assigned
// registers of |rl_dest|, copying from the mapped physical register(s) or
// loading from the caller's out area on the stack, as appropriate for each
// half of the value.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  // Incoming args are Dalvik vregs in the caller's out area; tag the memory
  // references accordingly for the resource-mask machinery.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86-64, it moves the stack pointer in order to push return.
     * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint64_t);
  }

  if (!rl_dest.wide) {
    // Narrow value: single register or single 32-bit stack slot.
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg);
    }
  } else {
    if (cu_->target64) {
      // 64-bit targets hold a wide value in one register or one stack slot.
      RegStorage reg = GetArgMappingToPhysicalReg(in_position);
      if (reg.Valid()) {
        OpRegCopy(rl_dest.reg, reg);
      } else {
        LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
      }
      return;
    }

    // 32-bit targets: each half may independently be in a register or on the
    // stack, giving four combinations below.
    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      // Both halves in memory: one 64-bit load.
      LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
    }
  }
}
217
// Generate fast-path code for a simple getter that reads a field of "this"
// and returns it. Returns false (with no code emitted) when the special case
// cannot be applied; once past the "point of no return" it must succeed.
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  // Bail out if the target cannot perform an atomic load of this size.
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  // shorty[0] is the return type of the inlined method.
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  // Load into a temp of the field's register class if the return register's
  // class differs; copy over afterwards.
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (ref) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    // Move the loaded value into the actual return register(s).
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
260
261bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
262 // FastInstance() already checked by DexFileMethodInliner.
263 const InlineIGetIPutData& data = special.d.ifield_data;
Vladimir Markoe1fced12014-04-04 14:52:53 +0100264 if (data.method_is_static != 0u || data.object_arg != 0u) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800265 // The object is not "this" and has to be null-checked.
266 return false;
267 }
Vladimir Markoe1fced12014-04-04 14:52:53 +0100268 if (data.return_arg_plus1 != 0u) {
269 // The setter returns a method argument which we don't support here.
270 return false;
271 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800272
Vladimir Markoe3e02602014-03-12 15:42:41 +0000273 bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE));
Vladimir Marko455759b2014-05-06 20:49:36 +0100274 bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
275 OpSize size = LoadStoreOpSize(wide, ref);
Vladimir Marko674744e2014-04-24 15:18:26 +0100276 if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
277 return false;
278 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800279
280 // Point of no return - no aborts after this
281 GenPrintLabel(mir);
282 LockArg(data.object_arg);
283 LockArg(data.src_arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700284 RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100285 RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
286 RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide);
Andreas Gampe3c12c512014-06-24 18:46:29 +0000287 if (ref) {
288 StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
Vladimir Marko674744e2014-04-24 15:18:26 +0100289 } else {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000290 StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
291 kNotVolatile);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800292 }
Vladimir Marko455759b2014-05-06 20:49:36 +0100293 if (ref) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800294 MarkGCCard(reg_src, reg_obj);
295 }
296 return true;
297}
298
299bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
300 const InlineReturnArgData& data = special.d.return_data;
Vladimir Markoe3e02602014-03-12 15:42:41 +0000301 bool wide = (data.is_wide != 0u);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800302
303 // Point of no return - no aborts after this
304 GenPrintLabel(mir);
305 LockArg(data.arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700306 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
307 RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800308 LoadArgDirect(data.arg, rl_dest);
309 return true;
310}
311
312/*
313 * Special-case code generation for simple non-throwing leaf methods.
314 */
// Dispatch the special-case (inlined leaf method) code generators and, on
// success, emit the frameless exit sequence and reset the frame/spill state.
// Returns whether the special case was actually generated.
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      // Empty method: nothing but the return itself.
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a narrow constant: load it into the return register.
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      // Unknown special opcode: fall through with successful == false.
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // The special case is frameless: clear all frame/spill bookkeeping so the
    // recorded method metadata matches the generated code.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
379
Brian Carlstrom7940e442013-07-12 13:46:57 -0700380/*
381 * Target-independent code generation. Use only high-level
382 * load/store utilities here, or target-dependent genXX() handlers
383 * when necessary.
384 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700385void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700386 RegLocation rl_src[3];
387 RegLocation rl_dest = mir_graph_->GetBadLoc();
388 RegLocation rl_result = mir_graph_->GetBadLoc();
389 Instruction::Code opcode = mir->dalvikInsn.opcode;
390 int opt_flags = mir->optimization_flags;
391 uint32_t vB = mir->dalvikInsn.vB;
392 uint32_t vC = mir->dalvikInsn.vC;
buzbee082833c2014-05-17 23:16:26 -0700393 DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
394 << std::hex << current_dalvik_offset_;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700395
396 // Prep Src and Dest locations.
397 int next_sreg = 0;
398 int next_loc = 0;
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700399 uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700400 rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
401 if (attrs & DF_UA) {
402 if (attrs & DF_A_WIDE) {
403 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
404 next_sreg+= 2;
405 } else {
406 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
407 next_sreg++;
408 }
409 }
410 if (attrs & DF_UB) {
411 if (attrs & DF_B_WIDE) {
412 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
413 next_sreg+= 2;
414 } else {
415 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
416 next_sreg++;
417 }
418 }
419 if (attrs & DF_UC) {
420 if (attrs & DF_C_WIDE) {
421 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
422 } else {
423 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
424 }
425 }
426 if (attrs & DF_DA) {
427 if (attrs & DF_A_WIDE) {
428 rl_dest = mir_graph_->GetDestWide(mir);
429 } else {
430 rl_dest = mir_graph_->GetDest(mir);
431 }
432 }
433 switch (opcode) {
434 case Instruction::NOP:
435 break;
436
437 case Instruction::MOVE_EXCEPTION:
438 GenMoveException(rl_dest);
439 break;
440
441 case Instruction::RETURN_VOID:
442 if (((cu_->access_flags & kAccConstructor) != 0) &&
443 cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
444 cu_->class_def_idx)) {
445 GenMemBarrier(kStoreStore);
446 }
Wei Jin04f4d8a2014-05-29 18:04:29 -0700447 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700448 GenSuspendTest(opt_flags);
449 }
450 break;
451
Brian Carlstrom7940e442013-07-12 13:46:57 -0700452 case Instruction::RETURN_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700453 DCHECK(rl_src[0].ref);
454 // Intentional fallthrough.
455 case Instruction::RETURN:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700456 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700457 GenSuspendTest(opt_flags);
458 }
buzbeea0cd2d72014-06-01 09:33:49 -0700459 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
460 StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700461 break;
462
463 case Instruction::RETURN_WIDE:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700464 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700465 GenSuspendTest(opt_flags);
466 }
buzbeea0cd2d72014-06-01 09:33:49 -0700467 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
468 StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700469 break;
470
471 case Instruction::MOVE_RESULT_WIDE:
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000472 if ((opt_flags & MIR_INLINED) != 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700473 break; // Nop - combined w/ previous invoke.
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000474 }
buzbeea0cd2d72014-06-01 09:33:49 -0700475 StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700476 break;
477
478 case Instruction::MOVE_RESULT:
479 case Instruction::MOVE_RESULT_OBJECT:
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000480 if ((opt_flags & MIR_INLINED) != 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700481 break; // Nop - combined w/ previous invoke.
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000482 }
buzbeea0cd2d72014-06-01 09:33:49 -0700483 StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700484 break;
485
486 case Instruction::MOVE:
487 case Instruction::MOVE_OBJECT:
488 case Instruction::MOVE_16:
489 case Instruction::MOVE_OBJECT_16:
490 case Instruction::MOVE_FROM16:
491 case Instruction::MOVE_OBJECT_FROM16:
492 StoreValue(rl_dest, rl_src[0]);
493 break;
494
495 case Instruction::MOVE_WIDE:
496 case Instruction::MOVE_WIDE_16:
497 case Instruction::MOVE_WIDE_FROM16:
498 StoreValueWide(rl_dest, rl_src[0]);
499 break;
500
501 case Instruction::CONST:
502 case Instruction::CONST_4:
503 case Instruction::CONST_16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400504 GenConst(rl_dest, vB);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700505 break;
506
507 case Instruction::CONST_HIGH16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400508 GenConst(rl_dest, vB << 16);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700509 break;
510
511 case Instruction::CONST_WIDE_16:
512 case Instruction::CONST_WIDE_32:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000513 GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700514 break;
515
516 case Instruction::CONST_WIDE:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000517 GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700518 break;
519
520 case Instruction::CONST_WIDE_HIGH16:
521 rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800522 LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700523 StoreValueWide(rl_dest, rl_result);
524 break;
525
526 case Instruction::MONITOR_ENTER:
527 GenMonitorEnter(opt_flags, rl_src[0]);
528 break;
529
530 case Instruction::MONITOR_EXIT:
531 GenMonitorExit(opt_flags, rl_src[0]);
532 break;
533
534 case Instruction::CHECK_CAST: {
535 GenCheckCast(mir->offset, vB, rl_src[0]);
536 break;
537 }
538 case Instruction::INSTANCE_OF:
539 GenInstanceof(vC, rl_dest, rl_src[0]);
540 break;
541
542 case Instruction::NEW_INSTANCE:
543 GenNewInstance(vB, rl_dest);
544 break;
545
546 case Instruction::THROW:
547 GenThrow(rl_src[0]);
548 break;
549
550 case Instruction::ARRAY_LENGTH:
551 int len_offset;
552 len_offset = mirror::Array::LengthOffset().Int32Value();
buzbeea0cd2d72014-06-01 09:33:49 -0700553 rl_src[0] = LoadValue(rl_src[0], kRefReg);
buzbee2700f7e2014-03-07 09:46:20 -0800554 GenNullCheck(rl_src[0].reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700555 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee695d13a2014-04-19 13:32:20 -0700556 Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700557 MarkPossibleNullPointerException(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700558 StoreValue(rl_dest, rl_result);
559 break;
560
561 case Instruction::CONST_STRING:
562 case Instruction::CONST_STRING_JUMBO:
563 GenConstString(vB, rl_dest);
564 break;
565
566 case Instruction::CONST_CLASS:
567 GenConstClass(vB, rl_dest);
568 break;
569
570 case Instruction::FILL_ARRAY_DATA:
571 GenFillArrayData(vB, rl_src[0]);
572 break;
573
574 case Instruction::FILLED_NEW_ARRAY:
575 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
576 false /* not range */));
577 break;
578
579 case Instruction::FILLED_NEW_ARRAY_RANGE:
580 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
581 true /* range */));
582 break;
583
584 case Instruction::NEW_ARRAY:
585 GenNewArray(vC, rl_dest, rl_src[0]);
586 break;
587
588 case Instruction::GOTO:
589 case Instruction::GOTO_16:
590 case Instruction::GOTO_32:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700591 if (mir_graph_->IsBackedge(bb, bb->taken) &&
592 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
buzbee0d829482013-10-11 15:24:55 -0700593 GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700594 } else {
buzbee0d829482013-10-11 15:24:55 -0700595 OpUnconditionalBranch(&label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700596 }
597 break;
598
599 case Instruction::PACKED_SWITCH:
600 GenPackedSwitch(mir, vB, rl_src[0]);
601 break;
602
603 case Instruction::SPARSE_SWITCH:
604 GenSparseSwitch(mir, vB, rl_src[0]);
605 break;
606
607 case Instruction::CMPL_FLOAT:
608 case Instruction::CMPG_FLOAT:
609 case Instruction::CMPL_DOUBLE:
610 case Instruction::CMPG_DOUBLE:
611 GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
612 break;
613
614 case Instruction::CMP_LONG:
615 GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
616 break;
617
618 case Instruction::IF_EQ:
619 case Instruction::IF_NE:
620 case Instruction::IF_LT:
621 case Instruction::IF_GE:
622 case Instruction::IF_GT:
623 case Instruction::IF_LE: {
buzbee0d829482013-10-11 15:24:55 -0700624 LIR* taken = &label_list[bb->taken];
625 LIR* fall_through = &label_list[bb->fall_through];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700626 // Result known at compile time?
627 if (rl_src[0].is_const && rl_src[1].is_const) {
628 bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
629 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
buzbee0d829482013-10-11 15:24:55 -0700630 BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
Wei Jin04f4d8a2014-05-29 18:04:29 -0700631 if (mir_graph_->IsBackedge(bb, target_id) &&
632 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700633 GenSuspendTest(opt_flags);
634 }
buzbee0d829482013-10-11 15:24:55 -0700635 OpUnconditionalBranch(&label_list[target_id]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700636 } else {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700637 if (mir_graph_->IsBackwardsBranch(bb) &&
638 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
639 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700640 GenSuspendTest(opt_flags);
641 }
buzbee0d829482013-10-11 15:24:55 -0700642 GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700643 }
644 break;
645 }
646
647 case Instruction::IF_EQZ:
648 case Instruction::IF_NEZ:
649 case Instruction::IF_LTZ:
650 case Instruction::IF_GEZ:
651 case Instruction::IF_GTZ:
652 case Instruction::IF_LEZ: {
buzbee0d829482013-10-11 15:24:55 -0700653 LIR* taken = &label_list[bb->taken];
654 LIR* fall_through = &label_list[bb->fall_through];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700655 // Result known at compile time?
656 if (rl_src[0].is_const) {
657 bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
buzbee0d829482013-10-11 15:24:55 -0700658 BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
Wei Jin04f4d8a2014-05-29 18:04:29 -0700659 if (mir_graph_->IsBackedge(bb, target_id) &&
660 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700661 GenSuspendTest(opt_flags);
662 }
buzbee0d829482013-10-11 15:24:55 -0700663 OpUnconditionalBranch(&label_list[target_id]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700664 } else {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700665 if (mir_graph_->IsBackwardsBranch(bb) &&
666 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
667 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700668 GenSuspendTest(opt_flags);
669 }
670 GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through);
671 }
672 break;
673 }
674
675 case Instruction::AGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700676 GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
677 break;
678 case Instruction::AGET_OBJECT:
679 GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700680 break;
681 case Instruction::AGET:
buzbee695d13a2014-04-19 13:32:20 -0700682 GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700683 break;
684 case Instruction::AGET_BOOLEAN:
685 GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
686 break;
687 case Instruction::AGET_BYTE:
688 GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
689 break;
690 case Instruction::AGET_CHAR:
691 GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
692 break;
693 case Instruction::AGET_SHORT:
694 GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
695 break;
696 case Instruction::APUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700697 GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700698 break;
699 case Instruction::APUT:
buzbee695d13a2014-04-19 13:32:20 -0700700 GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700701 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700702 case Instruction::APUT_OBJECT: {
703 bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
704 bool is_safe = is_null; // Always safe to store null.
705 if (!is_safe) {
706 // Check safety from verifier type information.
Vladimir Marko2730db02014-01-27 11:15:17 +0000707 const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
708 is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
Ian Rogersa9a82542013-10-04 11:17:26 -0700709 }
710 if (is_null || is_safe) {
711 // Store of constant null doesn't require an assignability test and can be generated inline
712 // without fixed register usage or a card mark.
buzbee695d13a2014-04-19 13:32:20 -0700713 GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
Ian Rogersa9a82542013-10-04 11:17:26 -0700714 } else {
715 GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
716 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700717 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700718 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700719 case Instruction::APUT_SHORT:
720 case Instruction::APUT_CHAR:
Ian Rogersa9a82542013-10-04 11:17:26 -0700721 GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700722 break;
723 case Instruction::APUT_BYTE:
724 case Instruction::APUT_BOOLEAN:
Ian Rogersa9a82542013-10-04 11:17:26 -0700725 GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700726 break;
727
728 case Instruction::IGET_OBJECT:
buzbee695d13a2014-04-19 13:32:20 -0700729 GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700730 break;
731
732 case Instruction::IGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700733 GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700734 break;
735
736 case Instruction::IGET:
buzbee695d13a2014-04-19 13:32:20 -0700737 GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700738 break;
739
740 case Instruction::IGET_CHAR:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000741 GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700742 break;
743
744 case Instruction::IGET_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000745 GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700746 break;
747
748 case Instruction::IGET_BOOLEAN:
749 case Instruction::IGET_BYTE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000750 GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700751 break;
752
753 case Instruction::IPUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700754 GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700755 break;
756
757 case Instruction::IPUT_OBJECT:
buzbee695d13a2014-04-19 13:32:20 -0700758 GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700759 break;
760
761 case Instruction::IPUT:
buzbee695d13a2014-04-19 13:32:20 -0700762 GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700763 break;
764
765 case Instruction::IPUT_BOOLEAN:
766 case Instruction::IPUT_BYTE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000767 GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700768 break;
769
770 case Instruction::IPUT_CHAR:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000771 GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700772 break;
773
774 case Instruction::IPUT_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000775 GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700776 break;
777
778 case Instruction::SGET_OBJECT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000779 GenSget(mir, rl_dest, false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700780 break;
781 case Instruction::SGET:
782 case Instruction::SGET_BOOLEAN:
783 case Instruction::SGET_BYTE:
784 case Instruction::SGET_CHAR:
785 case Instruction::SGET_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000786 GenSget(mir, rl_dest, false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700787 break;
788
789 case Instruction::SGET_WIDE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000790 GenSget(mir, rl_dest, true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700791 break;
792
793 case Instruction::SPUT_OBJECT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000794 GenSput(mir, rl_src[0], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700795 break;
796
797 case Instruction::SPUT:
798 case Instruction::SPUT_BOOLEAN:
799 case Instruction::SPUT_BYTE:
800 case Instruction::SPUT_CHAR:
801 case Instruction::SPUT_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000802 GenSput(mir, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700803 break;
804
805 case Instruction::SPUT_WIDE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000806 GenSput(mir, rl_src[0], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700807 break;
808
809 case Instruction::INVOKE_STATIC_RANGE:
810 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700811 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
812 // If the invocation is not inlined, we can assume there is already a
813 // suspend check at the return site
814 mir_graph_->AppendGenSuspendTestList(bb);
815 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700816 break;
817 case Instruction::INVOKE_STATIC:
818 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700819 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
820 mir_graph_->AppendGenSuspendTestList(bb);
821 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700822 break;
823
824 case Instruction::INVOKE_DIRECT:
825 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700826 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
827 mir_graph_->AppendGenSuspendTestList(bb);
828 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700829 break;
830 case Instruction::INVOKE_DIRECT_RANGE:
831 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700832 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
833 mir_graph_->AppendGenSuspendTestList(bb);
834 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700835 break;
836
837 case Instruction::INVOKE_VIRTUAL:
838 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700839 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
840 mir_graph_->AppendGenSuspendTestList(bb);
841 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700842 break;
843 case Instruction::INVOKE_VIRTUAL_RANGE:
844 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700845 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
846 mir_graph_->AppendGenSuspendTestList(bb);
847 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700848 break;
849
850 case Instruction::INVOKE_SUPER:
851 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700852 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
853 mir_graph_->AppendGenSuspendTestList(bb);
854 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700855 break;
856 case Instruction::INVOKE_SUPER_RANGE:
857 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700858 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
859 mir_graph_->AppendGenSuspendTestList(bb);
860 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700861 break;
862
863 case Instruction::INVOKE_INTERFACE:
864 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700865 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
866 mir_graph_->AppendGenSuspendTestList(bb);
867 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700868 break;
869 case Instruction::INVOKE_INTERFACE_RANGE:
870 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700871 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
872 mir_graph_->AppendGenSuspendTestList(bb);
873 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700874 break;
875
876 case Instruction::NEG_INT:
877 case Instruction::NOT_INT:
878 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]);
879 break;
880
881 case Instruction::NEG_LONG:
882 case Instruction::NOT_LONG:
883 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]);
884 break;
885
886 case Instruction::NEG_FLOAT:
887 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
888 break;
889
890 case Instruction::NEG_DOUBLE:
891 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
892 break;
893
894 case Instruction::INT_TO_LONG:
895 GenIntToLong(rl_dest, rl_src[0]);
896 break;
897
898 case Instruction::LONG_TO_INT:
899 rl_src[0] = UpdateLocWide(rl_src[0]);
buzbeea0cd2d72014-06-01 09:33:49 -0700900 rl_src[0] = NarrowRegLoc(rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700901 StoreValue(rl_dest, rl_src[0]);
902 break;
903
904 case Instruction::INT_TO_BYTE:
905 case Instruction::INT_TO_SHORT:
906 case Instruction::INT_TO_CHAR:
907 GenIntNarrowing(opcode, rl_dest, rl_src[0]);
908 break;
909
910 case Instruction::INT_TO_FLOAT:
911 case Instruction::INT_TO_DOUBLE:
912 case Instruction::LONG_TO_FLOAT:
913 case Instruction::LONG_TO_DOUBLE:
914 case Instruction::FLOAT_TO_INT:
915 case Instruction::FLOAT_TO_LONG:
916 case Instruction::FLOAT_TO_DOUBLE:
917 case Instruction::DOUBLE_TO_INT:
918 case Instruction::DOUBLE_TO_LONG:
919 case Instruction::DOUBLE_TO_FLOAT:
920 GenConversion(opcode, rl_dest, rl_src[0]);
921 break;
922
923
924 case Instruction::ADD_INT:
925 case Instruction::ADD_INT_2ADDR:
926 case Instruction::MUL_INT:
927 case Instruction::MUL_INT_2ADDR:
928 case Instruction::AND_INT:
929 case Instruction::AND_INT_2ADDR:
930 case Instruction::OR_INT:
931 case Instruction::OR_INT_2ADDR:
932 case Instruction::XOR_INT:
933 case Instruction::XOR_INT_2ADDR:
934 if (rl_src[0].is_const &&
935 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) {
936 GenArithOpIntLit(opcode, rl_dest, rl_src[1],
937 mir_graph_->ConstantValue(rl_src[0].orig_sreg));
938 } else if (rl_src[1].is_const &&
939 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
940 GenArithOpIntLit(opcode, rl_dest, rl_src[0],
941 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
942 } else {
943 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
944 }
945 break;
946
947 case Instruction::SUB_INT:
948 case Instruction::SUB_INT_2ADDR:
949 case Instruction::DIV_INT:
950 case Instruction::DIV_INT_2ADDR:
951 case Instruction::REM_INT:
952 case Instruction::REM_INT_2ADDR:
953 case Instruction::SHL_INT:
954 case Instruction::SHL_INT_2ADDR:
955 case Instruction::SHR_INT:
956 case Instruction::SHR_INT_2ADDR:
957 case Instruction::USHR_INT:
958 case Instruction::USHR_INT_2ADDR:
959 if (rl_src[1].is_const &&
960 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
961 GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
962 } else {
963 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
964 }
965 break;
966
967 case Instruction::ADD_LONG:
968 case Instruction::SUB_LONG:
969 case Instruction::AND_LONG:
970 case Instruction::OR_LONG:
971 case Instruction::XOR_LONG:
972 case Instruction::ADD_LONG_2ADDR:
973 case Instruction::SUB_LONG_2ADDR:
974 case Instruction::AND_LONG_2ADDR:
975 case Instruction::OR_LONG_2ADDR:
976 case Instruction::XOR_LONG_2ADDR:
977 if (rl_src[0].is_const || rl_src[1].is_const) {
978 GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
979 break;
980 }
981 // Note: intentional fallthrough.
982
983 case Instruction::MUL_LONG:
984 case Instruction::DIV_LONG:
985 case Instruction::REM_LONG:
986 case Instruction::MUL_LONG_2ADDR:
987 case Instruction::DIV_LONG_2ADDR:
988 case Instruction::REM_LONG_2ADDR:
989 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
990 break;
991
992 case Instruction::SHL_LONG:
993 case Instruction::SHR_LONG:
994 case Instruction::USHR_LONG:
995 case Instruction::SHL_LONG_2ADDR:
996 case Instruction::SHR_LONG_2ADDR:
997 case Instruction::USHR_LONG_2ADDR:
998 if (rl_src[1].is_const) {
999 GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
1000 } else {
1001 GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
1002 }
1003 break;
1004
1005 case Instruction::ADD_FLOAT:
1006 case Instruction::SUB_FLOAT:
1007 case Instruction::MUL_FLOAT:
1008 case Instruction::DIV_FLOAT:
1009 case Instruction::REM_FLOAT:
1010 case Instruction::ADD_FLOAT_2ADDR:
1011 case Instruction::SUB_FLOAT_2ADDR:
1012 case Instruction::MUL_FLOAT_2ADDR:
1013 case Instruction::DIV_FLOAT_2ADDR:
1014 case Instruction::REM_FLOAT_2ADDR:
1015 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
1016 break;
1017
1018 case Instruction::ADD_DOUBLE:
1019 case Instruction::SUB_DOUBLE:
1020 case Instruction::MUL_DOUBLE:
1021 case Instruction::DIV_DOUBLE:
1022 case Instruction::REM_DOUBLE:
1023 case Instruction::ADD_DOUBLE_2ADDR:
1024 case Instruction::SUB_DOUBLE_2ADDR:
1025 case Instruction::MUL_DOUBLE_2ADDR:
1026 case Instruction::DIV_DOUBLE_2ADDR:
1027 case Instruction::REM_DOUBLE_2ADDR:
1028 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
1029 break;
1030
1031 case Instruction::RSUB_INT:
1032 case Instruction::ADD_INT_LIT16:
1033 case Instruction::MUL_INT_LIT16:
1034 case Instruction::DIV_INT_LIT16:
1035 case Instruction::REM_INT_LIT16:
1036 case Instruction::AND_INT_LIT16:
1037 case Instruction::OR_INT_LIT16:
1038 case Instruction::XOR_INT_LIT16:
1039 case Instruction::ADD_INT_LIT8:
1040 case Instruction::RSUB_INT_LIT8:
1041 case Instruction::MUL_INT_LIT8:
1042 case Instruction::DIV_INT_LIT8:
1043 case Instruction::REM_INT_LIT8:
1044 case Instruction::AND_INT_LIT8:
1045 case Instruction::OR_INT_LIT8:
1046 case Instruction::XOR_INT_LIT8:
1047 case Instruction::SHL_INT_LIT8:
1048 case Instruction::SHR_INT_LIT8:
1049 case Instruction::USHR_INT_LIT8:
1050 GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
1051 break;
1052
1053 default:
1054 LOG(FATAL) << "Unexpected opcode: " << opcode;
1055 }
buzbee082833c2014-05-17 23:16:26 -07001056 DCHECK(CheckCorePoolSanity());
Brian Carlstrom1895ea32013-07-18 13:28:37 -07001057} // NOLINT(readability/fn_size)
Brian Carlstrom7940e442013-07-12 13:46:57 -07001058
1059// Process extended MIR instructions
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001060void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001061 switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
1062 case kMirOpCopy: {
1063 RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
1064 RegLocation rl_dest = mir_graph_->GetDest(mir);
1065 StoreValue(rl_dest, rl_src);
1066 break;
1067 }
1068 case kMirOpFusedCmplFloat:
1069 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
1070 break;
1071 case kMirOpFusedCmpgFloat:
1072 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
1073 break;
1074 case kMirOpFusedCmplDouble:
1075 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
1076 break;
1077 case kMirOpFusedCmpgDouble:
1078 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
1079 break;
1080 case kMirOpFusedCmpLong:
1081 GenFusedLongCmpBranch(bb, mir);
1082 break;
1083 case kMirOpSelect:
1084 GenSelect(bb, mir);
1085 break;
Mark Mendelld65c51a2014-04-29 16:55:20 -04001086 case kMirOpPhi:
1087 case kMirOpNop:
1088 case kMirOpNullCheck:
1089 case kMirOpRangeCheck:
1090 case kMirOpDivZeroCheck:
1091 case kMirOpCheck:
1092 case kMirOpCheckPart2:
1093 // Ignore these known opcodes
1094 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001095 default:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001096 // Give the backends a chance to handle unknown extended MIR opcodes.
1097 GenMachineSpecificExtendedMethodMIR(bb, mir);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001098 break;
1099 }
1100}
1101
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001102void Mir2Lir::GenPrintLabel(MIR* mir) {
1103 // Mark the beginning of a Dalvik instruction for line tracking.
1104 if (cu_->verbose) {
1105 char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
1106 MarkBoundary(mir->offset, inst_str);
1107 }
1108}
1109
// Handle the content in each basic block: emit the block label, entry/exit
// sequences where applicable, then lower each MIR to LIR. Always returns
// false (this pass never reports a change to its caller).
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  // Record the block's starting dex offset on its label for later fixup/debugging.
  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    // Incoming arguments occupy the top of the frame's Dalvik registers.
    int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg],
                     mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]);
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    // Temps do not live across Dalvik instructions.
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = &kEncodeAll;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction: the check half
      // takes over the work half's opcode/meta/ssa_rep, and the work half is
      // turned into kMirOpCheckPart2 pointing back at this MIR.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    // Extended MIR opcodes are handled separately from real Dalvik instructions.
    if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}
1201
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001202bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
Vladimir Marko5816ed42013-11-27 17:04:20 +00001203 cu_->NewTimingSplit("SpecialMIR2LIR");
Brian Carlstrom7940e442013-07-12 13:46:57 -07001204 // Find the first DalvikByteCode block.
1205 int num_reachable_blocks = mir_graph_->GetNumReachableBlocks();
1206 BasicBlock*bb = NULL;
1207 for (int idx = 0; idx < num_reachable_blocks; idx++) {
1208 // TODO: no direct access of growable lists.
1209 int dfs_index = mir_graph_->GetDfsOrder()->Get(idx);
1210 bb = mir_graph_->GetBasicBlock(dfs_index);
1211 if (bb->block_type == kDalvikByteCode) {
1212 break;
1213 }
1214 }
1215 if (bb == NULL) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001216 return false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001217 }
1218 DCHECK_EQ(bb->start_offset, 0);
1219 DCHECK(bb->first_mir_insn != NULL);
1220
1221 // Get the first instruction.
1222 MIR* mir = bb->first_mir_insn;
1223
1224 // Free temp registers and reset redundant store tracking.
1225 ResetRegPool();
1226 ResetDefTracking();
buzbeeba574512014-05-12 15:13:16 -07001227 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001228
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001229 return GenSpecialCase(bb, mir, special);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001230}
1231
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001232void Mir2Lir::MethodMIR2LIR() {
buzbeea61f4952013-08-23 14:27:06 -07001233 cu_->NewTimingSplit("MIR2LIR");
1234
Brian Carlstrom7940e442013-07-12 13:46:57 -07001235 // Hold the labels of each block.
1236 block_label_list_ =
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -07001237 static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
Vladimir Marko83cc7ae2014-02-12 18:02:05 +00001238 kArenaAllocLIR));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001239
buzbee56c71782013-09-05 17:13:19 -07001240 PreOrderDfsIterator iter(mir_graph_);
buzbee252254b2013-09-08 16:20:53 -07001241 BasicBlock* curr_bb = iter.Next();
1242 BasicBlock* next_bb = iter.Next();
1243 while (curr_bb != NULL) {
1244 MethodBlockCodeGen(curr_bb);
1245 // If the fall_through block is no longer laid out consecutively, drop in a branch.
buzbee0d829482013-10-11 15:24:55 -07001246 BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
1247 if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
1248 OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
buzbee252254b2013-09-08 16:20:53 -07001249 }
1250 curr_bb = next_bb;
1251 do {
1252 next_bb = iter.Next();
1253 } while ((next_bb != NULL) && (next_bb->block_type == kDead));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001254 }
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001255 HandleSlowPaths();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001256}
1257
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001258//
1259// LIR Slow Path
1260//
1261
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001262LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001263 m2l_->SetCurrentDexPc(current_dex_pc_);
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001264 LIR* target = m2l_->NewLIR0(opcode);
Vladimir Marko3bc86152014-03-13 14:11:28 +00001265 fromfast_->target = target;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001266 return target;
1267}
Vladimir Marko3bc86152014-03-13 14:11:28 +00001268
Andreas Gampe4b537a82014-06-30 22:24:53 -07001269
1270void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
1271 bool fail, bool report)
1272 const {
1273 if (rs.Valid()) {
1274 if (ref == RefCheck::kCheckRef) {
1275 if (cu_->target64 && !rs.Is64Bit()) {
1276 if (fail) {
1277 CHECK(false) << "Reg storage not 64b for ref.";
1278 } else if (report) {
1279 LOG(WARNING) << "Reg storage not 64b for ref.";
1280 }
1281 }
1282 }
1283 if (wide == WidenessCheck::kCheckWide) {
1284 if (!rs.Is64Bit()) {
1285 if (fail) {
1286 CHECK(false) << "Reg storage not 64b for wide.";
1287 } else if (report) {
1288 LOG(WARNING) << "Reg storage not 64b for wide.";
1289 }
1290 }
1291 }
1292 // A tighter check would be nice, but for now soft-float will not check float at all.
1293 if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
1294 if (!rs.IsFloat()) {
1295 if (fail) {
1296 CHECK(false) << "Reg storage not float for fp.";
1297 } else if (report) {
1298 LOG(WARNING) << "Reg storage not float for fp.";
1299 }
1300 }
1301 } else if (fp == FPCheck::kCheckNotFP) {
1302 if (rs.IsFloat()) {
1303 if (fail) {
1304 CHECK(false) << "Reg storage float for not-fp.";
1305 } else if (report) {
1306 LOG(WARNING) << "Reg storage float for not-fp.";
1307 }
1308 }
1309 }
1310 }
1311}
1312
1313void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
1314 // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
1315 // will be stored.
1316 CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
1317 rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
1318}
1319
Brian Carlstrom7940e442013-07-12 13:46:57 -07001320} // namespace art