blob: a85be5e90c0b4de403aebf89b73067ca4881b2de [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_internals.h"
18#include "dex/dataflow_iterator-inl.h"
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080019#include "dex/quick/dex_file_method_inliner.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070020#include "mir_to_lir-inl.h"
21#include "object_utils.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070022#include "thread-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023
24namespace art {
25
buzbeea0cd2d72014-06-01 09:33:49 -070026RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
27 RegisterClass res;
28 switch (shorty_type) {
29 case 'L':
30 res = kRefReg;
31 break;
32 case 'F':
33 // Expected fallthrough.
34 case 'D':
35 res = kFPReg;
36 break;
37 default:
38 res = kCoreReg;
39 }
40 return res;
41}
42
43RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
44 RegisterClass res;
45 if (loc.fp) {
46 DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
47 res = kFPReg;
48 } else if (loc.ref) {
49 res = kRefReg;
50 } else {
51 res = kCoreReg;
52 }
53 return res;
54}
55
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080056void Mir2Lir::LockArg(int in_position, bool wide) {
buzbee2700f7e2014-03-07 09:46:20 -080057 RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
58 RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
59 RegStorage::InvalidReg();
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080060
buzbee2700f7e2014-03-07 09:46:20 -080061 if (reg_arg_low.Valid()) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080062 LockTemp(reg_arg_low);
63 }
buzbee2700f7e2014-03-07 09:46:20 -080064 if (reg_arg_high.Valid() && reg_arg_low != reg_arg_high) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080065 LockTemp(reg_arg_high);
66 }
67}
68
buzbee2700f7e2014-03-07 09:46:20 -080069// TODO: needs revisit for 64-bit.
Vladimir Markoc93ac8b2014-05-13 17:53:49 +010070RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
Nicolas Geoffray42fcd982014-04-22 11:03:52 +000071 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +070072
73 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080074 /*
75 * When doing a call for x86, it moves the stack pointer in order to push return.
76 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080077 */
78 offset += sizeof(uint32_t);
79 }
80
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +070081 if (cu_->instruction_set == kX86_64) {
82 /*
83 * When doing a call for x86, it moves the stack pointer in order to push return.
84 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
85 */
86 offset += sizeof(uint64_t);
87 }
88
89 if (cu_->instruction_set == kX86_64) {
90 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
91 if (!reg_arg.Valid()) {
92 RegStorage new_reg = wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
93 LoadBaseDisp(TargetReg(kSp), offset, new_reg, wide ? k64 : k32);
94 return new_reg;
95 } else {
96 // Check if we need to copy the arg to a different reg_class.
97 if (!RegClassMatches(reg_class, reg_arg)) {
98 if (wide) {
99 RegStorage new_reg = AllocTypedTempWide(false, reg_class);
100 OpRegCopyWide(new_reg, reg_arg);
101 reg_arg = new_reg;
102 } else {
103 RegStorage new_reg = AllocTypedTemp(false, reg_class);
104 OpRegCopy(new_reg, reg_arg);
105 reg_arg = new_reg;
106 }
107 }
108 }
109 return reg_arg;
110 }
111
112 RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
113 RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
114 RegStorage::InvalidReg();
115
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800116 // If the VR is wide and there is no register for high part, we need to load it.
buzbee2700f7e2014-03-07 09:46:20 -0800117 if (wide && !reg_arg_high.Valid()) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800118 // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
buzbee2700f7e2014-03-07 09:46:20 -0800119 if (!reg_arg_low.Valid()) {
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100120 RegStorage new_regs = AllocTypedTempWide(false, reg_class);
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100121 LoadBaseDisp(TargetReg(kSp), offset, new_regs, k64);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100122 return new_regs; // The reg_class is OK, we can return.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800123 } else {
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100124 // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
125 // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
126 DCHECK(!reg_arg_low.IsFloat());
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800127 reg_arg_high = AllocTemp();
128 int offset_high = offset + sizeof(uint32_t);
buzbee695d13a2014-04-19 13:32:20 -0700129 Load32Disp(TargetReg(kSp), offset_high, reg_arg_high);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100130 // Continue below to check the reg_class.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800131 }
132 }
133
134 // If the low part is not in a register yet, we need to load it.
buzbee2700f7e2014-03-07 09:46:20 -0800135 if (!reg_arg_low.Valid()) {
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100136 // Assume that if the low part of a wide arg is passed in memory, so is the high part,
137 // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
138 // conceivably break this assumption but Android supports only little-endian architectures.
139 DCHECK(!wide);
140 reg_arg_low = AllocTypedTemp(false, reg_class);
buzbee695d13a2014-04-19 13:32:20 -0700141 Load32Disp(TargetReg(kSp), offset, reg_arg_low);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100142 return reg_arg_low; // The reg_class is OK, we can return.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800143 }
144
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100145 RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
146 // Check if we need to copy the arg to a different reg_class.
147 if (!RegClassMatches(reg_class, reg_arg)) {
148 if (wide) {
149 RegStorage new_regs = AllocTypedTempWide(false, reg_class);
150 OpRegCopyWide(new_regs, reg_arg);
151 reg_arg = new_regs;
152 } else {
153 RegStorage new_reg = AllocTypedTemp(false, reg_class);
154 OpRegCopy(new_reg, reg_arg);
155 reg_arg = new_reg;
156 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800157 }
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100158 return reg_arg;
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800159}
160
161void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000162 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700163 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800164 /*
165 * When doing a call for x86, it moves the stack pointer in order to push return.
166 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800167 */
168 offset += sizeof(uint32_t);
169 }
170
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700171 if (cu_->instruction_set == kX86_64) {
172 /*
173 * When doing a call for x86, it moves the stack pointer in order to push return.
174 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
175 */
176 offset += sizeof(uint64_t);
177 }
178
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800179 if (!rl_dest.wide) {
buzbee2700f7e2014-03-07 09:46:20 -0800180 RegStorage reg = GetArgMappingToPhysicalReg(in_position);
181 if (reg.Valid()) {
182 OpRegCopy(rl_dest.reg, reg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800183 } else {
buzbee695d13a2014-04-19 13:32:20 -0700184 Load32Disp(TargetReg(kSp), offset, rl_dest.reg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800185 }
186 } else {
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700187 if (cu_->instruction_set == kX86_64) {
188 RegStorage reg = GetArgMappingToPhysicalReg(in_position);
189 if (reg.Valid()) {
190 OpRegCopy(rl_dest.reg, reg);
191 } else {
192 LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64);
193 }
194 return;
195 }
196
buzbee2700f7e2014-03-07 09:46:20 -0800197 RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
198 RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800199
buzbee2700f7e2014-03-07 09:46:20 -0800200 if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
201 OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
202 } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
203 OpRegCopy(rl_dest.reg, reg_arg_low);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800204 int offset_high = offset + sizeof(uint32_t);
buzbee695d13a2014-04-19 13:32:20 -0700205 Load32Disp(TargetReg(kSp), offset_high, rl_dest.reg.GetHigh());
buzbee2700f7e2014-03-07 09:46:20 -0800206 } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
207 OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
buzbee695d13a2014-04-19 13:32:20 -0700208 Load32Disp(TargetReg(kSp), offset, rl_dest.reg.GetLow());
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800209 } else {
Vladimir Marko3bf7c602014-05-07 14:55:43 +0100210 LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800211 }
212 }
213}
214
215bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
216 // FastInstance() already checked by DexFileMethodInliner.
217 const InlineIGetIPutData& data = special.d.ifield_data;
Vladimir Markoe1fced12014-04-04 14:52:53 +0100218 if (data.method_is_static != 0u || data.object_arg != 0u) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800219 // The object is not "this" and has to be null-checked.
220 return false;
221 }
222
Vladimir Markoe3e02602014-03-12 15:42:41 +0000223 bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
Vladimir Marko455759b2014-05-06 20:49:36 +0100224 bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
225 OpSize size = LoadStoreOpSize(wide, ref);
Vladimir Marko674744e2014-04-24 15:18:26 +0100226 if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
227 return false;
228 }
Vladimir Marko455759b2014-05-06 20:49:36 +0100229
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800230 // Point of no return - no aborts after this
231 GenPrintLabel(mir);
232 LockArg(data.object_arg);
buzbeea0cd2d72014-06-01 09:33:49 -0700233 RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100234 RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
buzbeea0cd2d72014-06-01 09:33:49 -0700235 RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
236 RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100237 RegStorage r_result = rl_dest.reg;
238 if (!RegClassMatches(reg_class, r_result)) {
239 r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
240 : AllocTypedTemp(rl_dest.fp, reg_class);
241 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800242 if (data.is_volatile) {
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100243 LoadBaseDispVolatile(reg_obj, data.field_offset, r_result, size);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800244 // Without context sensitive analysis, we must issue the most conservative barriers.
245 // In this case, either a load or store may follow so we issue both barriers.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800246 GenMemBarrier(kLoadLoad);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800247 GenMemBarrier(kLoadStore);
Vladimir Marko674744e2014-04-24 15:18:26 +0100248 } else {
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100249 LoadBaseDisp(reg_obj, data.field_offset, r_result, size);
250 }
251 if (r_result != rl_dest.reg) {
252 if (wide) {
253 OpRegCopyWide(rl_dest.reg, r_result);
254 } else {
255 OpRegCopy(rl_dest.reg, r_result);
256 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800257 }
258 return true;
259}
260
261bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
262 // FastInstance() already checked by DexFileMethodInliner.
263 const InlineIGetIPutData& data = special.d.ifield_data;
Vladimir Markoe1fced12014-04-04 14:52:53 +0100264 if (data.method_is_static != 0u || data.object_arg != 0u) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800265 // The object is not "this" and has to be null-checked.
266 return false;
267 }
Vladimir Markoe1fced12014-04-04 14:52:53 +0100268 if (data.return_arg_plus1 != 0u) {
269 // The setter returns a method argument which we don't support here.
270 return false;
271 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800272
Vladimir Markoe3e02602014-03-12 15:42:41 +0000273 bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE));
Vladimir Marko455759b2014-05-06 20:49:36 +0100274 bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
275 OpSize size = LoadStoreOpSize(wide, ref);
Vladimir Marko674744e2014-04-24 15:18:26 +0100276 if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
277 return false;
278 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800279
280 // Point of no return - no aborts after this
281 GenPrintLabel(mir);
282 LockArg(data.object_arg);
283 LockArg(data.src_arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700284 RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100285 RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
286 RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800287 if (data.is_volatile) {
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800288 // There might have been a store before this volatile one so insert StoreStore barrier.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800289 GenMemBarrier(kStoreStore);
Vladimir Marko674744e2014-04-24 15:18:26 +0100290 StoreBaseDispVolatile(reg_obj, data.field_offset, reg_src, size);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800291 // A load might follow the volatile store so insert a StoreLoad barrier.
292 GenMemBarrier(kStoreLoad);
Vladimir Marko674744e2014-04-24 15:18:26 +0100293 } else {
294 StoreBaseDisp(reg_obj, data.field_offset, reg_src, size);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800295 }
Vladimir Marko455759b2014-05-06 20:49:36 +0100296 if (ref) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800297 MarkGCCard(reg_src, reg_obj);
298 }
299 return true;
300}
301
302bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
303 const InlineReturnArgData& data = special.d.return_data;
Vladimir Markoe3e02602014-03-12 15:42:41 +0000304 bool wide = (data.is_wide != 0u);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800305
306 // Point of no return - no aborts after this
307 GenPrintLabel(mir);
308 LockArg(data.arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700309 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
310 RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800311 LoadArgDirect(data.arg, rl_dest);
312 return true;
313}
314
315/*
316 * Special-case code generation for simple non-throwing leaf methods.
317 */
318bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
319 DCHECK(special.flags & kInlineSpecial);
320 current_dalvik_offset_ = mir->offset;
321 MIR* return_mir = nullptr;
322 bool successful = false;
323
324 switch (special.opcode) {
325 case kInlineOpNop:
326 successful = true;
327 DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
328 return_mir = mir;
329 break;
330 case kInlineOpNonWideConst: {
331 successful = true;
buzbeea0cd2d72014-06-01 09:33:49 -0700332 RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800333 GenPrintLabel(mir);
buzbee2700f7e2014-03-07 09:46:20 -0800334 LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
Jean Christophe Beylercdacac42014-03-13 14:54:59 -0700335 return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800336 break;
337 }
338 case kInlineOpReturnArg:
339 successful = GenSpecialIdentity(mir, special);
340 return_mir = mir;
341 break;
342 case kInlineOpIGet:
343 successful = GenSpecialIGet(mir, special);
Jean Christophe Beylercdacac42014-03-13 14:54:59 -0700344 return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800345 break;
346 case kInlineOpIPut:
347 successful = GenSpecialIPut(mir, special);
Jean Christophe Beylercdacac42014-03-13 14:54:59 -0700348 return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800349 break;
350 default:
351 break;
352 }
353
354 if (successful) {
Vladimir Marko39d95e62014-02-28 12:51:24 +0000355 if (kIsDebugBuild) {
356 // Clear unreachable catch entries.
357 mir_graph_->catches_.clear();
358 }
359
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800360 // Handle verbosity for return MIR.
361 if (return_mir != nullptr) {
362 current_dalvik_offset_ = return_mir->offset;
363 // Not handling special identity case because it already generated code as part
364 // of the return. The label should have been added before any code was generated.
365 if (special.opcode != kInlineOpReturnArg) {
366 GenPrintLabel(return_mir);
367 }
368 }
369 GenSpecialExitSequence();
370
371 core_spill_mask_ = 0;
372 num_core_spills_ = 0;
373 fp_spill_mask_ = 0;
374 num_fp_spills_ = 0;
375 frame_size_ = 0;
376 core_vmap_table_.clear();
377 fp_vmap_table_.clear();
378 }
379
380 return successful;
381}
382
Brian Carlstrom7940e442013-07-12 13:46:57 -0700383/*
384 * Target-independent code generation. Use only high-level
385 * load/store utilities here, or target-dependent genXX() handlers
386 * when necessary.
387 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700388void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700389 RegLocation rl_src[3];
390 RegLocation rl_dest = mir_graph_->GetBadLoc();
391 RegLocation rl_result = mir_graph_->GetBadLoc();
392 Instruction::Code opcode = mir->dalvikInsn.opcode;
393 int opt_flags = mir->optimization_flags;
394 uint32_t vB = mir->dalvikInsn.vB;
395 uint32_t vC = mir->dalvikInsn.vC;
buzbee082833c2014-05-17 23:16:26 -0700396 DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
397 << std::hex << current_dalvik_offset_;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700398
399 // Prep Src and Dest locations.
400 int next_sreg = 0;
401 int next_loc = 0;
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700402 uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700403 rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
404 if (attrs & DF_UA) {
405 if (attrs & DF_A_WIDE) {
406 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
407 next_sreg+= 2;
408 } else {
409 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
410 next_sreg++;
411 }
412 }
413 if (attrs & DF_UB) {
414 if (attrs & DF_B_WIDE) {
415 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
416 next_sreg+= 2;
417 } else {
418 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
419 next_sreg++;
420 }
421 }
422 if (attrs & DF_UC) {
423 if (attrs & DF_C_WIDE) {
424 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
425 } else {
426 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
427 }
428 }
429 if (attrs & DF_DA) {
430 if (attrs & DF_A_WIDE) {
431 rl_dest = mir_graph_->GetDestWide(mir);
432 } else {
433 rl_dest = mir_graph_->GetDest(mir);
434 }
435 }
436 switch (opcode) {
437 case Instruction::NOP:
438 break;
439
440 case Instruction::MOVE_EXCEPTION:
441 GenMoveException(rl_dest);
442 break;
443
444 case Instruction::RETURN_VOID:
445 if (((cu_->access_flags & kAccConstructor) != 0) &&
446 cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
447 cu_->class_def_idx)) {
448 GenMemBarrier(kStoreStore);
449 }
Wei Jin04f4d8a2014-05-29 18:04:29 -0700450 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700451 GenSuspendTest(opt_flags);
452 }
453 break;
454
Brian Carlstrom7940e442013-07-12 13:46:57 -0700455 case Instruction::RETURN_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700456 DCHECK(rl_src[0].ref);
457 // Intentional fallthrough.
458 case Instruction::RETURN:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700459 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700460 GenSuspendTest(opt_flags);
461 }
buzbeea0cd2d72014-06-01 09:33:49 -0700462 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
463 StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700464 break;
465
466 case Instruction::RETURN_WIDE:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700467 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700468 GenSuspendTest(opt_flags);
469 }
buzbeea0cd2d72014-06-01 09:33:49 -0700470 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
471 StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700472 break;
473
474 case Instruction::MOVE_RESULT_WIDE:
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000475 if ((opt_flags & MIR_INLINED) != 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700476 break; // Nop - combined w/ previous invoke.
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000477 }
buzbeea0cd2d72014-06-01 09:33:49 -0700478 StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700479 break;
480
481 case Instruction::MOVE_RESULT:
482 case Instruction::MOVE_RESULT_OBJECT:
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000483 if ((opt_flags & MIR_INLINED) != 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700484 break; // Nop - combined w/ previous invoke.
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000485 }
buzbeea0cd2d72014-06-01 09:33:49 -0700486 StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700487 break;
488
489 case Instruction::MOVE:
490 case Instruction::MOVE_OBJECT:
491 case Instruction::MOVE_16:
492 case Instruction::MOVE_OBJECT_16:
493 case Instruction::MOVE_FROM16:
494 case Instruction::MOVE_OBJECT_FROM16:
495 StoreValue(rl_dest, rl_src[0]);
496 break;
497
498 case Instruction::MOVE_WIDE:
499 case Instruction::MOVE_WIDE_16:
500 case Instruction::MOVE_WIDE_FROM16:
501 StoreValueWide(rl_dest, rl_src[0]);
502 break;
503
504 case Instruction::CONST:
505 case Instruction::CONST_4:
506 case Instruction::CONST_16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400507 GenConst(rl_dest, vB);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700508 break;
509
510 case Instruction::CONST_HIGH16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400511 GenConst(rl_dest, vB << 16);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700512 break;
513
514 case Instruction::CONST_WIDE_16:
515 case Instruction::CONST_WIDE_32:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000516 GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700517 break;
518
519 case Instruction::CONST_WIDE:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000520 GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700521 break;
522
523 case Instruction::CONST_WIDE_HIGH16:
524 rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800525 LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700526 StoreValueWide(rl_dest, rl_result);
527 break;
528
529 case Instruction::MONITOR_ENTER:
530 GenMonitorEnter(opt_flags, rl_src[0]);
531 break;
532
533 case Instruction::MONITOR_EXIT:
534 GenMonitorExit(opt_flags, rl_src[0]);
535 break;
536
537 case Instruction::CHECK_CAST: {
538 GenCheckCast(mir->offset, vB, rl_src[0]);
539 break;
540 }
541 case Instruction::INSTANCE_OF:
542 GenInstanceof(vC, rl_dest, rl_src[0]);
543 break;
544
545 case Instruction::NEW_INSTANCE:
546 GenNewInstance(vB, rl_dest);
547 break;
548
549 case Instruction::THROW:
550 GenThrow(rl_src[0]);
551 break;
552
553 case Instruction::ARRAY_LENGTH:
554 int len_offset;
555 len_offset = mirror::Array::LengthOffset().Int32Value();
buzbeea0cd2d72014-06-01 09:33:49 -0700556 rl_src[0] = LoadValue(rl_src[0], kRefReg);
buzbee2700f7e2014-03-07 09:46:20 -0800557 GenNullCheck(rl_src[0].reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700558 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee695d13a2014-04-19 13:32:20 -0700559 Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700560 MarkPossibleNullPointerException(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700561 StoreValue(rl_dest, rl_result);
562 break;
563
564 case Instruction::CONST_STRING:
565 case Instruction::CONST_STRING_JUMBO:
566 GenConstString(vB, rl_dest);
567 break;
568
569 case Instruction::CONST_CLASS:
570 GenConstClass(vB, rl_dest);
571 break;
572
573 case Instruction::FILL_ARRAY_DATA:
574 GenFillArrayData(vB, rl_src[0]);
575 break;
576
577 case Instruction::FILLED_NEW_ARRAY:
578 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
579 false /* not range */));
580 break;
581
582 case Instruction::FILLED_NEW_ARRAY_RANGE:
583 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
584 true /* range */));
585 break;
586
587 case Instruction::NEW_ARRAY:
588 GenNewArray(vC, rl_dest, rl_src[0]);
589 break;
590
591 case Instruction::GOTO:
592 case Instruction::GOTO_16:
593 case Instruction::GOTO_32:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700594 if (mir_graph_->IsBackedge(bb, bb->taken) &&
595 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
buzbee0d829482013-10-11 15:24:55 -0700596 GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700597 } else {
buzbee0d829482013-10-11 15:24:55 -0700598 OpUnconditionalBranch(&label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700599 }
600 break;
601
602 case Instruction::PACKED_SWITCH:
603 GenPackedSwitch(mir, vB, rl_src[0]);
604 break;
605
606 case Instruction::SPARSE_SWITCH:
607 GenSparseSwitch(mir, vB, rl_src[0]);
608 break;
609
610 case Instruction::CMPL_FLOAT:
611 case Instruction::CMPG_FLOAT:
612 case Instruction::CMPL_DOUBLE:
613 case Instruction::CMPG_DOUBLE:
614 GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
615 break;
616
617 case Instruction::CMP_LONG:
618 GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
619 break;
620
621 case Instruction::IF_EQ:
622 case Instruction::IF_NE:
623 case Instruction::IF_LT:
624 case Instruction::IF_GE:
625 case Instruction::IF_GT:
626 case Instruction::IF_LE: {
buzbee0d829482013-10-11 15:24:55 -0700627 LIR* taken = &label_list[bb->taken];
628 LIR* fall_through = &label_list[bb->fall_through];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700629 // Result known at compile time?
630 if (rl_src[0].is_const && rl_src[1].is_const) {
631 bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
632 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
buzbee0d829482013-10-11 15:24:55 -0700633 BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
Wei Jin04f4d8a2014-05-29 18:04:29 -0700634 if (mir_graph_->IsBackedge(bb, target_id) &&
635 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700636 GenSuspendTest(opt_flags);
637 }
buzbee0d829482013-10-11 15:24:55 -0700638 OpUnconditionalBranch(&label_list[target_id]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700639 } else {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700640 if (mir_graph_->IsBackwardsBranch(bb) &&
641 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
642 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700643 GenSuspendTest(opt_flags);
644 }
buzbee0d829482013-10-11 15:24:55 -0700645 GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700646 }
647 break;
648 }
649
650 case Instruction::IF_EQZ:
651 case Instruction::IF_NEZ:
652 case Instruction::IF_LTZ:
653 case Instruction::IF_GEZ:
654 case Instruction::IF_GTZ:
655 case Instruction::IF_LEZ: {
buzbee0d829482013-10-11 15:24:55 -0700656 LIR* taken = &label_list[bb->taken];
657 LIR* fall_through = &label_list[bb->fall_through];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700658 // Result known at compile time?
659 if (rl_src[0].is_const) {
660 bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
buzbee0d829482013-10-11 15:24:55 -0700661 BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
Wei Jin04f4d8a2014-05-29 18:04:29 -0700662 if (mir_graph_->IsBackedge(bb, target_id) &&
663 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700664 GenSuspendTest(opt_flags);
665 }
buzbee0d829482013-10-11 15:24:55 -0700666 OpUnconditionalBranch(&label_list[target_id]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700667 } else {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700668 if (mir_graph_->IsBackwardsBranch(bb) &&
669 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
670 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700671 GenSuspendTest(opt_flags);
672 }
673 GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through);
674 }
675 break;
676 }
677
678 case Instruction::AGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700679 GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
680 break;
681 case Instruction::AGET_OBJECT:
682 GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700683 break;
684 case Instruction::AGET:
buzbee695d13a2014-04-19 13:32:20 -0700685 GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700686 break;
687 case Instruction::AGET_BOOLEAN:
688 GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
689 break;
690 case Instruction::AGET_BYTE:
691 GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
692 break;
693 case Instruction::AGET_CHAR:
694 GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
695 break;
696 case Instruction::AGET_SHORT:
697 GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
698 break;
699 case Instruction::APUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700700 GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700701 break;
702 case Instruction::APUT:
buzbee695d13a2014-04-19 13:32:20 -0700703 GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700704 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700705 case Instruction::APUT_OBJECT: {
706 bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
707 bool is_safe = is_null; // Always safe to store null.
708 if (!is_safe) {
709 // Check safety from verifier type information.
Vladimir Marko2730db02014-01-27 11:15:17 +0000710 const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
711 is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
Ian Rogersa9a82542013-10-04 11:17:26 -0700712 }
713 if (is_null || is_safe) {
714 // Store of constant null doesn't require an assignability test and can be generated inline
715 // without fixed register usage or a card mark.
buzbee695d13a2014-04-19 13:32:20 -0700716 GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
Ian Rogersa9a82542013-10-04 11:17:26 -0700717 } else {
718 GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
719 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700720 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700721 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700722 case Instruction::APUT_SHORT:
723 case Instruction::APUT_CHAR:
Ian Rogersa9a82542013-10-04 11:17:26 -0700724 GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700725 break;
726 case Instruction::APUT_BYTE:
727 case Instruction::APUT_BOOLEAN:
Ian Rogersa9a82542013-10-04 11:17:26 -0700728 GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700729 break;
730
731 case Instruction::IGET_OBJECT:
buzbee695d13a2014-04-19 13:32:20 -0700732 GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700733 break;
734
735 case Instruction::IGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700736 GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700737 break;
738
739 case Instruction::IGET:
buzbee695d13a2014-04-19 13:32:20 -0700740 GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700741 break;
742
743 case Instruction::IGET_CHAR:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000744 GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700745 break;
746
747 case Instruction::IGET_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000748 GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700749 break;
750
751 case Instruction::IGET_BOOLEAN:
752 case Instruction::IGET_BYTE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000753 GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700754 break;
755
756 case Instruction::IPUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700757 GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700758 break;
759
760 case Instruction::IPUT_OBJECT:
buzbee695d13a2014-04-19 13:32:20 -0700761 GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700762 break;
763
764 case Instruction::IPUT:
buzbee695d13a2014-04-19 13:32:20 -0700765 GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700766 break;
767
768 case Instruction::IPUT_BOOLEAN:
769 case Instruction::IPUT_BYTE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000770 GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700771 break;
772
773 case Instruction::IPUT_CHAR:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000774 GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700775 break;
776
777 case Instruction::IPUT_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000778 GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700779 break;
780
781 case Instruction::SGET_OBJECT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000782 GenSget(mir, rl_dest, false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700783 break;
784 case Instruction::SGET:
785 case Instruction::SGET_BOOLEAN:
786 case Instruction::SGET_BYTE:
787 case Instruction::SGET_CHAR:
788 case Instruction::SGET_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000789 GenSget(mir, rl_dest, false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700790 break;
791
792 case Instruction::SGET_WIDE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000793 GenSget(mir, rl_dest, true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700794 break;
795
796 case Instruction::SPUT_OBJECT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000797 GenSput(mir, rl_src[0], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700798 break;
799
800 case Instruction::SPUT:
801 case Instruction::SPUT_BOOLEAN:
802 case Instruction::SPUT_BYTE:
803 case Instruction::SPUT_CHAR:
804 case Instruction::SPUT_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000805 GenSput(mir, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700806 break;
807
808 case Instruction::SPUT_WIDE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000809 GenSput(mir, rl_src[0], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700810 break;
811
812 case Instruction::INVOKE_STATIC_RANGE:
813 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700814 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
815 // If the invocation is not inlined, we can assume there is already a
816 // suspend check at the return site
817 mir_graph_->AppendGenSuspendTestList(bb);
818 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700819 break;
820 case Instruction::INVOKE_STATIC:
821 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700822 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
823 mir_graph_->AppendGenSuspendTestList(bb);
824 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700825 break;
826
827 case Instruction::INVOKE_DIRECT:
828 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700829 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
830 mir_graph_->AppendGenSuspendTestList(bb);
831 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700832 break;
833 case Instruction::INVOKE_DIRECT_RANGE:
834 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700835 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
836 mir_graph_->AppendGenSuspendTestList(bb);
837 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700838 break;
839
840 case Instruction::INVOKE_VIRTUAL:
841 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700842 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
843 mir_graph_->AppendGenSuspendTestList(bb);
844 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700845 break;
846 case Instruction::INVOKE_VIRTUAL_RANGE:
847 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700848 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
849 mir_graph_->AppendGenSuspendTestList(bb);
850 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700851 break;
852
853 case Instruction::INVOKE_SUPER:
854 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700855 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
856 mir_graph_->AppendGenSuspendTestList(bb);
857 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700858 break;
859 case Instruction::INVOKE_SUPER_RANGE:
860 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700861 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
862 mir_graph_->AppendGenSuspendTestList(bb);
863 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700864 break;
865
866 case Instruction::INVOKE_INTERFACE:
867 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700868 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
869 mir_graph_->AppendGenSuspendTestList(bb);
870 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700871 break;
872 case Instruction::INVOKE_INTERFACE_RANGE:
873 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700874 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
875 mir_graph_->AppendGenSuspendTestList(bb);
876 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700877 break;
878
879 case Instruction::NEG_INT:
880 case Instruction::NOT_INT:
881 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]);
882 break;
883
884 case Instruction::NEG_LONG:
885 case Instruction::NOT_LONG:
886 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]);
887 break;
888
889 case Instruction::NEG_FLOAT:
890 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
891 break;
892
893 case Instruction::NEG_DOUBLE:
894 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
895 break;
896
897 case Instruction::INT_TO_LONG:
898 GenIntToLong(rl_dest, rl_src[0]);
899 break;
900
901 case Instruction::LONG_TO_INT:
902 rl_src[0] = UpdateLocWide(rl_src[0]);
buzbeea0cd2d72014-06-01 09:33:49 -0700903 rl_src[0] = NarrowRegLoc(rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700904 StoreValue(rl_dest, rl_src[0]);
905 break;
906
907 case Instruction::INT_TO_BYTE:
908 case Instruction::INT_TO_SHORT:
909 case Instruction::INT_TO_CHAR:
910 GenIntNarrowing(opcode, rl_dest, rl_src[0]);
911 break;
912
913 case Instruction::INT_TO_FLOAT:
914 case Instruction::INT_TO_DOUBLE:
915 case Instruction::LONG_TO_FLOAT:
916 case Instruction::LONG_TO_DOUBLE:
917 case Instruction::FLOAT_TO_INT:
918 case Instruction::FLOAT_TO_LONG:
919 case Instruction::FLOAT_TO_DOUBLE:
920 case Instruction::DOUBLE_TO_INT:
921 case Instruction::DOUBLE_TO_LONG:
922 case Instruction::DOUBLE_TO_FLOAT:
923 GenConversion(opcode, rl_dest, rl_src[0]);
924 break;
925
926
927 case Instruction::ADD_INT:
928 case Instruction::ADD_INT_2ADDR:
929 case Instruction::MUL_INT:
930 case Instruction::MUL_INT_2ADDR:
931 case Instruction::AND_INT:
932 case Instruction::AND_INT_2ADDR:
933 case Instruction::OR_INT:
934 case Instruction::OR_INT_2ADDR:
935 case Instruction::XOR_INT:
936 case Instruction::XOR_INT_2ADDR:
937 if (rl_src[0].is_const &&
938 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) {
939 GenArithOpIntLit(opcode, rl_dest, rl_src[1],
940 mir_graph_->ConstantValue(rl_src[0].orig_sreg));
941 } else if (rl_src[1].is_const &&
942 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
943 GenArithOpIntLit(opcode, rl_dest, rl_src[0],
944 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
945 } else {
946 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
947 }
948 break;
949
950 case Instruction::SUB_INT:
951 case Instruction::SUB_INT_2ADDR:
952 case Instruction::DIV_INT:
953 case Instruction::DIV_INT_2ADDR:
954 case Instruction::REM_INT:
955 case Instruction::REM_INT_2ADDR:
956 case Instruction::SHL_INT:
957 case Instruction::SHL_INT_2ADDR:
958 case Instruction::SHR_INT:
959 case Instruction::SHR_INT_2ADDR:
960 case Instruction::USHR_INT:
961 case Instruction::USHR_INT_2ADDR:
962 if (rl_src[1].is_const &&
963 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
964 GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
965 } else {
966 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
967 }
968 break;
969
970 case Instruction::ADD_LONG:
971 case Instruction::SUB_LONG:
972 case Instruction::AND_LONG:
973 case Instruction::OR_LONG:
974 case Instruction::XOR_LONG:
975 case Instruction::ADD_LONG_2ADDR:
976 case Instruction::SUB_LONG_2ADDR:
977 case Instruction::AND_LONG_2ADDR:
978 case Instruction::OR_LONG_2ADDR:
979 case Instruction::XOR_LONG_2ADDR:
980 if (rl_src[0].is_const || rl_src[1].is_const) {
981 GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
982 break;
983 }
984 // Note: intentional fallthrough.
985
986 case Instruction::MUL_LONG:
987 case Instruction::DIV_LONG:
988 case Instruction::REM_LONG:
989 case Instruction::MUL_LONG_2ADDR:
990 case Instruction::DIV_LONG_2ADDR:
991 case Instruction::REM_LONG_2ADDR:
992 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
993 break;
994
995 case Instruction::SHL_LONG:
996 case Instruction::SHR_LONG:
997 case Instruction::USHR_LONG:
998 case Instruction::SHL_LONG_2ADDR:
999 case Instruction::SHR_LONG_2ADDR:
1000 case Instruction::USHR_LONG_2ADDR:
1001 if (rl_src[1].is_const) {
1002 GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
1003 } else {
1004 GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
1005 }
1006 break;
1007
1008 case Instruction::ADD_FLOAT:
1009 case Instruction::SUB_FLOAT:
1010 case Instruction::MUL_FLOAT:
1011 case Instruction::DIV_FLOAT:
1012 case Instruction::REM_FLOAT:
1013 case Instruction::ADD_FLOAT_2ADDR:
1014 case Instruction::SUB_FLOAT_2ADDR:
1015 case Instruction::MUL_FLOAT_2ADDR:
1016 case Instruction::DIV_FLOAT_2ADDR:
1017 case Instruction::REM_FLOAT_2ADDR:
1018 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
1019 break;
1020
1021 case Instruction::ADD_DOUBLE:
1022 case Instruction::SUB_DOUBLE:
1023 case Instruction::MUL_DOUBLE:
1024 case Instruction::DIV_DOUBLE:
1025 case Instruction::REM_DOUBLE:
1026 case Instruction::ADD_DOUBLE_2ADDR:
1027 case Instruction::SUB_DOUBLE_2ADDR:
1028 case Instruction::MUL_DOUBLE_2ADDR:
1029 case Instruction::DIV_DOUBLE_2ADDR:
1030 case Instruction::REM_DOUBLE_2ADDR:
1031 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
1032 break;
1033
1034 case Instruction::RSUB_INT:
1035 case Instruction::ADD_INT_LIT16:
1036 case Instruction::MUL_INT_LIT16:
1037 case Instruction::DIV_INT_LIT16:
1038 case Instruction::REM_INT_LIT16:
1039 case Instruction::AND_INT_LIT16:
1040 case Instruction::OR_INT_LIT16:
1041 case Instruction::XOR_INT_LIT16:
1042 case Instruction::ADD_INT_LIT8:
1043 case Instruction::RSUB_INT_LIT8:
1044 case Instruction::MUL_INT_LIT8:
1045 case Instruction::DIV_INT_LIT8:
1046 case Instruction::REM_INT_LIT8:
1047 case Instruction::AND_INT_LIT8:
1048 case Instruction::OR_INT_LIT8:
1049 case Instruction::XOR_INT_LIT8:
1050 case Instruction::SHL_INT_LIT8:
1051 case Instruction::SHR_INT_LIT8:
1052 case Instruction::USHR_INT_LIT8:
1053 GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
1054 break;
1055
1056 default:
1057 LOG(FATAL) << "Unexpected opcode: " << opcode;
1058 }
buzbee082833c2014-05-17 23:16:26 -07001059 DCHECK(CheckCorePoolSanity());
Brian Carlstrom1895ea32013-07-18 13:28:37 -07001060} // NOLINT(readability/fn_size)
Brian Carlstrom7940e442013-07-12 13:46:57 -07001061
1062// Process extended MIR instructions
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001063void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001064 switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
1065 case kMirOpCopy: {
1066 RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
1067 RegLocation rl_dest = mir_graph_->GetDest(mir);
1068 StoreValue(rl_dest, rl_src);
1069 break;
1070 }
1071 case kMirOpFusedCmplFloat:
1072 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
1073 break;
1074 case kMirOpFusedCmpgFloat:
1075 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
1076 break;
1077 case kMirOpFusedCmplDouble:
1078 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
1079 break;
1080 case kMirOpFusedCmpgDouble:
1081 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
1082 break;
1083 case kMirOpFusedCmpLong:
1084 GenFusedLongCmpBranch(bb, mir);
1085 break;
1086 case kMirOpSelect:
1087 GenSelect(bb, mir);
1088 break;
Mark Mendelld65c51a2014-04-29 16:55:20 -04001089 case kMirOpPhi:
1090 case kMirOpNop:
1091 case kMirOpNullCheck:
1092 case kMirOpRangeCheck:
1093 case kMirOpDivZeroCheck:
1094 case kMirOpCheck:
1095 case kMirOpCheckPart2:
1096 // Ignore these known opcodes
1097 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001098 default:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001099 // Give the backends a chance to handle unknown extended MIR opcodes.
1100 GenMachineSpecificExtendedMethodMIR(bb, mir);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001101 break;
1102 }
1103}
1104
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001105void Mir2Lir::GenPrintLabel(MIR* mir) {
1106 // Mark the beginning of a Dalvik instruction for line tracking.
1107 if (cu_->verbose) {
1108 char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
1109 MarkBoundary(mir->offset, inst_str);
1110 }
1111}
1112
// Handle the content in each basic block: materialize the block's label,
// generate entry/exit sequences where applicable, then lower every MIR in
// the block to LIR. Always returns false (no change for the iterator).
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  // The label LIR for this block was pre-allocated in block_label_list_;
  // record the block's starting offset in it.
  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    // Entry block: generate the method entry sequence.
    ResetRegPool();
    int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg],
                     mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]);
  } else if (bb->block_type == kExitBlock) {
    // Exit block: generate the method exit sequence.
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = ENCODE_ALL;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction: swap the
      // opcodes and SSA representations so the check half is lowered here,
      // and the work half becomes a kMirOpCheckPart2 pseudo-op that points
      // back at this MIR.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    if (MIRGraph::IsPseudoMirOp(opcode)) {
      // Extended (pseudo) MIR instructions are dispatched separately.
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}
1204
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001205bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
Vladimir Marko5816ed42013-11-27 17:04:20 +00001206 cu_->NewTimingSplit("SpecialMIR2LIR");
Brian Carlstrom7940e442013-07-12 13:46:57 -07001207 // Find the first DalvikByteCode block.
1208 int num_reachable_blocks = mir_graph_->GetNumReachableBlocks();
1209 BasicBlock*bb = NULL;
1210 for (int idx = 0; idx < num_reachable_blocks; idx++) {
1211 // TODO: no direct access of growable lists.
1212 int dfs_index = mir_graph_->GetDfsOrder()->Get(idx);
1213 bb = mir_graph_->GetBasicBlock(dfs_index);
1214 if (bb->block_type == kDalvikByteCode) {
1215 break;
1216 }
1217 }
1218 if (bb == NULL) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001219 return false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001220 }
1221 DCHECK_EQ(bb->start_offset, 0);
1222 DCHECK(bb->first_mir_insn != NULL);
1223
1224 // Get the first instruction.
1225 MIR* mir = bb->first_mir_insn;
1226
1227 // Free temp registers and reset redundant store tracking.
1228 ResetRegPool();
1229 ResetDefTracking();
buzbeeba574512014-05-12 15:13:16 -07001230 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001231
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001232 return GenSpecialCase(bb, mir, special);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001233}
1234
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001235void Mir2Lir::MethodMIR2LIR() {
buzbeea61f4952013-08-23 14:27:06 -07001236 cu_->NewTimingSplit("MIR2LIR");
1237
Brian Carlstrom7940e442013-07-12 13:46:57 -07001238 // Hold the labels of each block.
1239 block_label_list_ =
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -07001240 static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
Vladimir Marko83cc7ae2014-02-12 18:02:05 +00001241 kArenaAllocLIR));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001242
buzbee56c71782013-09-05 17:13:19 -07001243 PreOrderDfsIterator iter(mir_graph_);
buzbee252254b2013-09-08 16:20:53 -07001244 BasicBlock* curr_bb = iter.Next();
1245 BasicBlock* next_bb = iter.Next();
1246 while (curr_bb != NULL) {
1247 MethodBlockCodeGen(curr_bb);
1248 // If the fall_through block is no longer laid out consecutively, drop in a branch.
buzbee0d829482013-10-11 15:24:55 -07001249 BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
1250 if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
1251 OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
buzbee252254b2013-09-08 16:20:53 -07001252 }
1253 curr_bb = next_bb;
1254 do {
1255 next_bb = iter.Next();
1256 } while ((next_bb != NULL) && (next_bb->block_type == kDead));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001257 }
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001258 HandleSlowPaths();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001259}
1260
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001261//
1262// LIR Slow Path
1263//
1264
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001265LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001266 m2l_->SetCurrentDexPc(current_dex_pc_);
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001267 LIR* target = m2l_->NewLIR0(opcode);
Vladimir Marko3bc86152014-03-13 14:11:28 +00001268 fromfast_->target = target;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001269 return target;
1270}
Vladimir Marko3bc86152014-03-13 14:11:28 +00001271
Brian Carlstrom7940e442013-07-12 13:46:57 -07001272} // namespace art