blob: 1f12b6fe69ae245d47c410d81e611a459e93b440 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_internals.h"
18#include "dex/dataflow_iterator-inl.h"
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080019#include "dex/quick/dex_file_method_inliner.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070020#include "mir_to_lir-inl.h"
21#include "object_utils.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070022#include "thread-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023
24namespace art {
25
buzbeea0cd2d72014-06-01 09:33:49 -070026RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
27 RegisterClass res;
28 switch (shorty_type) {
29 case 'L':
30 res = kRefReg;
31 break;
32 case 'F':
33 // Expected fallthrough.
34 case 'D':
35 res = kFPReg;
36 break;
37 default:
38 res = kCoreReg;
39 }
40 return res;
41}
42
43RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
44 RegisterClass res;
45 if (loc.fp) {
46 DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
47 res = kFPReg;
48 } else if (loc.ref) {
49 res = kRefReg;
50 } else {
51 res = kCoreReg;
52 }
53 return res;
54}
55
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080056void Mir2Lir::LockArg(int in_position, bool wide) {
buzbee2700f7e2014-03-07 09:46:20 -080057 RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
58 RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
59 RegStorage::InvalidReg();
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080060
buzbee2700f7e2014-03-07 09:46:20 -080061 if (reg_arg_low.Valid()) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080062 LockTemp(reg_arg_low);
63 }
buzbee2700f7e2014-03-07 09:46:20 -080064 if (reg_arg_high.Valid() && reg_arg_low != reg_arg_high) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080065 LockTemp(reg_arg_high);
66 }
67}
68
// TODO: needs revisit for 64-bit.
// Loads the incoming argument at |in_position| into a register of |reg_class|,
// pulling it from its mapped physical register(s) and/or from the stack in-area
// as needed. Returns the register (or register pair when |wide|) holding the
// value; the result is a temp or an arg register, caller manages its lifetime.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // Stack offset of the callee's incoming VR (caller's out area).
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     * TODO: This needs revisited for 64-bit.
     */
    offset += sizeof(uint32_t);
  }

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      // Both halves are in memory: load the full 64 bits into a fresh pair.
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetReg(kSp), offset, new_regs, k64);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}
129
// Loads the incoming argument at |in_position| directly into the register(s)
// of |rl_dest|, which must already hold valid destination register storage.
// Handles all four combinations of the (low, high) halves being in mapped
// argument registers vs. on the stack.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  // Stack offset of the callee's incoming VR (caller's out area).
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     * TODO: This needs revisited for 64-bit.
     */
    offset += sizeof(uint32_t);
  }

  if (!rl_dest.wide) {
    // Narrow argument: either copy from its mapped register or load from stack.
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg);
    }
  } else {
    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      // Both halves in registers: one wide copy.
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      // Low half in a register, high half spilled to stack.
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      // High half in a register, low half spilled to stack.
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      // Both halves on the stack: one 64-bit load.
      LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64);
    }
  }
}
166
// Generates the body of a "special" getter method that returns a field of
// "this" (an inlined IGET on argument 0). Returns false (without emitting
// code) if the pattern is not supported; returns true after emitting code.
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  // Load into a temp of the field's register class if the return location's
  // class doesn't match, then copy to the return register(s) afterwards.
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (data.is_volatile) {
    LoadBaseDispVolatile(reg_obj, data.field_offset, r_result, size);
    // Without context sensitive analysis, we must issue the most conservative barriers.
    // In this case, either a load or store may follow so we issue both barriers.
    GenMemBarrier(kLoadLoad);
    GenMemBarrier(kLoadStore);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size);
  }
  if (r_result != rl_dest.reg) {
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
212
213bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
214 // FastInstance() already checked by DexFileMethodInliner.
215 const InlineIGetIPutData& data = special.d.ifield_data;
Vladimir Markoe1fced12014-04-04 14:52:53 +0100216 if (data.method_is_static != 0u || data.object_arg != 0u) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800217 // The object is not "this" and has to be null-checked.
218 return false;
219 }
Vladimir Markoe1fced12014-04-04 14:52:53 +0100220 if (data.return_arg_plus1 != 0u) {
221 // The setter returns a method argument which we don't support here.
222 return false;
223 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800224
Vladimir Markoe3e02602014-03-12 15:42:41 +0000225 bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE));
Vladimir Marko455759b2014-05-06 20:49:36 +0100226 bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
227 OpSize size = LoadStoreOpSize(wide, ref);
Vladimir Marko674744e2014-04-24 15:18:26 +0100228 if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
229 return false;
230 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800231
232 // Point of no return - no aborts after this
233 GenPrintLabel(mir);
234 LockArg(data.object_arg);
235 LockArg(data.src_arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700236 RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100237 RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
238 RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800239 if (data.is_volatile) {
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800240 // There might have been a store before this volatile one so insert StoreStore barrier.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800241 GenMemBarrier(kStoreStore);
Vladimir Marko674744e2014-04-24 15:18:26 +0100242 StoreBaseDispVolatile(reg_obj, data.field_offset, reg_src, size);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800243 // A load might follow the volatile store so insert a StoreLoad barrier.
244 GenMemBarrier(kStoreLoad);
Vladimir Marko674744e2014-04-24 15:18:26 +0100245 } else {
246 StoreBaseDisp(reg_obj, data.field_offset, reg_src, size);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800247 }
Vladimir Marko455759b2014-05-06 20:49:36 +0100248 if (ref) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800249 MarkGCCard(reg_src, reg_obj);
250 }
251 return true;
252}
253
254bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
255 const InlineReturnArgData& data = special.d.return_data;
Vladimir Markoe3e02602014-03-12 15:42:41 +0000256 bool wide = (data.is_wide != 0u);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800257
258 // Point of no return - no aborts after this
259 GenPrintLabel(mir);
260 LockArg(data.arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700261 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
262 RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800263 LoadArgDirect(data.arg, rl_dest);
264 return true;
265}
266
/*
 * Special-case code generation for simple non-throwing leaf methods.
 * Dispatches on the inline opcode; on success, emits the special exit
 * sequence and zeroes out all spill/frame bookkeeping (no frame needed).
 * Returns whether special-case generation succeeded.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a compile-time constant (non-wide).
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // No frame or spills are needed for a special-case leaf method.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
334
Brian Carlstrom7940e442013-07-12 13:46:57 -0700335/*
336 * Target-independent code generation. Use only high-level
337 * load/store utilities here, or target-dependent genXX() handlers
338 * when necessary.
339 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700340void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700341 RegLocation rl_src[3];
342 RegLocation rl_dest = mir_graph_->GetBadLoc();
343 RegLocation rl_result = mir_graph_->GetBadLoc();
344 Instruction::Code opcode = mir->dalvikInsn.opcode;
345 int opt_flags = mir->optimization_flags;
346 uint32_t vB = mir->dalvikInsn.vB;
347 uint32_t vC = mir->dalvikInsn.vC;
buzbee082833c2014-05-17 23:16:26 -0700348 DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
349 << std::hex << current_dalvik_offset_;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700350
351 // Prep Src and Dest locations.
352 int next_sreg = 0;
353 int next_loc = 0;
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700354 uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700355 rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
356 if (attrs & DF_UA) {
357 if (attrs & DF_A_WIDE) {
358 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
359 next_sreg+= 2;
360 } else {
361 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
362 next_sreg++;
363 }
364 }
365 if (attrs & DF_UB) {
366 if (attrs & DF_B_WIDE) {
367 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
368 next_sreg+= 2;
369 } else {
370 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
371 next_sreg++;
372 }
373 }
374 if (attrs & DF_UC) {
375 if (attrs & DF_C_WIDE) {
376 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
377 } else {
378 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
379 }
380 }
381 if (attrs & DF_DA) {
382 if (attrs & DF_A_WIDE) {
383 rl_dest = mir_graph_->GetDestWide(mir);
384 } else {
385 rl_dest = mir_graph_->GetDest(mir);
386 }
387 }
388 switch (opcode) {
389 case Instruction::NOP:
390 break;
391
392 case Instruction::MOVE_EXCEPTION:
393 GenMoveException(rl_dest);
394 break;
395
396 case Instruction::RETURN_VOID:
397 if (((cu_->access_flags & kAccConstructor) != 0) &&
398 cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
399 cu_->class_def_idx)) {
400 GenMemBarrier(kStoreStore);
401 }
Wei Jin04f4d8a2014-05-29 18:04:29 -0700402 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700403 GenSuspendTest(opt_flags);
404 }
405 break;
406
Brian Carlstrom7940e442013-07-12 13:46:57 -0700407 case Instruction::RETURN_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700408 DCHECK(rl_src[0].ref);
409 // Intentional fallthrough.
410 case Instruction::RETURN:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700411 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700412 GenSuspendTest(opt_flags);
413 }
buzbeea0cd2d72014-06-01 09:33:49 -0700414 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
415 StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700416 break;
417
418 case Instruction::RETURN_WIDE:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700419 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700420 GenSuspendTest(opt_flags);
421 }
buzbeea0cd2d72014-06-01 09:33:49 -0700422 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
423 StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700424 break;
425
426 case Instruction::MOVE_RESULT_WIDE:
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000427 if ((opt_flags & MIR_INLINED) != 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700428 break; // Nop - combined w/ previous invoke.
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000429 }
buzbeea0cd2d72014-06-01 09:33:49 -0700430 StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700431 break;
432
433 case Instruction::MOVE_RESULT:
434 case Instruction::MOVE_RESULT_OBJECT:
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000435 if ((opt_flags & MIR_INLINED) != 0) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700436 break; // Nop - combined w/ previous invoke.
Vladimir Marko9820b7c2014-01-02 16:40:37 +0000437 }
buzbeea0cd2d72014-06-01 09:33:49 -0700438 StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700439 break;
440
441 case Instruction::MOVE:
442 case Instruction::MOVE_OBJECT:
443 case Instruction::MOVE_16:
444 case Instruction::MOVE_OBJECT_16:
445 case Instruction::MOVE_FROM16:
446 case Instruction::MOVE_OBJECT_FROM16:
447 StoreValue(rl_dest, rl_src[0]);
448 break;
449
450 case Instruction::MOVE_WIDE:
451 case Instruction::MOVE_WIDE_16:
452 case Instruction::MOVE_WIDE_FROM16:
453 StoreValueWide(rl_dest, rl_src[0]);
454 break;
455
456 case Instruction::CONST:
457 case Instruction::CONST_4:
458 case Instruction::CONST_16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400459 GenConst(rl_dest, vB);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700460 break;
461
462 case Instruction::CONST_HIGH16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400463 GenConst(rl_dest, vB << 16);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700464 break;
465
466 case Instruction::CONST_WIDE_16:
467 case Instruction::CONST_WIDE_32:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000468 GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700469 break;
470
471 case Instruction::CONST_WIDE:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000472 GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700473 break;
474
475 case Instruction::CONST_WIDE_HIGH16:
476 rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800477 LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700478 StoreValueWide(rl_dest, rl_result);
479 break;
480
481 case Instruction::MONITOR_ENTER:
482 GenMonitorEnter(opt_flags, rl_src[0]);
483 break;
484
485 case Instruction::MONITOR_EXIT:
486 GenMonitorExit(opt_flags, rl_src[0]);
487 break;
488
489 case Instruction::CHECK_CAST: {
490 GenCheckCast(mir->offset, vB, rl_src[0]);
491 break;
492 }
493 case Instruction::INSTANCE_OF:
494 GenInstanceof(vC, rl_dest, rl_src[0]);
495 break;
496
497 case Instruction::NEW_INSTANCE:
498 GenNewInstance(vB, rl_dest);
499 break;
500
501 case Instruction::THROW:
502 GenThrow(rl_src[0]);
503 break;
504
505 case Instruction::ARRAY_LENGTH:
506 int len_offset;
507 len_offset = mirror::Array::LengthOffset().Int32Value();
buzbeea0cd2d72014-06-01 09:33:49 -0700508 rl_src[0] = LoadValue(rl_src[0], kRefReg);
buzbee2700f7e2014-03-07 09:46:20 -0800509 GenNullCheck(rl_src[0].reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700510 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee695d13a2014-04-19 13:32:20 -0700511 Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700512 MarkPossibleNullPointerException(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700513 StoreValue(rl_dest, rl_result);
514 break;
515
516 case Instruction::CONST_STRING:
517 case Instruction::CONST_STRING_JUMBO:
518 GenConstString(vB, rl_dest);
519 break;
520
521 case Instruction::CONST_CLASS:
522 GenConstClass(vB, rl_dest);
523 break;
524
525 case Instruction::FILL_ARRAY_DATA:
526 GenFillArrayData(vB, rl_src[0]);
527 break;
528
529 case Instruction::FILLED_NEW_ARRAY:
530 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
531 false /* not range */));
532 break;
533
534 case Instruction::FILLED_NEW_ARRAY_RANGE:
535 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
536 true /* range */));
537 break;
538
539 case Instruction::NEW_ARRAY:
540 GenNewArray(vC, rl_dest, rl_src[0]);
541 break;
542
543 case Instruction::GOTO:
544 case Instruction::GOTO_16:
545 case Instruction::GOTO_32:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700546 if (mir_graph_->IsBackedge(bb, bb->taken) &&
547 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
buzbee0d829482013-10-11 15:24:55 -0700548 GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700549 } else {
buzbee0d829482013-10-11 15:24:55 -0700550 OpUnconditionalBranch(&label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700551 }
552 break;
553
554 case Instruction::PACKED_SWITCH:
555 GenPackedSwitch(mir, vB, rl_src[0]);
556 break;
557
558 case Instruction::SPARSE_SWITCH:
559 GenSparseSwitch(mir, vB, rl_src[0]);
560 break;
561
562 case Instruction::CMPL_FLOAT:
563 case Instruction::CMPG_FLOAT:
564 case Instruction::CMPL_DOUBLE:
565 case Instruction::CMPG_DOUBLE:
566 GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
567 break;
568
569 case Instruction::CMP_LONG:
570 GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
571 break;
572
573 case Instruction::IF_EQ:
574 case Instruction::IF_NE:
575 case Instruction::IF_LT:
576 case Instruction::IF_GE:
577 case Instruction::IF_GT:
578 case Instruction::IF_LE: {
buzbee0d829482013-10-11 15:24:55 -0700579 LIR* taken = &label_list[bb->taken];
580 LIR* fall_through = &label_list[bb->fall_through];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700581 // Result known at compile time?
582 if (rl_src[0].is_const && rl_src[1].is_const) {
583 bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
584 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
buzbee0d829482013-10-11 15:24:55 -0700585 BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
Wei Jin04f4d8a2014-05-29 18:04:29 -0700586 if (mir_graph_->IsBackedge(bb, target_id) &&
587 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700588 GenSuspendTest(opt_flags);
589 }
buzbee0d829482013-10-11 15:24:55 -0700590 OpUnconditionalBranch(&label_list[target_id]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700591 } else {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700592 if (mir_graph_->IsBackwardsBranch(bb) &&
593 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
594 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700595 GenSuspendTest(opt_flags);
596 }
buzbee0d829482013-10-11 15:24:55 -0700597 GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700598 }
599 break;
600 }
601
602 case Instruction::IF_EQZ:
603 case Instruction::IF_NEZ:
604 case Instruction::IF_LTZ:
605 case Instruction::IF_GEZ:
606 case Instruction::IF_GTZ:
607 case Instruction::IF_LEZ: {
buzbee0d829482013-10-11 15:24:55 -0700608 LIR* taken = &label_list[bb->taken];
609 LIR* fall_through = &label_list[bb->fall_through];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700610 // Result known at compile time?
611 if (rl_src[0].is_const) {
612 bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
buzbee0d829482013-10-11 15:24:55 -0700613 BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
Wei Jin04f4d8a2014-05-29 18:04:29 -0700614 if (mir_graph_->IsBackedge(bb, target_id) &&
615 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700616 GenSuspendTest(opt_flags);
617 }
buzbee0d829482013-10-11 15:24:55 -0700618 OpUnconditionalBranch(&label_list[target_id]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700619 } else {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700620 if (mir_graph_->IsBackwardsBranch(bb) &&
621 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
622 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700623 GenSuspendTest(opt_flags);
624 }
625 GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through);
626 }
627 break;
628 }
629
630 case Instruction::AGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700631 GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
632 break;
633 case Instruction::AGET_OBJECT:
634 GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700635 break;
636 case Instruction::AGET:
buzbee695d13a2014-04-19 13:32:20 -0700637 GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700638 break;
639 case Instruction::AGET_BOOLEAN:
640 GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
641 break;
642 case Instruction::AGET_BYTE:
643 GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
644 break;
645 case Instruction::AGET_CHAR:
646 GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
647 break;
648 case Instruction::AGET_SHORT:
649 GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
650 break;
651 case Instruction::APUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700652 GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700653 break;
654 case Instruction::APUT:
buzbee695d13a2014-04-19 13:32:20 -0700655 GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700656 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700657 case Instruction::APUT_OBJECT: {
658 bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
659 bool is_safe = is_null; // Always safe to store null.
660 if (!is_safe) {
661 // Check safety from verifier type information.
Vladimir Marko2730db02014-01-27 11:15:17 +0000662 const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
663 is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
Ian Rogersa9a82542013-10-04 11:17:26 -0700664 }
665 if (is_null || is_safe) {
666 // Store of constant null doesn't require an assignability test and can be generated inline
667 // without fixed register usage or a card mark.
buzbee695d13a2014-04-19 13:32:20 -0700668 GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
Ian Rogersa9a82542013-10-04 11:17:26 -0700669 } else {
670 GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
671 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700672 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700673 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700674 case Instruction::APUT_SHORT:
675 case Instruction::APUT_CHAR:
Ian Rogersa9a82542013-10-04 11:17:26 -0700676 GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700677 break;
678 case Instruction::APUT_BYTE:
679 case Instruction::APUT_BOOLEAN:
Ian Rogersa9a82542013-10-04 11:17:26 -0700680 GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700681 break;
682
683 case Instruction::IGET_OBJECT:
buzbee695d13a2014-04-19 13:32:20 -0700684 GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700685 break;
686
687 case Instruction::IGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700688 GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700689 break;
690
691 case Instruction::IGET:
buzbee695d13a2014-04-19 13:32:20 -0700692 GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700693 break;
694
695 case Instruction::IGET_CHAR:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000696 GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700697 break;
698
699 case Instruction::IGET_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000700 GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700701 break;
702
703 case Instruction::IGET_BOOLEAN:
704 case Instruction::IGET_BYTE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000705 GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700706 break;
707
708 case Instruction::IPUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700709 GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700710 break;
711
712 case Instruction::IPUT_OBJECT:
buzbee695d13a2014-04-19 13:32:20 -0700713 GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700714 break;
715
716 case Instruction::IPUT:
buzbee695d13a2014-04-19 13:32:20 -0700717 GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700718 break;
719
720 case Instruction::IPUT_BOOLEAN:
721 case Instruction::IPUT_BYTE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000722 GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700723 break;
724
725 case Instruction::IPUT_CHAR:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000726 GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700727 break;
728
729 case Instruction::IPUT_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000730 GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700731 break;
732
733 case Instruction::SGET_OBJECT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000734 GenSget(mir, rl_dest, false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700735 break;
736 case Instruction::SGET:
737 case Instruction::SGET_BOOLEAN:
738 case Instruction::SGET_BYTE:
739 case Instruction::SGET_CHAR:
740 case Instruction::SGET_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000741 GenSget(mir, rl_dest, false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700742 break;
743
744 case Instruction::SGET_WIDE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000745 GenSget(mir, rl_dest, true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700746 break;
747
748 case Instruction::SPUT_OBJECT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000749 GenSput(mir, rl_src[0], false, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700750 break;
751
752 case Instruction::SPUT:
753 case Instruction::SPUT_BOOLEAN:
754 case Instruction::SPUT_BYTE:
755 case Instruction::SPUT_CHAR:
756 case Instruction::SPUT_SHORT:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000757 GenSput(mir, rl_src[0], false, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700758 break;
759
760 case Instruction::SPUT_WIDE:
Vladimir Markobe0e5462014-02-26 11:24:15 +0000761 GenSput(mir, rl_src[0], true, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700762 break;
763
764 case Instruction::INVOKE_STATIC_RANGE:
765 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700766 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
767 // If the invocation is not inlined, we can assume there is already a
768 // suspend check at the return site
769 mir_graph_->AppendGenSuspendTestList(bb);
770 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700771 break;
772 case Instruction::INVOKE_STATIC:
773 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700774 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
775 mir_graph_->AppendGenSuspendTestList(bb);
776 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700777 break;
778
779 case Instruction::INVOKE_DIRECT:
780 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700781 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
782 mir_graph_->AppendGenSuspendTestList(bb);
783 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700784 break;
785 case Instruction::INVOKE_DIRECT_RANGE:
786 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700787 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
788 mir_graph_->AppendGenSuspendTestList(bb);
789 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700790 break;
791
792 case Instruction::INVOKE_VIRTUAL:
793 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700794 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
795 mir_graph_->AppendGenSuspendTestList(bb);
796 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700797 break;
798 case Instruction::INVOKE_VIRTUAL_RANGE:
799 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700800 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
801 mir_graph_->AppendGenSuspendTestList(bb);
802 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700803 break;
804
805 case Instruction::INVOKE_SUPER:
806 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700807 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
808 mir_graph_->AppendGenSuspendTestList(bb);
809 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700810 break;
811 case Instruction::INVOKE_SUPER_RANGE:
812 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700813 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
814 mir_graph_->AppendGenSuspendTestList(bb);
815 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700816 break;
817
818 case Instruction::INVOKE_INTERFACE:
819 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700820 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
821 mir_graph_->AppendGenSuspendTestList(bb);
822 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700823 break;
824 case Instruction::INVOKE_INTERFACE_RANGE:
825 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
Wei Jin04f4d8a2014-05-29 18:04:29 -0700826 if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
827 mir_graph_->AppendGenSuspendTestList(bb);
828 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700829 break;
830
831 case Instruction::NEG_INT:
832 case Instruction::NOT_INT:
833 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]);
834 break;
835
836 case Instruction::NEG_LONG:
837 case Instruction::NOT_LONG:
838 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]);
839 break;
840
841 case Instruction::NEG_FLOAT:
842 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
843 break;
844
845 case Instruction::NEG_DOUBLE:
846 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
847 break;
848
849 case Instruction::INT_TO_LONG:
850 GenIntToLong(rl_dest, rl_src[0]);
851 break;
852
853 case Instruction::LONG_TO_INT:
854 rl_src[0] = UpdateLocWide(rl_src[0]);
buzbeea0cd2d72014-06-01 09:33:49 -0700855 rl_src[0] = NarrowRegLoc(rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700856 StoreValue(rl_dest, rl_src[0]);
857 break;
858
859 case Instruction::INT_TO_BYTE:
860 case Instruction::INT_TO_SHORT:
861 case Instruction::INT_TO_CHAR:
862 GenIntNarrowing(opcode, rl_dest, rl_src[0]);
863 break;
864
865 case Instruction::INT_TO_FLOAT:
866 case Instruction::INT_TO_DOUBLE:
867 case Instruction::LONG_TO_FLOAT:
868 case Instruction::LONG_TO_DOUBLE:
869 case Instruction::FLOAT_TO_INT:
870 case Instruction::FLOAT_TO_LONG:
871 case Instruction::FLOAT_TO_DOUBLE:
872 case Instruction::DOUBLE_TO_INT:
873 case Instruction::DOUBLE_TO_LONG:
874 case Instruction::DOUBLE_TO_FLOAT:
875 GenConversion(opcode, rl_dest, rl_src[0]);
876 break;
877
878
879 case Instruction::ADD_INT:
880 case Instruction::ADD_INT_2ADDR:
881 case Instruction::MUL_INT:
882 case Instruction::MUL_INT_2ADDR:
883 case Instruction::AND_INT:
884 case Instruction::AND_INT_2ADDR:
885 case Instruction::OR_INT:
886 case Instruction::OR_INT_2ADDR:
887 case Instruction::XOR_INT:
888 case Instruction::XOR_INT_2ADDR:
889 if (rl_src[0].is_const &&
890 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) {
891 GenArithOpIntLit(opcode, rl_dest, rl_src[1],
892 mir_graph_->ConstantValue(rl_src[0].orig_sreg));
893 } else if (rl_src[1].is_const &&
894 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
895 GenArithOpIntLit(opcode, rl_dest, rl_src[0],
896 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
897 } else {
898 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
899 }
900 break;
901
902 case Instruction::SUB_INT:
903 case Instruction::SUB_INT_2ADDR:
904 case Instruction::DIV_INT:
905 case Instruction::DIV_INT_2ADDR:
906 case Instruction::REM_INT:
907 case Instruction::REM_INT_2ADDR:
908 case Instruction::SHL_INT:
909 case Instruction::SHL_INT_2ADDR:
910 case Instruction::SHR_INT:
911 case Instruction::SHR_INT_2ADDR:
912 case Instruction::USHR_INT:
913 case Instruction::USHR_INT_2ADDR:
914 if (rl_src[1].is_const &&
915 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
916 GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
917 } else {
918 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
919 }
920 break;
921
922 case Instruction::ADD_LONG:
923 case Instruction::SUB_LONG:
924 case Instruction::AND_LONG:
925 case Instruction::OR_LONG:
926 case Instruction::XOR_LONG:
927 case Instruction::ADD_LONG_2ADDR:
928 case Instruction::SUB_LONG_2ADDR:
929 case Instruction::AND_LONG_2ADDR:
930 case Instruction::OR_LONG_2ADDR:
931 case Instruction::XOR_LONG_2ADDR:
932 if (rl_src[0].is_const || rl_src[1].is_const) {
933 GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
934 break;
935 }
936 // Note: intentional fallthrough.
937
938 case Instruction::MUL_LONG:
939 case Instruction::DIV_LONG:
940 case Instruction::REM_LONG:
941 case Instruction::MUL_LONG_2ADDR:
942 case Instruction::DIV_LONG_2ADDR:
943 case Instruction::REM_LONG_2ADDR:
944 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
945 break;
946
947 case Instruction::SHL_LONG:
948 case Instruction::SHR_LONG:
949 case Instruction::USHR_LONG:
950 case Instruction::SHL_LONG_2ADDR:
951 case Instruction::SHR_LONG_2ADDR:
952 case Instruction::USHR_LONG_2ADDR:
953 if (rl_src[1].is_const) {
954 GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
955 } else {
956 GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
957 }
958 break;
959
960 case Instruction::ADD_FLOAT:
961 case Instruction::SUB_FLOAT:
962 case Instruction::MUL_FLOAT:
963 case Instruction::DIV_FLOAT:
964 case Instruction::REM_FLOAT:
965 case Instruction::ADD_FLOAT_2ADDR:
966 case Instruction::SUB_FLOAT_2ADDR:
967 case Instruction::MUL_FLOAT_2ADDR:
968 case Instruction::DIV_FLOAT_2ADDR:
969 case Instruction::REM_FLOAT_2ADDR:
970 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
971 break;
972
973 case Instruction::ADD_DOUBLE:
974 case Instruction::SUB_DOUBLE:
975 case Instruction::MUL_DOUBLE:
976 case Instruction::DIV_DOUBLE:
977 case Instruction::REM_DOUBLE:
978 case Instruction::ADD_DOUBLE_2ADDR:
979 case Instruction::SUB_DOUBLE_2ADDR:
980 case Instruction::MUL_DOUBLE_2ADDR:
981 case Instruction::DIV_DOUBLE_2ADDR:
982 case Instruction::REM_DOUBLE_2ADDR:
983 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
984 break;
985
986 case Instruction::RSUB_INT:
987 case Instruction::ADD_INT_LIT16:
988 case Instruction::MUL_INT_LIT16:
989 case Instruction::DIV_INT_LIT16:
990 case Instruction::REM_INT_LIT16:
991 case Instruction::AND_INT_LIT16:
992 case Instruction::OR_INT_LIT16:
993 case Instruction::XOR_INT_LIT16:
994 case Instruction::ADD_INT_LIT8:
995 case Instruction::RSUB_INT_LIT8:
996 case Instruction::MUL_INT_LIT8:
997 case Instruction::DIV_INT_LIT8:
998 case Instruction::REM_INT_LIT8:
999 case Instruction::AND_INT_LIT8:
1000 case Instruction::OR_INT_LIT8:
1001 case Instruction::XOR_INT_LIT8:
1002 case Instruction::SHL_INT_LIT8:
1003 case Instruction::SHR_INT_LIT8:
1004 case Instruction::USHR_INT_LIT8:
1005 GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
1006 break;
1007
1008 default:
1009 LOG(FATAL) << "Unexpected opcode: " << opcode;
1010 }
buzbee082833c2014-05-17 23:16:26 -07001011 DCHECK(CheckCorePoolSanity());
Brian Carlstrom1895ea32013-07-18 13:28:37 -07001012} // NOLINT(readability/fn_size)
Brian Carlstrom7940e442013-07-12 13:46:57 -07001013
1014// Process extended MIR instructions
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001015void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001016 switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
1017 case kMirOpCopy: {
1018 RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
1019 RegLocation rl_dest = mir_graph_->GetDest(mir);
1020 StoreValue(rl_dest, rl_src);
1021 break;
1022 }
1023 case kMirOpFusedCmplFloat:
1024 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
1025 break;
1026 case kMirOpFusedCmpgFloat:
1027 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
1028 break;
1029 case kMirOpFusedCmplDouble:
1030 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
1031 break;
1032 case kMirOpFusedCmpgDouble:
1033 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
1034 break;
1035 case kMirOpFusedCmpLong:
1036 GenFusedLongCmpBranch(bb, mir);
1037 break;
1038 case kMirOpSelect:
1039 GenSelect(bb, mir);
1040 break;
Mark Mendelld65c51a2014-04-29 16:55:20 -04001041 case kMirOpPhi:
1042 case kMirOpNop:
1043 case kMirOpNullCheck:
1044 case kMirOpRangeCheck:
1045 case kMirOpDivZeroCheck:
1046 case kMirOpCheck:
1047 case kMirOpCheckPart2:
1048 // Ignore these known opcodes
1049 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001050 default:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001051 // Give the backends a chance to handle unknown extended MIR opcodes.
1052 GenMachineSpecificExtendedMethodMIR(bb, mir);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001053 break;
1054 }
1055}
1056
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001057void Mir2Lir::GenPrintLabel(MIR* mir) {
1058 // Mark the beginning of a Dalvik instruction for line tracking.
1059 if (cu_->verbose) {
1060 char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
1061 MarkBoundary(mir->offset, inst_str);
1062 }
1063}
1064
// Handle the content in each basic block: emit the block label, the method
// entry/exit sequences where applicable, then lower every MIR instruction in
// the block to LIR. Always returns false (the CFG is not modified).
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  // Entry and exit blocks get the frame setup/teardown sequences.
  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg],
                     mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]);
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = ENCODE_ALL;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction: the check half
      // takes over the work half's opcode/meta, the two halves swap ssa_rep,
      // and the work half becomes a kMirOpCheckPart2 pointing back at us.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    // Extended (pseudo) MIR opcodes are handled separately from real dex ops.
    if (MIRGraph::IsPseudoMirOp(opcode)) {
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}
1156
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001157bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
Vladimir Marko5816ed42013-11-27 17:04:20 +00001158 cu_->NewTimingSplit("SpecialMIR2LIR");
Brian Carlstrom7940e442013-07-12 13:46:57 -07001159 // Find the first DalvikByteCode block.
1160 int num_reachable_blocks = mir_graph_->GetNumReachableBlocks();
1161 BasicBlock*bb = NULL;
1162 for (int idx = 0; idx < num_reachable_blocks; idx++) {
1163 // TODO: no direct access of growable lists.
1164 int dfs_index = mir_graph_->GetDfsOrder()->Get(idx);
1165 bb = mir_graph_->GetBasicBlock(dfs_index);
1166 if (bb->block_type == kDalvikByteCode) {
1167 break;
1168 }
1169 }
1170 if (bb == NULL) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001171 return false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001172 }
1173 DCHECK_EQ(bb->start_offset, 0);
1174 DCHECK(bb->first_mir_insn != NULL);
1175
1176 // Get the first instruction.
1177 MIR* mir = bb->first_mir_insn;
1178
1179 // Free temp registers and reset redundant store tracking.
1180 ResetRegPool();
1181 ResetDefTracking();
buzbeeba574512014-05-12 15:13:16 -07001182 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001183
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001184 return GenSpecialCase(bb, mir, special);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001185}
1186
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001187void Mir2Lir::MethodMIR2LIR() {
buzbeea61f4952013-08-23 14:27:06 -07001188 cu_->NewTimingSplit("MIR2LIR");
1189
Brian Carlstrom7940e442013-07-12 13:46:57 -07001190 // Hold the labels of each block.
1191 block_label_list_ =
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -07001192 static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
Vladimir Marko83cc7ae2014-02-12 18:02:05 +00001193 kArenaAllocLIR));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001194
buzbee56c71782013-09-05 17:13:19 -07001195 PreOrderDfsIterator iter(mir_graph_);
buzbee252254b2013-09-08 16:20:53 -07001196 BasicBlock* curr_bb = iter.Next();
1197 BasicBlock* next_bb = iter.Next();
1198 while (curr_bb != NULL) {
1199 MethodBlockCodeGen(curr_bb);
1200 // If the fall_through block is no longer laid out consecutively, drop in a branch.
buzbee0d829482013-10-11 15:24:55 -07001201 BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
1202 if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
1203 OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
buzbee252254b2013-09-08 16:20:53 -07001204 }
1205 curr_bb = next_bb;
1206 do {
1207 next_bb = iter.Next();
1208 } while ((next_bb != NULL) && (next_bb->block_type == kDead));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001209 }
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001210 HandleSlowPaths();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001211}
1212
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001213//
1214// LIR Slow Path
1215//
1216
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001217LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001218 m2l_->SetCurrentDexPc(current_dex_pc_);
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001219 LIR* target = m2l_->NewLIR0(opcode);
Vladimir Marko3bc86152014-03-13 14:11:28 +00001220 fromfast_->target = target;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001221 return target;
1222}
Vladimir Marko3bc86152014-03-13 14:11:28 +00001223
Brian Carlstrom7940e442013-07-12 13:46:57 -07001224} // namespace art