blob: bd88091add55eb36da1e0c73cadaaf140ab15839 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_internals.h"
18#include "dex/dataflow_iterator-inl.h"
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080019#include "dex/quick/dex_file_method_inliner.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070020#include "mir_to_lir-inl.h"
Fred Shih37f05ef2014-07-16 18:38:08 -070021#include "primitive.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070022#include "thread-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023
24namespace art {
25
buzbeea0cd2d72014-06-01 09:33:49 -070026RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
27 RegisterClass res;
28 switch (shorty_type) {
29 case 'L':
30 res = kRefReg;
31 break;
32 case 'F':
33 // Expected fallthrough.
34 case 'D':
35 res = kFPReg;
36 break;
37 default:
38 res = kCoreReg;
39 }
40 return res;
41}
42
43RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
44 RegisterClass res;
45 if (loc.fp) {
46 DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
47 res = kFPReg;
48 } else if (loc.ref) {
49 res = kRefReg;
50 } else {
51 res = kCoreReg;
52 }
53 return res;
54}
55
Serguei Katkov717a3e42014-11-13 17:19:42 +060056void Mir2Lir::LockArg(int in_position, bool) {
57 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080058
Serguei Katkov717a3e42014-11-13 17:19:42 +060059 if (reg_arg.Valid()) {
60 LockTemp(reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080061 }
62}
63
Vladimir Markoc93ac8b2014-05-13 17:53:49 +010064RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
Vladimir Marko8dea81c2014-06-06 14:50:36 +010065 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Nicolas Geoffray42fcd982014-04-22 11:03:52 +000066 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +070067
68 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080069 /*
70 * When doing a call for x86, it moves the stack pointer in order to push return.
71 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080072 */
73 offset += sizeof(uint32_t);
74 }
75
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +070076 if (cu_->instruction_set == kX86_64) {
77 /*
78 * When doing a call for x86, it moves the stack pointer in order to push return.
79 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
80 */
81 offset += sizeof(uint64_t);
82 }
83
Serguei Katkov717a3e42014-11-13 17:19:42 +060084 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
85
86 // TODO: REVISIT: This adds a spill of low part while we could just copy it.
87 if (reg_arg.Valid() && wide && (reg_arg.GetWideKind() == kNotWide)) {
88 // For wide register we've got only half of it.
89 // Flush it to memory then.
90 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
91 reg_arg = RegStorage::InvalidReg();
92 }
93
94 if (!reg_arg.Valid()) {
95 reg_arg = wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
96 LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, wide ? k64 : k32, kNotVolatile);
97 } else {
98 // Check if we need to copy the arg to a different reg_class.
99 if (!RegClassMatches(reg_class, reg_arg)) {
100 if (wide) {
101 RegStorage new_reg = AllocTypedTempWide(false, reg_class);
102 OpRegCopyWide(new_reg, reg_arg);
103 reg_arg = new_reg;
104 } else {
105 RegStorage new_reg = AllocTypedTemp(false, reg_class);
106 OpRegCopy(new_reg, reg_arg);
107 reg_arg = new_reg;
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700108 }
109 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800110 }
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100111 return reg_arg;
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800112}
113
114void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600115 DCHECK_EQ(rl_dest.location, kLocPhysReg);
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100116 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000117 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700118 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800119 /*
120 * When doing a call for x86, it moves the stack pointer in order to push return.
121 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800122 */
123 offset += sizeof(uint32_t);
124 }
125
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700126 if (cu_->instruction_set == kX86_64) {
127 /*
128 * When doing a call for x86, it moves the stack pointer in order to push return.
129 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
130 */
131 offset += sizeof(uint64_t);
132 }
133
Serguei Katkov717a3e42014-11-13 17:19:42 +0600134 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
135
136 // TODO: REVISIT: This adds a spill of low part while we could just copy it.
137 if (reg_arg.Valid() && rl_dest.wide && (reg_arg.GetWideKind() == kNotWide)) {
138 // For wide register we've got only half of it.
139 // Flush it to memory then.
140 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
141 reg_arg = RegStorage::InvalidReg();
142 }
143
144 if (!reg_arg.Valid()) {
145 LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, rl_dest.wide ? k64 : k32, kNotVolatile);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800146 } else {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600147 if (rl_dest.wide) {
148 OpRegCopyWide(rl_dest.reg, reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800149 } else {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600150 OpRegCopy(rl_dest.reg, reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800151 }
152 }
153}
154
// Generate the body of a "special" getter: a single IGET on "this" followed
// by a return. Returns false (without emitting code) if the pattern is not
// handled; true once code has been emitted (no aborts are possible after the
// "point of no return" marker below).
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  // Translate the IGET variant into the memory operand size for the load.
  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = IsWide(size) ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    // Field class and return class differ: load into a temp of the field's
    // class first, then copy into the return register below.
    r_result = IsWide(size) ? AllocTypedTempWide(rl_dest.fp, reg_class)
                            : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (IsRef(size)) {
    // Reference loads go through the ref-aware path (handles compressed refs).
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    // Move the loaded value into the expected return register(s).
    if (IsWide(size)) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
212
// Generate the body of a "special" setter: a single IPUT on "this" followed
// by a void return. Returns false (without emitting code) if the pattern is
// not handled; true once code has been emitted (no aborts are possible after
// the "point of no return" marker below).
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  // Translate the IPUT variant into the memory operand size for the store.
  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  LockArg(data.src_arg, IsWide(size));
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, IsWide(size));
  if (IsRef(size)) {
    // Reference stores go through the ref-aware path (handles compressed refs).
    StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (IsRef(size)) {
    // A reference was written into the heap: dirty the GC card for reg_obj.
    MarkGCCard(0, reg_src, reg_obj);
  }
  return true;
}
265
266bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
267 const InlineReturnArgData& data = special.d.return_data;
Vladimir Markoe3e02602014-03-12 15:42:41 +0000268 bool wide = (data.is_wide != 0u);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800269
270 // Point of no return - no aborts after this
271 GenPrintLabel(mir);
272 LockArg(data.arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700273 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
274 RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800275 LoadArgDirect(data.arg, rl_dest);
276 return true;
277}
278
/*
 * Special-case code generation for simple non-throwing leaf methods.
 * Dispatches on the inline opcode, and on success emits the frameless exit
 * sequence and zeroes all spill/frame bookkeeping. Returns whether the
 * special-case path was taken.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      // Empty void method: nothing but the return itself.
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a 32-bit constant: materialize it in the return reg.
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // The special-case path needs no frame or spills: reset all bookkeeping.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
346
Brian Carlstrom7940e442013-07-12 13:46:57 -0700347/*
348 * Target-independent code generation. Use only high-level
349 * load/store utilities here, or target-dependent genXX() handlers
350 * when necessary.
351 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700352void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700353 RegLocation rl_src[3];
354 RegLocation rl_dest = mir_graph_->GetBadLoc();
355 RegLocation rl_result = mir_graph_->GetBadLoc();
Ian Rogersc35cda82014-11-10 16:34:29 -0800356 const Instruction::Code opcode = mir->dalvikInsn.opcode;
357 const int opt_flags = mir->optimization_flags;
358 const uint32_t vB = mir->dalvikInsn.vB;
359 const uint32_t vC = mir->dalvikInsn.vC;
buzbee082833c2014-05-17 23:16:26 -0700360 DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
361 << std::hex << current_dalvik_offset_;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700362
363 // Prep Src and Dest locations.
364 int next_sreg = 0;
365 int next_loc = 0;
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700366 uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700367 rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
368 if (attrs & DF_UA) {
369 if (attrs & DF_A_WIDE) {
370 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
371 next_sreg+= 2;
372 } else {
373 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
374 next_sreg++;
375 }
376 }
377 if (attrs & DF_UB) {
378 if (attrs & DF_B_WIDE) {
379 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
380 next_sreg+= 2;
381 } else {
382 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
383 next_sreg++;
384 }
385 }
386 if (attrs & DF_UC) {
387 if (attrs & DF_C_WIDE) {
388 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
389 } else {
390 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
391 }
392 }
393 if (attrs & DF_DA) {
394 if (attrs & DF_A_WIDE) {
395 rl_dest = mir_graph_->GetDestWide(mir);
396 } else {
397 rl_dest = mir_graph_->GetDest(mir);
398 }
399 }
400 switch (opcode) {
401 case Instruction::NOP:
402 break;
403
404 case Instruction::MOVE_EXCEPTION:
405 GenMoveException(rl_dest);
406 break;
407
408 case Instruction::RETURN_VOID:
409 if (((cu_->access_flags & kAccConstructor) != 0) &&
410 cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
411 cu_->class_def_idx)) {
412 GenMemBarrier(kStoreStore);
413 }
Wei Jin04f4d8a2014-05-29 18:04:29 -0700414 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700415 GenSuspendTest(opt_flags);
416 }
417 break;
418
Brian Carlstrom7940e442013-07-12 13:46:57 -0700419 case Instruction::RETURN_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700420 DCHECK(rl_src[0].ref);
Ian Rogersfc787ec2014-10-09 21:56:44 -0700421 FALLTHROUGH_INTENDED;
buzbeea0cd2d72014-06-01 09:33:49 -0700422 case Instruction::RETURN:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700423 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700424 GenSuspendTest(opt_flags);
425 }
buzbeea0cd2d72014-06-01 09:33:49 -0700426 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
427 StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700428 break;
429
430 case Instruction::RETURN_WIDE:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700431 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700432 GenSuspendTest(opt_flags);
433 }
buzbeea0cd2d72014-06-01 09:33:49 -0700434 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
435 StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700436 break;
437
438 case Instruction::MOVE_RESULT_WIDE:
buzbeea0cd2d72014-06-01 09:33:49 -0700439 StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700440 break;
441
442 case Instruction::MOVE_RESULT:
443 case Instruction::MOVE_RESULT_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700444 StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700445 break;
446
447 case Instruction::MOVE:
448 case Instruction::MOVE_OBJECT:
449 case Instruction::MOVE_16:
450 case Instruction::MOVE_OBJECT_16:
451 case Instruction::MOVE_FROM16:
452 case Instruction::MOVE_OBJECT_FROM16:
453 StoreValue(rl_dest, rl_src[0]);
454 break;
455
456 case Instruction::MOVE_WIDE:
457 case Instruction::MOVE_WIDE_16:
458 case Instruction::MOVE_WIDE_FROM16:
459 StoreValueWide(rl_dest, rl_src[0]);
460 break;
461
462 case Instruction::CONST:
463 case Instruction::CONST_4:
464 case Instruction::CONST_16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400465 GenConst(rl_dest, vB);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700466 break;
467
468 case Instruction::CONST_HIGH16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400469 GenConst(rl_dest, vB << 16);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700470 break;
471
472 case Instruction::CONST_WIDE_16:
473 case Instruction::CONST_WIDE_32:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000474 GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700475 break;
476
477 case Instruction::CONST_WIDE:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000478 GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700479 break;
480
481 case Instruction::CONST_WIDE_HIGH16:
482 rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800483 LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700484 StoreValueWide(rl_dest, rl_result);
485 break;
486
487 case Instruction::MONITOR_ENTER:
488 GenMonitorEnter(opt_flags, rl_src[0]);
489 break;
490
491 case Instruction::MONITOR_EXIT:
492 GenMonitorExit(opt_flags, rl_src[0]);
493 break;
494
495 case Instruction::CHECK_CAST: {
496 GenCheckCast(mir->offset, vB, rl_src[0]);
497 break;
498 }
499 case Instruction::INSTANCE_OF:
500 GenInstanceof(vC, rl_dest, rl_src[0]);
501 break;
502
503 case Instruction::NEW_INSTANCE:
504 GenNewInstance(vB, rl_dest);
505 break;
506
507 case Instruction::THROW:
508 GenThrow(rl_src[0]);
509 break;
510
Ian Rogersc35cda82014-11-10 16:34:29 -0800511 case Instruction::ARRAY_LENGTH: {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700512 int len_offset;
513 len_offset = mirror::Array::LengthOffset().Int32Value();
buzbeea0cd2d72014-06-01 09:33:49 -0700514 rl_src[0] = LoadValue(rl_src[0], kRefReg);
buzbee2700f7e2014-03-07 09:46:20 -0800515 GenNullCheck(rl_src[0].reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700516 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee695d13a2014-04-19 13:32:20 -0700517 Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700518 MarkPossibleNullPointerException(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700519 StoreValue(rl_dest, rl_result);
520 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800521 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700522 case Instruction::CONST_STRING:
523 case Instruction::CONST_STRING_JUMBO:
524 GenConstString(vB, rl_dest);
525 break;
526
527 case Instruction::CONST_CLASS:
528 GenConstClass(vB, rl_dest);
529 break;
530
531 case Instruction::FILL_ARRAY_DATA:
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -0700532 GenFillArrayData(mir, vB, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700533 break;
534
535 case Instruction::FILLED_NEW_ARRAY:
536 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
537 false /* not range */));
538 break;
539
540 case Instruction::FILLED_NEW_ARRAY_RANGE:
541 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
542 true /* range */));
543 break;
544
545 case Instruction::NEW_ARRAY:
546 GenNewArray(vC, rl_dest, rl_src[0]);
547 break;
548
549 case Instruction::GOTO:
550 case Instruction::GOTO_16:
551 case Instruction::GOTO_32:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700552 if (mir_graph_->IsBackedge(bb, bb->taken) &&
553 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
buzbee0d829482013-10-11 15:24:55 -0700554 GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700555 } else {
buzbee0d829482013-10-11 15:24:55 -0700556 OpUnconditionalBranch(&label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700557 }
558 break;
559
560 case Instruction::PACKED_SWITCH:
561 GenPackedSwitch(mir, vB, rl_src[0]);
562 break;
563
564 case Instruction::SPARSE_SWITCH:
565 GenSparseSwitch(mir, vB, rl_src[0]);
566 break;
567
568 case Instruction::CMPL_FLOAT:
569 case Instruction::CMPG_FLOAT:
570 case Instruction::CMPL_DOUBLE:
571 case Instruction::CMPG_DOUBLE:
572 GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
573 break;
574
575 case Instruction::CMP_LONG:
576 GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
577 break;
578
579 case Instruction::IF_EQ:
580 case Instruction::IF_NE:
581 case Instruction::IF_LT:
582 case Instruction::IF_GE:
583 case Instruction::IF_GT:
584 case Instruction::IF_LE: {
buzbee0d829482013-10-11 15:24:55 -0700585 LIR* taken = &label_list[bb->taken];
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000586 if (mir_graph_->IsBackwardsBranch(bb) &&
587 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
588 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
589 GenSuspendTest(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700590 }
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000591 GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700592 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800593 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700594 case Instruction::IF_EQZ:
595 case Instruction::IF_NEZ:
596 case Instruction::IF_LTZ:
597 case Instruction::IF_GEZ:
598 case Instruction::IF_GTZ:
599 case Instruction::IF_LEZ: {
buzbee0d829482013-10-11 15:24:55 -0700600 LIR* taken = &label_list[bb->taken];
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000601 if (mir_graph_->IsBackwardsBranch(bb) &&
602 (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
603 !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
604 GenSuspendTest(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700605 }
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000606 GenCompareZeroAndBranch(opcode, rl_src[0], taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700607 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800608 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700609
610 case Instruction::AGET_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700611 GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
612 break;
613 case Instruction::AGET_OBJECT:
614 GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700615 break;
616 case Instruction::AGET:
buzbee695d13a2014-04-19 13:32:20 -0700617 GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700618 break;
619 case Instruction::AGET_BOOLEAN:
620 GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
621 break;
622 case Instruction::AGET_BYTE:
623 GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
624 break;
625 case Instruction::AGET_CHAR:
626 GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
627 break;
628 case Instruction::AGET_SHORT:
629 GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
630 break;
631 case Instruction::APUT_WIDE:
buzbee695d13a2014-04-19 13:32:20 -0700632 GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700633 break;
634 case Instruction::APUT:
buzbee695d13a2014-04-19 13:32:20 -0700635 GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700636 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700637 case Instruction::APUT_OBJECT: {
638 bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
639 bool is_safe = is_null; // Always safe to store null.
640 if (!is_safe) {
641 // Check safety from verifier type information.
Vladimir Marko2730db02014-01-27 11:15:17 +0000642 const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
643 is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
Ian Rogersa9a82542013-10-04 11:17:26 -0700644 }
645 if (is_null || is_safe) {
646 // Store of constant null doesn't require an assignability test and can be generated inline
647 // without fixed register usage or a card mark.
buzbee695d13a2014-04-19 13:32:20 -0700648 GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
Ian Rogersa9a82542013-10-04 11:17:26 -0700649 } else {
650 GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
651 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700652 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700653 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700654 case Instruction::APUT_SHORT:
655 case Instruction::APUT_CHAR:
Ian Rogersa9a82542013-10-04 11:17:26 -0700656 GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700657 break;
658 case Instruction::APUT_BYTE:
659 case Instruction::APUT_BOOLEAN:
Ian Rogersa9a82542013-10-04 11:17:26 -0700660 GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700661 break;
662
663 case Instruction::IGET_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700664 GenIGet(mir, opt_flags, kReference, Primitive::kPrimNot, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700665 break;
666
667 case Instruction::IGET_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700668 // kPrimLong and kPrimDouble share the same entrypoints.
669 GenIGet(mir, opt_flags, k64, Primitive::kPrimLong, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700670 break;
671
672 case Instruction::IGET:
Fred Shih37f05ef2014-07-16 18:38:08 -0700673 GenIGet(mir, opt_flags, k32, Primitive::kPrimInt, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700674 break;
675
676 case Instruction::IGET_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700677 GenIGet(mir, opt_flags, kUnsignedHalf, Primitive::kPrimChar, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700678 break;
679
680 case Instruction::IGET_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700681 GenIGet(mir, opt_flags, kSignedHalf, Primitive::kPrimShort, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700682 break;
683
684 case Instruction::IGET_BOOLEAN:
Fred Shih37f05ef2014-07-16 18:38:08 -0700685 GenIGet(mir, opt_flags, kUnsignedByte, Primitive::kPrimBoolean, rl_dest, rl_src[0]);
686 break;
687
Brian Carlstrom7940e442013-07-12 13:46:57 -0700688 case Instruction::IGET_BYTE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700689 GenIGet(mir, opt_flags, kSignedByte, Primitive::kPrimByte, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700690 break;
691
692 case Instruction::IPUT_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700693 GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700694 break;
695
696 case Instruction::IPUT_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700697 GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700698 break;
699
700 case Instruction::IPUT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700701 GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700702 break;
703
Brian Carlstrom7940e442013-07-12 13:46:57 -0700704 case Instruction::IPUT_BYTE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700705 case Instruction::IPUT_BOOLEAN:
706 GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700707 break;
708
709 case Instruction::IPUT_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700710 GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700711 break;
712
713 case Instruction::IPUT_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700714 GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700715 break;
716
717 case Instruction::SGET_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700718 GenSget(mir, rl_dest, kReference, Primitive::kPrimNot);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700719 break;
Fred Shih37f05ef2014-07-16 18:38:08 -0700720
Brian Carlstrom7940e442013-07-12 13:46:57 -0700721 case Instruction::SGET:
Fred Shih37f05ef2014-07-16 18:38:08 -0700722 GenSget(mir, rl_dest, k32, Primitive::kPrimInt);
723 break;
724
Brian Carlstrom7940e442013-07-12 13:46:57 -0700725 case Instruction::SGET_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700726 GenSget(mir, rl_dest, kUnsignedHalf, Primitive::kPrimChar);
727 break;
728
Brian Carlstrom7940e442013-07-12 13:46:57 -0700729 case Instruction::SGET_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700730 GenSget(mir, rl_dest, kSignedHalf, Primitive::kPrimShort);
731 break;
732
733 case Instruction::SGET_BOOLEAN:
734 GenSget(mir, rl_dest, kUnsignedByte, Primitive::kPrimBoolean);
735 break;
736
737 case Instruction::SGET_BYTE:
738 GenSget(mir, rl_dest, kSignedByte, Primitive::kPrimByte);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700739 break;
740
741 case Instruction::SGET_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700742 // kPrimLong and kPrimDouble share the same entrypoints.
743 GenSget(mir, rl_dest, k64, Primitive::kPrimLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700744 break;
745
746 case Instruction::SPUT_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700747 GenSput(mir, rl_src[0], kReference);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700748 break;
749
750 case Instruction::SPUT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700751 GenSput(mir, rl_src[0], k32);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700752 break;
753
Fred Shih37f05ef2014-07-16 18:38:08 -0700754 case Instruction::SPUT_BYTE:
755 case Instruction::SPUT_BOOLEAN:
756 GenSput(mir, rl_src[0], kUnsignedByte);
757 break;
758
759 case Instruction::SPUT_CHAR:
760 GenSput(mir, rl_src[0], kUnsignedHalf);
761 break;
762
763 case Instruction::SPUT_SHORT:
764 GenSput(mir, rl_src[0], kSignedHalf);
765 break;
766
767
Brian Carlstrom7940e442013-07-12 13:46:57 -0700768 case Instruction::SPUT_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700769 GenSput(mir, rl_src[0], k64);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700770 break;
771
772 case Instruction::INVOKE_STATIC_RANGE:
773 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100774 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700775 // If the invocation is not inlined, we can assume there is already a
776 // suspend check at the return site
777 mir_graph_->AppendGenSuspendTestList(bb);
778 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700779 break;
780 case Instruction::INVOKE_STATIC:
781 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100782 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700783 mir_graph_->AppendGenSuspendTestList(bb);
784 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700785 break;
786
787 case Instruction::INVOKE_DIRECT:
788 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100789 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700790 mir_graph_->AppendGenSuspendTestList(bb);
791 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700792 break;
793 case Instruction::INVOKE_DIRECT_RANGE:
794 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100795 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700796 mir_graph_->AppendGenSuspendTestList(bb);
797 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700798 break;
799
800 case Instruction::INVOKE_VIRTUAL:
801 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100802 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700803 mir_graph_->AppendGenSuspendTestList(bb);
804 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700805 break;
806 case Instruction::INVOKE_VIRTUAL_RANGE:
807 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100808 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700809 mir_graph_->AppendGenSuspendTestList(bb);
810 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700811 break;
812
813 case Instruction::INVOKE_SUPER:
814 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100815 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700816 mir_graph_->AppendGenSuspendTestList(bb);
817 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700818 break;
819 case Instruction::INVOKE_SUPER_RANGE:
820 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100821 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700822 mir_graph_->AppendGenSuspendTestList(bb);
823 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700824 break;
825
826 case Instruction::INVOKE_INTERFACE:
827 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100828 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700829 mir_graph_->AppendGenSuspendTestList(bb);
830 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700831 break;
832 case Instruction::INVOKE_INTERFACE_RANGE:
833 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
Vladimir Markoff0ac472014-10-02 17:24:53 +0100834 if (!kLeafOptimization) {
Wei Jin04f4d8a2014-05-29 18:04:29 -0700835 mir_graph_->AppendGenSuspendTestList(bb);
836 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700837 break;
838
839 case Instruction::NEG_INT:
840 case Instruction::NOT_INT:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700841 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700842 break;
843
844 case Instruction::NEG_LONG:
845 case Instruction::NOT_LONG:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700846 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700847 break;
848
849 case Instruction::NEG_FLOAT:
850 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
851 break;
852
853 case Instruction::NEG_DOUBLE:
854 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
855 break;
856
857 case Instruction::INT_TO_LONG:
858 GenIntToLong(rl_dest, rl_src[0]);
859 break;
860
861 case Instruction::LONG_TO_INT:
Yevgeny Rouban6af82062014-11-26 18:11:54 +0600862 GenLongToInt(rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700863 break;
864
865 case Instruction::INT_TO_BYTE:
866 case Instruction::INT_TO_SHORT:
867 case Instruction::INT_TO_CHAR:
868 GenIntNarrowing(opcode, rl_dest, rl_src[0]);
869 break;
870
871 case Instruction::INT_TO_FLOAT:
872 case Instruction::INT_TO_DOUBLE:
873 case Instruction::LONG_TO_FLOAT:
874 case Instruction::LONG_TO_DOUBLE:
875 case Instruction::FLOAT_TO_INT:
876 case Instruction::FLOAT_TO_LONG:
877 case Instruction::FLOAT_TO_DOUBLE:
878 case Instruction::DOUBLE_TO_INT:
879 case Instruction::DOUBLE_TO_LONG:
880 case Instruction::DOUBLE_TO_FLOAT:
881 GenConversion(opcode, rl_dest, rl_src[0]);
882 break;
883
884
885 case Instruction::ADD_INT:
886 case Instruction::ADD_INT_2ADDR:
887 case Instruction::MUL_INT:
888 case Instruction::MUL_INT_2ADDR:
889 case Instruction::AND_INT:
890 case Instruction::AND_INT_2ADDR:
891 case Instruction::OR_INT:
892 case Instruction::OR_INT_2ADDR:
893 case Instruction::XOR_INT:
894 case Instruction::XOR_INT_2ADDR:
895 if (rl_src[0].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +0100896 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700897 GenArithOpIntLit(opcode, rl_dest, rl_src[1],
898 mir_graph_->ConstantValue(rl_src[0].orig_sreg));
899 } else if (rl_src[1].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +0100900 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700901 GenArithOpIntLit(opcode, rl_dest, rl_src[0],
902 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
903 } else {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700904 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700905 }
906 break;
907
908 case Instruction::SUB_INT:
909 case Instruction::SUB_INT_2ADDR:
910 case Instruction::DIV_INT:
911 case Instruction::DIV_INT_2ADDR:
912 case Instruction::REM_INT:
913 case Instruction::REM_INT_2ADDR:
914 case Instruction::SHL_INT:
915 case Instruction::SHL_INT_2ADDR:
916 case Instruction::SHR_INT:
917 case Instruction::SHR_INT_2ADDR:
918 case Instruction::USHR_INT:
919 case Instruction::USHR_INT_2ADDR:
920 if (rl_src[1].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +0100921 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700922 GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
923 } else {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700924 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700925 }
926 break;
927
928 case Instruction::ADD_LONG:
929 case Instruction::SUB_LONG:
930 case Instruction::AND_LONG:
931 case Instruction::OR_LONG:
932 case Instruction::XOR_LONG:
933 case Instruction::ADD_LONG_2ADDR:
934 case Instruction::SUB_LONG_2ADDR:
935 case Instruction::AND_LONG_2ADDR:
936 case Instruction::OR_LONG_2ADDR:
937 case Instruction::XOR_LONG_2ADDR:
938 if (rl_src[0].is_const || rl_src[1].is_const) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700939 GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700940 break;
941 }
Ian Rogersfc787ec2014-10-09 21:56:44 -0700942 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700943 case Instruction::MUL_LONG:
944 case Instruction::DIV_LONG:
945 case Instruction::REM_LONG:
946 case Instruction::MUL_LONG_2ADDR:
947 case Instruction::DIV_LONG_2ADDR:
948 case Instruction::REM_LONG_2ADDR:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700949 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700950 break;
951
952 case Instruction::SHL_LONG:
953 case Instruction::SHR_LONG:
954 case Instruction::USHR_LONG:
955 case Instruction::SHL_LONG_2ADDR:
956 case Instruction::SHR_LONG_2ADDR:
957 case Instruction::USHR_LONG_2ADDR:
958 if (rl_src[1].is_const) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700959 GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700960 } else {
961 GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
962 }
963 break;
964
Ningsheng Jian675e09b2014-10-23 13:48:36 +0800965 case Instruction::DIV_FLOAT:
966 case Instruction::DIV_FLOAT_2ADDR:
967 if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
968 break;
969 }
970 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700971 case Instruction::ADD_FLOAT:
972 case Instruction::SUB_FLOAT:
973 case Instruction::MUL_FLOAT:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700974 case Instruction::REM_FLOAT:
975 case Instruction::ADD_FLOAT_2ADDR:
976 case Instruction::SUB_FLOAT_2ADDR:
977 case Instruction::MUL_FLOAT_2ADDR:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700978 case Instruction::REM_FLOAT_2ADDR:
979 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
980 break;
981
Ningsheng Jian675e09b2014-10-23 13:48:36 +0800982 case Instruction::DIV_DOUBLE:
983 case Instruction::DIV_DOUBLE_2ADDR:
984 if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
985 break;
986 }
987 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700988 case Instruction::ADD_DOUBLE:
989 case Instruction::SUB_DOUBLE:
990 case Instruction::MUL_DOUBLE:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700991 case Instruction::REM_DOUBLE:
992 case Instruction::ADD_DOUBLE_2ADDR:
993 case Instruction::SUB_DOUBLE_2ADDR:
994 case Instruction::MUL_DOUBLE_2ADDR:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700995 case Instruction::REM_DOUBLE_2ADDR:
996 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
997 break;
998
999 case Instruction::RSUB_INT:
1000 case Instruction::ADD_INT_LIT16:
1001 case Instruction::MUL_INT_LIT16:
1002 case Instruction::DIV_INT_LIT16:
1003 case Instruction::REM_INT_LIT16:
1004 case Instruction::AND_INT_LIT16:
1005 case Instruction::OR_INT_LIT16:
1006 case Instruction::XOR_INT_LIT16:
1007 case Instruction::ADD_INT_LIT8:
1008 case Instruction::RSUB_INT_LIT8:
1009 case Instruction::MUL_INT_LIT8:
1010 case Instruction::DIV_INT_LIT8:
1011 case Instruction::REM_INT_LIT8:
1012 case Instruction::AND_INT_LIT8:
1013 case Instruction::OR_INT_LIT8:
1014 case Instruction::XOR_INT_LIT8:
1015 case Instruction::SHL_INT_LIT8:
1016 case Instruction::SHR_INT_LIT8:
1017 case Instruction::USHR_INT_LIT8:
1018 GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
1019 break;
1020
1021 default:
1022 LOG(FATAL) << "Unexpected opcode: " << opcode;
1023 }
buzbee082833c2014-05-17 23:16:26 -07001024 DCHECK(CheckCorePoolSanity());
Brian Carlstrom1895ea32013-07-18 13:28:37 -07001025} // NOLINT(readability/fn_size)
Brian Carlstrom7940e442013-07-12 13:46:57 -07001026
1027// Process extended MIR instructions
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001028void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001029 switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
1030 case kMirOpCopy: {
1031 RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
1032 RegLocation rl_dest = mir_graph_->GetDest(mir);
1033 StoreValue(rl_dest, rl_src);
1034 break;
1035 }
1036 case kMirOpFusedCmplFloat:
1037 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
1038 break;
1039 case kMirOpFusedCmpgFloat:
1040 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
1041 break;
1042 case kMirOpFusedCmplDouble:
1043 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
1044 break;
1045 case kMirOpFusedCmpgDouble:
1046 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
1047 break;
1048 case kMirOpFusedCmpLong:
1049 GenFusedLongCmpBranch(bb, mir);
1050 break;
1051 case kMirOpSelect:
1052 GenSelect(bb, mir);
1053 break;
Razvan A Lupusoru76423242014-08-04 09:38:46 -07001054 case kMirOpNullCheck: {
1055 RegLocation rl_obj = mir_graph_->GetSrc(mir, 0);
1056 rl_obj = LoadValue(rl_obj, kRefReg);
1057 // An explicit check is done because it is not expected that when this is used,
1058 // that it will actually trip up the implicit checks (since an invalid access
1059 // is needed on the null object).
1060 GenExplicitNullCheck(rl_obj.reg, mir->optimization_flags);
1061 break;
1062 }
Mark Mendelld65c51a2014-04-29 16:55:20 -04001063 case kMirOpPhi:
1064 case kMirOpNop:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001065 case kMirOpRangeCheck:
1066 case kMirOpDivZeroCheck:
1067 case kMirOpCheck:
1068 case kMirOpCheckPart2:
1069 // Ignore these known opcodes
1070 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001071 default:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001072 // Give the backends a chance to handle unknown extended MIR opcodes.
1073 GenMachineSpecificExtendedMethodMIR(bb, mir);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001074 break;
1075 }
1076}
1077
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001078void Mir2Lir::GenPrintLabel(MIR* mir) {
1079 // Mark the beginning of a Dalvik instruction for line tracking.
1080 if (cu_->verbose) {
1081 char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
1082 MarkBoundary(mir->offset, inst_str);
1083 }
1084}
1085
Brian Carlstrom7940e442013-07-12 13:46:57 -07001086// Handle the content in each basic block.
// Generate LIR for every MIR in one basic block: emit the block label, run the
// entry/exit sequences for the special blocks, then lower each instruction.
// Returns false unconditionally (iterator-callback convention).
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    // Method prologue: set up frame and home the incoming arguments.
    ResetRegPool();
    int start_vreg = mir_graph_->GetFirstInVR();
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg], mir_graph_->GetMethodLoc());
  } else if (bb->block_type == kExitBlock) {
    // Method epilogue.
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = &kEncodeAll;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction. The check half
      // takes over the work half's instruction/flags/meta, the SSA reps are
      // swapped, and the work half is downgraded to kMirOpCheckPart2 so it is
      // not lowered a second time.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn = work_half->dalvikInsn;
      mir->optimization_flags = work_half->optimization_flags;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
      // Extended MIR opcodes are handled separately from real Dalvik opcodes.
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}
1177
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001178bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
Vladimir Marko5816ed42013-11-27 17:04:20 +00001179 cu_->NewTimingSplit("SpecialMIR2LIR");
Brian Carlstrom7940e442013-07-12 13:46:57 -07001180 // Find the first DalvikByteCode block.
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001181 DCHECK_EQ(mir_graph_->GetNumReachableBlocks(), mir_graph_->GetDfsOrder().size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001182 BasicBlock*bb = NULL;
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001183 for (BasicBlockId dfs_id : mir_graph_->GetDfsOrder()) {
1184 BasicBlock* candidate = mir_graph_->GetBasicBlock(dfs_id);
1185 if (candidate->block_type == kDalvikByteCode) {
1186 bb = candidate;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001187 break;
1188 }
1189 }
1190 if (bb == NULL) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001191 return false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001192 }
1193 DCHECK_EQ(bb->start_offset, 0);
1194 DCHECK(bb->first_mir_insn != NULL);
1195
1196 // Get the first instruction.
1197 MIR* mir = bb->first_mir_insn;
1198
1199 // Free temp registers and reset redundant store tracking.
1200 ResetRegPool();
1201 ResetDefTracking();
buzbeeba574512014-05-12 15:13:16 -07001202 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001203
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001204 return GenSpecialCase(bb, mir, special);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001205}
1206
// Top-level MIR-to-LIR driver: allocates the per-block label array, generates
// code for each reachable block in pre-order DFS layout, and patches in
// branches where fall-through blocks are not laid out consecutively.
void Mir2Lir::MethodMIR2LIR() {
  cu_->NewTimingSplit("MIR2LIR");

  // Hold the labels of each block.
  block_label_list_ =
      static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
                                      kArenaAllocLIR));

  // Walk with a one-block lookahead (next_bb) so we can tell whether the
  // fall-through successor is physically the next block emitted.
  PreOrderDfsIterator iter(mir_graph_);
  BasicBlock* curr_bb = iter.Next();
  BasicBlock* next_bb = iter.Next();
  while (curr_bb != NULL) {
    MethodBlockCodeGen(curr_bb);
    // If the fall_through block is no longer laid out consecutively, drop in a branch.
    BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
    if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
      OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
    }
    curr_bb = next_bb;
    // Advance the lookahead, skipping dead blocks (they emit no code).
    do {
      next_bb = iter.Next();
    } while ((next_bb != NULL) && (next_bb->block_type == kDead));
  }
  // Emit the out-of-line slow-path code accumulated during block codegen.
  HandleSlowPaths();
}
1232
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001233//
1234// LIR Slow Path
1235//
1236
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001237LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001238 m2l_->SetCurrentDexPc(current_dex_pc_);
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001239 LIR* target = m2l_->NewLIR0(opcode);
Vladimir Marko3bc86152014-03-13 14:11:28 +00001240 fromfast_->target = target;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001241 return target;
1242}
Vladimir Marko3bc86152014-03-13 14:11:28 +00001243
Andreas Gampe4b537a82014-06-30 22:24:53 -07001244
1245void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
1246 bool fail, bool report)
1247 const {
1248 if (rs.Valid()) {
1249 if (ref == RefCheck::kCheckRef) {
1250 if (cu_->target64 && !rs.Is64Bit()) {
1251 if (fail) {
1252 CHECK(false) << "Reg storage not 64b for ref.";
1253 } else if (report) {
1254 LOG(WARNING) << "Reg storage not 64b for ref.";
1255 }
1256 }
1257 }
1258 if (wide == WidenessCheck::kCheckWide) {
1259 if (!rs.Is64Bit()) {
1260 if (fail) {
1261 CHECK(false) << "Reg storage not 64b for wide.";
1262 } else if (report) {
1263 LOG(WARNING) << "Reg storage not 64b for wide.";
1264 }
1265 }
1266 }
1267 // A tighter check would be nice, but for now soft-float will not check float at all.
1268 if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
1269 if (!rs.IsFloat()) {
1270 if (fail) {
1271 CHECK(false) << "Reg storage not float for fp.";
1272 } else if (report) {
1273 LOG(WARNING) << "Reg storage not float for fp.";
1274 }
1275 }
1276 } else if (fp == FPCheck::kCheckNotFP) {
1277 if (rs.IsFloat()) {
1278 if (fail) {
1279 CHECK(false) << "Reg storage float for not-fp.";
1280 } else if (report) {
1281 LOG(WARNING) << "Reg storage float for not-fp.";
1282 }
1283 }
1284 }
1285 }
1286}
1287
1288void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
1289 // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
1290 // will be stored.
1291 CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
1292 rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
1293}
1294
// Default implementation: instruction-offset queries are only meaningful for
// backends that override this (fixed-width instruction sets); aborts otherwise.
size_t Mir2Lir::GetInstructionOffset(LIR* lir) {
  UNUSED(lir);
  UNIMPLEMENTED(FATAL) << "Unsupported GetInstructionOffset()";
  UNREACHABLE();
}
1300
Serguei Katkov717a3e42014-11-13 17:19:42 +06001301void Mir2Lir::InToRegStorageMapping::Initialize(ShortyIterator* shorty,
1302 InToRegStorageMapper* mapper) {
1303 DCHECK(mapper != nullptr);
1304 DCHECK(shorty != nullptr);
1305 max_mapped_in_ = -1;
1306 has_arguments_on_stack_ = false;
1307 while (shorty->Next()) {
1308 ShortyArg arg = shorty->GetArg();
1309 RegStorage reg = mapper->GetNextReg(arg);
1310 if (reg.Valid()) {
1311 mapping_.Put(count_, reg);
1312 max_mapped_in_ = count_;
1313 // If the VR is wide and was mapped as wide then account for it.
1314 if (arg.IsWide() && reg.Is64Bit()) {
1315 max_mapped_in_++;
1316 }
1317 } else {
1318 has_arguments_on_stack_ = true;
1319 }
1320 count_ += arg.IsWide() ? 2 : 1;
1321 }
1322 initialized_ = true;
1323}
1324
1325RegStorage Mir2Lir::InToRegStorageMapping::Get(int in_position) {
1326 DCHECK(IsInitialized());
1327 DCHECK_LT(in_position, count_);
1328 auto res = mapping_.find(in_position);
1329 return res != mapping_.end() ? res->second : RegStorage::InvalidReg();
1330}
1331
Brian Carlstrom7940e442013-07-12 13:46:57 -07001332} // namespace art