blob: 9f6d8af1fbbd3c2894deef7271a0c101cfa5046f [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Andreas Gampe0b9203e2015-01-22 20:39:27 -080017#include "mir_to_lir-inl.h"
18
Brian Carlstrom7940e442013-07-12 13:46:57 -070019#include "dex/dataflow_iterator-inl.h"
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080020#include "dex/quick/dex_file_method_inliner.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080021#include "driver/compiler_driver.h"
Fred Shih37f05ef2014-07-16 18:38:08 -070022#include "primitive.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070023#include "thread-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070024
25namespace art {
26
buzbeea0cd2d72014-06-01 09:33:49 -070027RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
28 RegisterClass res;
29 switch (shorty_type) {
30 case 'L':
31 res = kRefReg;
32 break;
33 case 'F':
34 // Expected fallthrough.
35 case 'D':
36 res = kFPReg;
37 break;
38 default:
39 res = kCoreReg;
40 }
41 return res;
42}
43
44RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
45 RegisterClass res;
46 if (loc.fp) {
47 DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
48 res = kFPReg;
49 } else if (loc.ref) {
50 res = kRefReg;
51 } else {
52 res = kCoreReg;
53 }
54 return res;
55}
56
Serguei Katkov717a3e42014-11-13 17:19:42 +060057void Mir2Lir::LockArg(int in_position, bool) {
58 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080059
Serguei Katkov717a3e42014-11-13 17:19:42 +060060 if (reg_arg.Valid()) {
61 LockTemp(reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080062 }
63}
64
Vladimir Markoc93ac8b2014-05-13 17:53:49 +010065RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
Vladimir Marko8dea81c2014-06-06 14:50:36 +010066 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Nicolas Geoffray42fcd982014-04-22 11:03:52 +000067 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +070068
69 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080070 /*
71 * When doing a call for x86, it moves the stack pointer in order to push return.
72 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080073 */
74 offset += sizeof(uint32_t);
75 }
76
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +070077 if (cu_->instruction_set == kX86_64) {
78 /*
79 * When doing a call for x86, it moves the stack pointer in order to push return.
80 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
81 */
82 offset += sizeof(uint64_t);
83 }
84
Serguei Katkov717a3e42014-11-13 17:19:42 +060085 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
86
87 // TODO: REVISIT: This adds a spill of low part while we could just copy it.
88 if (reg_arg.Valid() && wide && (reg_arg.GetWideKind() == kNotWide)) {
89 // For wide register we've got only half of it.
90 // Flush it to memory then.
91 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
92 reg_arg = RegStorage::InvalidReg();
93 }
94
95 if (!reg_arg.Valid()) {
96 reg_arg = wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
97 LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, wide ? k64 : k32, kNotVolatile);
98 } else {
99 // Check if we need to copy the arg to a different reg_class.
100 if (!RegClassMatches(reg_class, reg_arg)) {
101 if (wide) {
102 RegStorage new_reg = AllocTypedTempWide(false, reg_class);
103 OpRegCopyWide(new_reg, reg_arg);
104 reg_arg = new_reg;
105 } else {
106 RegStorage new_reg = AllocTypedTemp(false, reg_class);
107 OpRegCopy(new_reg, reg_arg);
108 reg_arg = new_reg;
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700109 }
110 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800111 }
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100112 return reg_arg;
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800113}
114
115void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600116 DCHECK_EQ(rl_dest.location, kLocPhysReg);
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100117 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000118 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700119 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800120 /*
121 * When doing a call for x86, it moves the stack pointer in order to push return.
122 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800123 */
124 offset += sizeof(uint32_t);
125 }
126
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700127 if (cu_->instruction_set == kX86_64) {
128 /*
129 * When doing a call for x86, it moves the stack pointer in order to push return.
130 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
131 */
132 offset += sizeof(uint64_t);
133 }
134
Serguei Katkov717a3e42014-11-13 17:19:42 +0600135 RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
136
137 // TODO: REVISIT: This adds a spill of low part while we could just copy it.
138 if (reg_arg.Valid() && rl_dest.wide && (reg_arg.GetWideKind() == kNotWide)) {
139 // For wide register we've got only half of it.
140 // Flush it to memory then.
141 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
142 reg_arg = RegStorage::InvalidReg();
143 }
144
145 if (!reg_arg.Valid()) {
146 LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, rl_dest.wide ? k64 : k32, kNotVolatile);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800147 } else {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600148 if (rl_dest.wide) {
149 OpRegCopyWide(rl_dest.reg, reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800150 } else {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600151 OpRegCopy(rl_dest.reg, reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800152 }
153 }
154}
155
// Attempt special-case code generation for a simple instance-field getter.
// Only handles non-static getters whose receiver is "this" (object_arg == 0),
// since "this" needs no null check. Returns false (without emitting code) if
// the pattern is unsupported; returns true after emitting the field load into
// the method's return register(s).
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  // Translate the IGET variant into the memory operand size for the load.
  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  // Register class constrained by the field's size/volatility; the return
  // class comes from the method's return type (shorty[0]).
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = IsWide(size) ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    // Load into a temp of the proper class first; copied to the return
    // register(s) below.
    r_result = IsWide(size) ? AllocTypedTempWide(rl_dest.fp, reg_class)
                            : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  // References use the ref-aware load; everything else uses a sized load.
  // Volatile fields get the volatile-load path.
  if (IsRef(size)) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    // Move the loaded value into the actual return register(s).
    if (IsWide(size)) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
213
// Attempt special-case code generation for a simple instance-field setter.
// Only handles non-static setters on "this" (object_arg == 0, so no null
// check is needed) that return void (return_arg_plus1 == 0). Returns false
// (without emitting code) if unsupported; returns true after emitting the
// field store, including a GC card mark for reference stores.
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  // Translate the IPUT variant into the memory operand size for the store.
  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  // Pin both incoming args (receiver and the value to store) before loading.
  LockArg(data.object_arg);
  LockArg(data.src_arg, IsWide(size));
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, IsWide(size));
  // References use the ref-aware store; everything else a sized store.
  // Volatile fields get the volatile-store path.
  if (IsRef(size)) {
    StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (IsRef(size)) {
    // Storing a reference: mark the GC card for the host object.
    MarkGCCard(0, reg_src, reg_obj);
  }
  return true;
}
266
267bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
268 const InlineReturnArgData& data = special.d.return_data;
Vladimir Markoe3e02602014-03-12 15:42:41 +0000269 bool wide = (data.is_wide != 0u);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800270
271 // Point of no return - no aborts after this
272 GenPrintLabel(mir);
273 LockArg(data.arg, wide);
buzbeea0cd2d72014-06-01 09:33:49 -0700274 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
275 RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800276 LoadArgDirect(data.arg, rl_dest);
277 return true;
278}
279
/*
 * Special-case code generation for simple non-throwing leaf methods.
 * Dispatches on the inliner-detected pattern (nop/constant-return/
 * return-arg/getter/setter). On success, emits the special exit sequence
 * and zeroes out all frame/spill bookkeeping since these methods use no
 * frame. Returns whether special-case generation succeeded; on failure
 * the caller falls back to normal code generation.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      // Empty method: the entry MIR must itself be the RETURN_VOID.
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a 32-bit constant: load it into the return register.
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // Special-case methods are frameless: clear all spill/frame metadata.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
347
Brian Carlstrom7940e442013-07-12 13:46:57 -0700348/*
349 * Target-independent code generation. Use only high-level
350 * load/store utilities here, or target-dependent genXX() handlers
351 * when necessary.
352 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700353void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700354 RegLocation rl_src[3];
355 RegLocation rl_dest = mir_graph_->GetBadLoc();
356 RegLocation rl_result = mir_graph_->GetBadLoc();
Ian Rogersc35cda82014-11-10 16:34:29 -0800357 const Instruction::Code opcode = mir->dalvikInsn.opcode;
358 const int opt_flags = mir->optimization_flags;
359 const uint32_t vB = mir->dalvikInsn.vB;
360 const uint32_t vC = mir->dalvikInsn.vC;
buzbee082833c2014-05-17 23:16:26 -0700361 DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
362 << std::hex << current_dalvik_offset_;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700363
364 // Prep Src and Dest locations.
365 int next_sreg = 0;
366 int next_loc = 0;
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700367 uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700368 rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
369 if (attrs & DF_UA) {
370 if (attrs & DF_A_WIDE) {
371 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
372 next_sreg+= 2;
373 } else {
374 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
375 next_sreg++;
376 }
377 }
378 if (attrs & DF_UB) {
379 if (attrs & DF_B_WIDE) {
380 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
381 next_sreg+= 2;
382 } else {
383 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
384 next_sreg++;
385 }
386 }
387 if (attrs & DF_UC) {
388 if (attrs & DF_C_WIDE) {
389 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
390 } else {
391 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
392 }
393 }
394 if (attrs & DF_DA) {
395 if (attrs & DF_A_WIDE) {
396 rl_dest = mir_graph_->GetDestWide(mir);
397 } else {
398 rl_dest = mir_graph_->GetDest(mir);
399 }
400 }
401 switch (opcode) {
402 case Instruction::NOP:
403 break;
404
405 case Instruction::MOVE_EXCEPTION:
406 GenMoveException(rl_dest);
407 break;
408
409 case Instruction::RETURN_VOID:
410 if (((cu_->access_flags & kAccConstructor) != 0) &&
411 cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
412 cu_->class_def_idx)) {
413 GenMemBarrier(kStoreStore);
414 }
Wei Jin04f4d8a2014-05-29 18:04:29 -0700415 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700416 GenSuspendTest(opt_flags);
417 }
418 break;
419
Brian Carlstrom7940e442013-07-12 13:46:57 -0700420 case Instruction::RETURN_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700421 DCHECK(rl_src[0].ref);
Ian Rogersfc787ec2014-10-09 21:56:44 -0700422 FALLTHROUGH_INTENDED;
buzbeea0cd2d72014-06-01 09:33:49 -0700423 case Instruction::RETURN:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700424 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700425 GenSuspendTest(opt_flags);
426 }
buzbeea0cd2d72014-06-01 09:33:49 -0700427 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
428 StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700429 break;
430
431 case Instruction::RETURN_WIDE:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700432 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700433 GenSuspendTest(opt_flags);
434 }
buzbeea0cd2d72014-06-01 09:33:49 -0700435 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
436 StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700437 break;
438
439 case Instruction::MOVE_RESULT_WIDE:
buzbeea0cd2d72014-06-01 09:33:49 -0700440 StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700441 break;
442
443 case Instruction::MOVE_RESULT:
444 case Instruction::MOVE_RESULT_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700445 StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700446 break;
447
448 case Instruction::MOVE:
449 case Instruction::MOVE_OBJECT:
450 case Instruction::MOVE_16:
451 case Instruction::MOVE_OBJECT_16:
452 case Instruction::MOVE_FROM16:
453 case Instruction::MOVE_OBJECT_FROM16:
454 StoreValue(rl_dest, rl_src[0]);
455 break;
456
457 case Instruction::MOVE_WIDE:
458 case Instruction::MOVE_WIDE_16:
459 case Instruction::MOVE_WIDE_FROM16:
460 StoreValueWide(rl_dest, rl_src[0]);
461 break;
462
463 case Instruction::CONST:
464 case Instruction::CONST_4:
465 case Instruction::CONST_16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400466 GenConst(rl_dest, vB);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700467 break;
468
469 case Instruction::CONST_HIGH16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400470 GenConst(rl_dest, vB << 16);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700471 break;
472
473 case Instruction::CONST_WIDE_16:
474 case Instruction::CONST_WIDE_32:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000475 GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700476 break;
477
478 case Instruction::CONST_WIDE:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000479 GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700480 break;
481
482 case Instruction::CONST_WIDE_HIGH16:
483 rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800484 LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700485 StoreValueWide(rl_dest, rl_result);
486 break;
487
488 case Instruction::MONITOR_ENTER:
489 GenMonitorEnter(opt_flags, rl_src[0]);
490 break;
491
492 case Instruction::MONITOR_EXIT:
493 GenMonitorExit(opt_flags, rl_src[0]);
494 break;
495
496 case Instruction::CHECK_CAST: {
497 GenCheckCast(mir->offset, vB, rl_src[0]);
498 break;
499 }
500 case Instruction::INSTANCE_OF:
501 GenInstanceof(vC, rl_dest, rl_src[0]);
502 break;
503
504 case Instruction::NEW_INSTANCE:
505 GenNewInstance(vB, rl_dest);
506 break;
507
508 case Instruction::THROW:
509 GenThrow(rl_src[0]);
510 break;
511
Ian Rogersc35cda82014-11-10 16:34:29 -0800512 case Instruction::ARRAY_LENGTH: {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700513 int len_offset;
514 len_offset = mirror::Array::LengthOffset().Int32Value();
buzbeea0cd2d72014-06-01 09:33:49 -0700515 rl_src[0] = LoadValue(rl_src[0], kRefReg);
buzbee2700f7e2014-03-07 09:46:20 -0800516 GenNullCheck(rl_src[0].reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700517 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee695d13a2014-04-19 13:32:20 -0700518 Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700519 MarkPossibleNullPointerException(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700520 StoreValue(rl_dest, rl_result);
521 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800522 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700523 case Instruction::CONST_STRING:
524 case Instruction::CONST_STRING_JUMBO:
525 GenConstString(vB, rl_dest);
526 break;
527
528 case Instruction::CONST_CLASS:
529 GenConstClass(vB, rl_dest);
530 break;
531
532 case Instruction::FILL_ARRAY_DATA:
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -0700533 GenFillArrayData(mir, vB, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700534 break;
535
536 case Instruction::FILLED_NEW_ARRAY:
537 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
538 false /* not range */));
539 break;
540
541 case Instruction::FILLED_NEW_ARRAY_RANGE:
542 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
543 true /* range */));
544 break;
545
546 case Instruction::NEW_ARRAY:
547 GenNewArray(vC, rl_dest, rl_src[0]);
548 break;
549
550 case Instruction::GOTO:
551 case Instruction::GOTO_16:
552 case Instruction::GOTO_32:
Vladimir Marko8b858e12014-11-27 14:52:37 +0000553 if (mir_graph_->IsBackEdge(bb, bb->taken)) {
buzbee0d829482013-10-11 15:24:55 -0700554 GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700555 } else {
buzbee0d829482013-10-11 15:24:55 -0700556 OpUnconditionalBranch(&label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700557 }
558 break;
559
560 case Instruction::PACKED_SWITCH:
561 GenPackedSwitch(mir, vB, rl_src[0]);
562 break;
563
564 case Instruction::SPARSE_SWITCH:
565 GenSparseSwitch(mir, vB, rl_src[0]);
566 break;
567
568 case Instruction::CMPL_FLOAT:
569 case Instruction::CMPG_FLOAT:
570 case Instruction::CMPL_DOUBLE:
571 case Instruction::CMPG_DOUBLE:
572 GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
573 break;
574
575 case Instruction::CMP_LONG:
576 GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
577 break;
578
579 case Instruction::IF_EQ:
580 case Instruction::IF_NE:
581 case Instruction::IF_LT:
582 case Instruction::IF_GE:
583 case Instruction::IF_GT:
584 case Instruction::IF_LE: {
Vladimir Marko8b858e12014-11-27 14:52:37 +0000585 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000586 GenSuspendTest(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700587 }
Vladimir Marko8b858e12014-11-27 14:52:37 +0000588 LIR* taken = &label_list[bb->taken];
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000589 GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700590 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800591 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700592 case Instruction::IF_EQZ:
593 case Instruction::IF_NEZ:
594 case Instruction::IF_LTZ:
595 case Instruction::IF_GEZ:
596 case Instruction::IF_GTZ:
597 case Instruction::IF_LEZ: {
Vladimir Marko8b858e12014-11-27 14:52:37 +0000598 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000599 GenSuspendTest(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700600 }
Vladimir Marko8b858e12014-11-27 14:52:37 +0000601 LIR* taken = &label_list[bb->taken];
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000602 GenCompareZeroAndBranch(opcode, rl_src[0], taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700603 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800604 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700605
606 case Instruction::AGET_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400607 GenArrayGet(opt_flags, rl_dest.fp ? kDouble : k64, rl_src[0], rl_src[1], rl_dest, 3);
buzbee695d13a2014-04-19 13:32:20 -0700608 break;
609 case Instruction::AGET_OBJECT:
610 GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700611 break;
612 case Instruction::AGET:
Mark Mendellca541342014-10-15 16:59:49 -0400613 GenArrayGet(opt_flags, rl_dest.fp ? kSingle : k32, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700614 break;
615 case Instruction::AGET_BOOLEAN:
616 GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
617 break;
618 case Instruction::AGET_BYTE:
619 GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
620 break;
621 case Instruction::AGET_CHAR:
622 GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
623 break;
624 case Instruction::AGET_SHORT:
625 GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
626 break;
627 case Instruction::APUT_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400628 GenArrayPut(opt_flags, rl_src[0].fp ? kDouble : k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700629 break;
630 case Instruction::APUT:
Mark Mendellca541342014-10-15 16:59:49 -0400631 GenArrayPut(opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700632 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700633 case Instruction::APUT_OBJECT: {
634 bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
635 bool is_safe = is_null; // Always safe to store null.
636 if (!is_safe) {
637 // Check safety from verifier type information.
Vladimir Marko2730db02014-01-27 11:15:17 +0000638 const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
639 is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
Ian Rogersa9a82542013-10-04 11:17:26 -0700640 }
641 if (is_null || is_safe) {
642 // Store of constant null doesn't require an assignability test and can be generated inline
643 // without fixed register usage or a card mark.
buzbee695d13a2014-04-19 13:32:20 -0700644 GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
Ian Rogersa9a82542013-10-04 11:17:26 -0700645 } else {
646 GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
647 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700648 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700649 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700650 case Instruction::APUT_SHORT:
651 case Instruction::APUT_CHAR:
Ian Rogersa9a82542013-10-04 11:17:26 -0700652 GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700653 break;
654 case Instruction::APUT_BYTE:
655 case Instruction::APUT_BOOLEAN:
Ian Rogersa9a82542013-10-04 11:17:26 -0700656 GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700657 break;
658
659 case Instruction::IGET_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700660 GenIGet(mir, opt_flags, kReference, Primitive::kPrimNot, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700661 break;
662
663 case Instruction::IGET_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700664 // kPrimLong and kPrimDouble share the same entrypoints.
Mark Mendellca541342014-10-15 16:59:49 -0400665 if (rl_dest.fp) {
666 GenIGet(mir, opt_flags, kDouble, Primitive::kPrimDouble, rl_dest, rl_src[0]);
667 } else {
668 GenIGet(mir, opt_flags, k64, Primitive::kPrimLong, rl_dest, rl_src[0]);
669 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700670 break;
671
672 case Instruction::IGET:
Mark Mendellca541342014-10-15 16:59:49 -0400673 if (rl_dest.fp) {
674 GenIGet(mir, opt_flags, kSingle, Primitive::kPrimFloat, rl_dest, rl_src[0]);
675 } else {
676 GenIGet(mir, opt_flags, k32, Primitive::kPrimInt, rl_dest, rl_src[0]);
677 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700678 break;
679
680 case Instruction::IGET_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700681 GenIGet(mir, opt_flags, kUnsignedHalf, Primitive::kPrimChar, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700682 break;
683
684 case Instruction::IGET_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700685 GenIGet(mir, opt_flags, kSignedHalf, Primitive::kPrimShort, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700686 break;
687
688 case Instruction::IGET_BOOLEAN:
Fred Shih37f05ef2014-07-16 18:38:08 -0700689 GenIGet(mir, opt_flags, kUnsignedByte, Primitive::kPrimBoolean, rl_dest, rl_src[0]);
690 break;
691
Brian Carlstrom7940e442013-07-12 13:46:57 -0700692 case Instruction::IGET_BYTE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700693 GenIGet(mir, opt_flags, kSignedByte, Primitive::kPrimByte, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700694 break;
695
696 case Instruction::IPUT_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400697 GenIPut(mir, opt_flags, rl_src[0].fp ? kDouble : k64, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700698 break;
699
700 case Instruction::IPUT_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700701 GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700702 break;
703
704 case Instruction::IPUT:
Mark Mendellca541342014-10-15 16:59:49 -0400705 GenIPut(mir, opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700706 break;
707
Brian Carlstrom7940e442013-07-12 13:46:57 -0700708 case Instruction::IPUT_BYTE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700709 case Instruction::IPUT_BOOLEAN:
710 GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700711 break;
712
713 case Instruction::IPUT_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700714 GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700715 break;
716
717 case Instruction::IPUT_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700718 GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700719 break;
720
721 case Instruction::SGET_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700722 GenSget(mir, rl_dest, kReference, Primitive::kPrimNot);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700723 break;
Fred Shih37f05ef2014-07-16 18:38:08 -0700724
Brian Carlstrom7940e442013-07-12 13:46:57 -0700725 case Instruction::SGET:
Mark Mendellca541342014-10-15 16:59:49 -0400726 GenSget(mir, rl_dest, rl_dest.fp ? kSingle : k32, Primitive::kPrimInt);
Fred Shih37f05ef2014-07-16 18:38:08 -0700727 break;
728
Brian Carlstrom7940e442013-07-12 13:46:57 -0700729 case Instruction::SGET_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700730 GenSget(mir, rl_dest, kUnsignedHalf, Primitive::kPrimChar);
731 break;
732
Brian Carlstrom7940e442013-07-12 13:46:57 -0700733 case Instruction::SGET_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700734 GenSget(mir, rl_dest, kSignedHalf, Primitive::kPrimShort);
735 break;
736
737 case Instruction::SGET_BOOLEAN:
738 GenSget(mir, rl_dest, kUnsignedByte, Primitive::kPrimBoolean);
739 break;
740
741 case Instruction::SGET_BYTE:
742 GenSget(mir, rl_dest, kSignedByte, Primitive::kPrimByte);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700743 break;
744
745 case Instruction::SGET_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700746 // kPrimLong and kPrimDouble share the same entrypoints.
Mark Mendellca541342014-10-15 16:59:49 -0400747 GenSget(mir, rl_dest, rl_dest.fp ? kDouble : k64, Primitive::kPrimDouble);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700748 break;
749
750 case Instruction::SPUT_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700751 GenSput(mir, rl_src[0], kReference);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700752 break;
753
754 case Instruction::SPUT:
Mark Mendellca541342014-10-15 16:59:49 -0400755 GenSput(mir, rl_src[0], rl_src[0].fp ? kSingle : k32);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700756 break;
757
Fred Shih37f05ef2014-07-16 18:38:08 -0700758 case Instruction::SPUT_BYTE:
759 case Instruction::SPUT_BOOLEAN:
760 GenSput(mir, rl_src[0], kUnsignedByte);
761 break;
762
763 case Instruction::SPUT_CHAR:
764 GenSput(mir, rl_src[0], kUnsignedHalf);
765 break;
766
767 case Instruction::SPUT_SHORT:
768 GenSput(mir, rl_src[0], kSignedHalf);
769 break;
770
771
Brian Carlstrom7940e442013-07-12 13:46:57 -0700772 case Instruction::SPUT_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400773 GenSput(mir, rl_src[0], rl_src[0].fp ? kDouble : k64);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700774 break;
775
776 case Instruction::INVOKE_STATIC_RANGE:
777 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
778 break;
779 case Instruction::INVOKE_STATIC:
780 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
781 break;
782
783 case Instruction::INVOKE_DIRECT:
784 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
785 break;
786 case Instruction::INVOKE_DIRECT_RANGE:
787 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
788 break;
789
790 case Instruction::INVOKE_VIRTUAL:
791 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
792 break;
793 case Instruction::INVOKE_VIRTUAL_RANGE:
794 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
795 break;
796
797 case Instruction::INVOKE_SUPER:
798 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
799 break;
800 case Instruction::INVOKE_SUPER_RANGE:
801 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
802 break;
803
804 case Instruction::INVOKE_INTERFACE:
805 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
806 break;
807 case Instruction::INVOKE_INTERFACE_RANGE:
808 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
809 break;
810
811 case Instruction::NEG_INT:
812 case Instruction::NOT_INT:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700813 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700814 break;
815
816 case Instruction::NEG_LONG:
817 case Instruction::NOT_LONG:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700818 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700819 break;
820
821 case Instruction::NEG_FLOAT:
822 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
823 break;
824
825 case Instruction::NEG_DOUBLE:
826 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
827 break;
828
829 case Instruction::INT_TO_LONG:
830 GenIntToLong(rl_dest, rl_src[0]);
831 break;
832
833 case Instruction::LONG_TO_INT:
Yevgeny Rouban6af82062014-11-26 18:11:54 +0600834 GenLongToInt(rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700835 break;
836
837 case Instruction::INT_TO_BYTE:
838 case Instruction::INT_TO_SHORT:
839 case Instruction::INT_TO_CHAR:
840 GenIntNarrowing(opcode, rl_dest, rl_src[0]);
841 break;
842
843 case Instruction::INT_TO_FLOAT:
844 case Instruction::INT_TO_DOUBLE:
845 case Instruction::LONG_TO_FLOAT:
846 case Instruction::LONG_TO_DOUBLE:
847 case Instruction::FLOAT_TO_INT:
848 case Instruction::FLOAT_TO_LONG:
849 case Instruction::FLOAT_TO_DOUBLE:
850 case Instruction::DOUBLE_TO_INT:
851 case Instruction::DOUBLE_TO_LONG:
852 case Instruction::DOUBLE_TO_FLOAT:
853 GenConversion(opcode, rl_dest, rl_src[0]);
854 break;
855
856
857 case Instruction::ADD_INT:
858 case Instruction::ADD_INT_2ADDR:
859 case Instruction::MUL_INT:
860 case Instruction::MUL_INT_2ADDR:
861 case Instruction::AND_INT:
862 case Instruction::AND_INT_2ADDR:
863 case Instruction::OR_INT:
864 case Instruction::OR_INT_2ADDR:
865 case Instruction::XOR_INT:
866 case Instruction::XOR_INT_2ADDR:
867 if (rl_src[0].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +0100868 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700869 GenArithOpIntLit(opcode, rl_dest, rl_src[1],
870 mir_graph_->ConstantValue(rl_src[0].orig_sreg));
871 } else if (rl_src[1].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +0100872 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700873 GenArithOpIntLit(opcode, rl_dest, rl_src[0],
874 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
875 } else {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700876 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700877 }
878 break;
879
880 case Instruction::SUB_INT:
881 case Instruction::SUB_INT_2ADDR:
882 case Instruction::DIV_INT:
883 case Instruction::DIV_INT_2ADDR:
884 case Instruction::REM_INT:
885 case Instruction::REM_INT_2ADDR:
886 case Instruction::SHL_INT:
887 case Instruction::SHL_INT_2ADDR:
888 case Instruction::SHR_INT:
889 case Instruction::SHR_INT_2ADDR:
890 case Instruction::USHR_INT:
891 case Instruction::USHR_INT_2ADDR:
892 if (rl_src[1].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +0100893 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700894 GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
895 } else {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700896 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700897 }
898 break;
899
900 case Instruction::ADD_LONG:
901 case Instruction::SUB_LONG:
902 case Instruction::AND_LONG:
903 case Instruction::OR_LONG:
904 case Instruction::XOR_LONG:
905 case Instruction::ADD_LONG_2ADDR:
906 case Instruction::SUB_LONG_2ADDR:
907 case Instruction::AND_LONG_2ADDR:
908 case Instruction::OR_LONG_2ADDR:
909 case Instruction::XOR_LONG_2ADDR:
910 if (rl_src[0].is_const || rl_src[1].is_const) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700911 GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700912 break;
913 }
Ian Rogersfc787ec2014-10-09 21:56:44 -0700914 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700915 case Instruction::MUL_LONG:
916 case Instruction::DIV_LONG:
917 case Instruction::REM_LONG:
918 case Instruction::MUL_LONG_2ADDR:
919 case Instruction::DIV_LONG_2ADDR:
920 case Instruction::REM_LONG_2ADDR:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700921 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700922 break;
923
924 case Instruction::SHL_LONG:
925 case Instruction::SHR_LONG:
926 case Instruction::USHR_LONG:
927 case Instruction::SHL_LONG_2ADDR:
928 case Instruction::SHR_LONG_2ADDR:
929 case Instruction::USHR_LONG_2ADDR:
930 if (rl_src[1].is_const) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700931 GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700932 } else {
933 GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
934 }
935 break;
936
Ningsheng Jian675e09b2014-10-23 13:48:36 +0800937 case Instruction::DIV_FLOAT:
938 case Instruction::DIV_FLOAT_2ADDR:
939 if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
940 break;
941 }
942 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700943 case Instruction::ADD_FLOAT:
944 case Instruction::SUB_FLOAT:
945 case Instruction::MUL_FLOAT:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700946 case Instruction::REM_FLOAT:
947 case Instruction::ADD_FLOAT_2ADDR:
948 case Instruction::SUB_FLOAT_2ADDR:
949 case Instruction::MUL_FLOAT_2ADDR:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700950 case Instruction::REM_FLOAT_2ADDR:
951 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
952 break;
953
Ningsheng Jian675e09b2014-10-23 13:48:36 +0800954 case Instruction::DIV_DOUBLE:
955 case Instruction::DIV_DOUBLE_2ADDR:
956 if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
957 break;
958 }
959 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700960 case Instruction::ADD_DOUBLE:
961 case Instruction::SUB_DOUBLE:
962 case Instruction::MUL_DOUBLE:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700963 case Instruction::REM_DOUBLE:
964 case Instruction::ADD_DOUBLE_2ADDR:
965 case Instruction::SUB_DOUBLE_2ADDR:
966 case Instruction::MUL_DOUBLE_2ADDR:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700967 case Instruction::REM_DOUBLE_2ADDR:
968 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
969 break;
970
971 case Instruction::RSUB_INT:
972 case Instruction::ADD_INT_LIT16:
973 case Instruction::MUL_INT_LIT16:
974 case Instruction::DIV_INT_LIT16:
975 case Instruction::REM_INT_LIT16:
976 case Instruction::AND_INT_LIT16:
977 case Instruction::OR_INT_LIT16:
978 case Instruction::XOR_INT_LIT16:
979 case Instruction::ADD_INT_LIT8:
980 case Instruction::RSUB_INT_LIT8:
981 case Instruction::MUL_INT_LIT8:
982 case Instruction::DIV_INT_LIT8:
983 case Instruction::REM_INT_LIT8:
984 case Instruction::AND_INT_LIT8:
985 case Instruction::OR_INT_LIT8:
986 case Instruction::XOR_INT_LIT8:
987 case Instruction::SHL_INT_LIT8:
988 case Instruction::SHR_INT_LIT8:
989 case Instruction::USHR_INT_LIT8:
990 GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
991 break;
992
993 default:
994 LOG(FATAL) << "Unexpected opcode: " << opcode;
995 }
buzbee082833c2014-05-17 23:16:26 -0700996 DCHECK(CheckCorePoolSanity());
Brian Carlstrom1895ea32013-07-18 13:28:37 -0700997} // NOLINT(readability/fn_size)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700998
// Process extended (pseudo) MIR instructions — compiler-internal opcodes that do not
// correspond to a Dalvik bytecode. Dispatches each one to the matching codegen helper.
void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
  switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
    case kMirOpCopy: {
      // Simple value copy between two registers/locations.
      RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
      RegLocation rl_dest = mir_graph_->GetDest(mir);
      StoreValue(rl_dest, rl_src);
      break;
    }
    case kMirOpFusedCmplFloat:
      // The fused compare-and-branch replaced an ordinary branch; if that branch was a
      // loop back edge we must still emit the suspend check it would have carried.
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmpgFloat:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmplDouble:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpgDouble:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpLong:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedLongCmpBranch(bb, mir);
      break;
    case kMirOpSelect:
      // Conditional move synthesized from an if/else diamond.
      GenSelect(bb, mir);
      break;
    case kMirOpNullCheck: {
      RegLocation rl_obj = mir_graph_->GetSrc(mir, 0);
      rl_obj = LoadValue(rl_obj, kRefReg);
      // An explicit check is done because it is not expected that when this is used,
      // that it will actually trip up the implicit checks (since an invalid access
      // is needed on the null object).
      GenExplicitNullCheck(rl_obj.reg, mir->optimization_flags);
      break;
    }
    case kMirOpPhi:
    case kMirOpNop:
    case kMirOpRangeCheck:
    case kMirOpDivZeroCheck:
    case kMirOpCheck:
    case kMirOpCheckPart2:
      // Ignore these known opcodes — they require no code emission here.
      break;
    default:
      // Give the backends a chance to handle unknown extended MIR opcodes.
      GenMachineSpecificExtendedMethodMIR(bb, mir);
      break;
  }
}
1064
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001065void Mir2Lir::GenPrintLabel(MIR* mir) {
1066 // Mark the beginning of a Dalvik instruction for line tracking.
1067 if (cu_->verbose) {
1068 char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
1069 MarkBoundary(mir->offset, inst_str);
1070 }
1071}
1072
Brian Carlstrom7940e442013-07-12 13:46:57 -07001073// Handle the content in each basic block.
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001074bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001075 if (bb->block_type == kDead) return false;
1076 current_dalvik_offset_ = bb->start_offset;
1077 MIR* mir;
1078 int block_id = bb->id;
1079
1080 block_label_list_[block_id].operands[0] = bb->start_offset;
1081
1082 // Insert the block label.
1083 block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
buzbeeb48819d2013-09-14 16:15:25 -07001084 block_label_list_[block_id].flags.fixup = kFixupLabel;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001085 AppendLIR(&block_label_list_[block_id]);
1086
1087 LIR* head_lir = NULL;
1088
1089 // If this is a catch block, export the start address.
1090 if (bb->catch_entry) {
1091 head_lir = NewLIR0(kPseudoExportedPC);
1092 }
1093
1094 // Free temp registers and reset redundant store tracking.
buzbeeba574512014-05-12 15:13:16 -07001095 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001096
1097 if (bb->block_type == kEntryBlock) {
buzbee56c71782013-09-05 17:13:19 -07001098 ResetRegPool();
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07001099 int start_vreg = mir_graph_->GetFirstInVR();
1100 GenEntrySequence(&mir_graph_->reg_location_[start_vreg], mir_graph_->GetMethodLoc());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001101 } else if (bb->block_type == kExitBlock) {
buzbee56c71782013-09-05 17:13:19 -07001102 ResetRegPool();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001103 GenExitSequence();
1104 }
1105
1106 for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
1107 ResetRegPool();
1108 if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
buzbeeba574512014-05-12 15:13:16 -07001109 ClobberAllTemps();
buzbee7a11ab02014-04-28 20:02:38 -07001110 // Reset temp allocation to minimize differences when A/B testing.
buzbee091cc402014-03-31 10:14:40 -07001111 reg_pool_->ResetNextTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001112 }
1113
1114 if (cu_->disable_opt & (1 << kSuppressLoads)) {
1115 ResetDefTracking();
1116 }
1117
1118 // Reset temp tracking sanity check.
1119 if (kIsDebugBuild) {
1120 live_sreg_ = INVALID_SREG;
1121 }
1122
1123 current_dalvik_offset_ = mir->offset;
1124 int opcode = mir->dalvikInsn.opcode;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001125
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001126 GenPrintLabel(mir);
1127
Brian Carlstrom7940e442013-07-12 13:46:57 -07001128 // Remember the first LIR for this block.
1129 if (head_lir == NULL) {
buzbee252254b2013-09-08 16:20:53 -07001130 head_lir = &block_label_list_[bb->id];
1131 // Set the first label as a scheduling barrier.
buzbeeb48819d2013-09-14 16:15:25 -07001132 DCHECK(!head_lir->flags.use_def_invalid);
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001133 head_lir->u.m.def_mask = &kEncodeAll;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001134 }
1135
1136 if (opcode == kMirOpCheck) {
1137 // Combine check and work halves of throwing instruction.
1138 MIR* work_half = mir->meta.throw_insn;
Alexei Zavjalov56e8e602014-10-30 20:47:28 +06001139 mir->dalvikInsn = work_half->dalvikInsn;
Vladimir Markocc8cc7c2014-10-06 10:52:20 +01001140 mir->optimization_flags = work_half->optimization_flags;
Vladimir Marko4376c872014-01-23 12:39:29 +00001141 mir->meta = work_half->meta; // Whatever the work_half had, we need to copy it.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001142 opcode = work_half->dalvikInsn.opcode;
1143 SSARepresentation* ssa_rep = work_half->ssa_rep;
1144 work_half->ssa_rep = mir->ssa_rep;
1145 mir->ssa_rep = ssa_rep;
1146 work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
Vladimir Marko4376c872014-01-23 12:39:29 +00001147 work_half->meta.throw_insn = mir;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001148 }
1149
Jean Christophe Beyler2ab40eb2014-06-02 09:03:14 -07001150 if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001151 HandleExtendedMethodMIR(bb, mir);
1152 continue;
1153 }
1154
1155 CompileDalvikInstruction(mir, bb, block_label_list_);
1156 }
1157
1158 if (head_lir) {
1159 // Eliminate redundant loads/stores and delay stores into later slots.
1160 ApplyLocalOptimizations(head_lir, last_lir_insn_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001161 }
1162 return false;
1163}
1164
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001165bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
Vladimir Marko5816ed42013-11-27 17:04:20 +00001166 cu_->NewTimingSplit("SpecialMIR2LIR");
Brian Carlstrom7940e442013-07-12 13:46:57 -07001167 // Find the first DalvikByteCode block.
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001168 DCHECK_EQ(mir_graph_->GetNumReachableBlocks(), mir_graph_->GetDfsOrder().size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001169 BasicBlock*bb = NULL;
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001170 for (BasicBlockId dfs_id : mir_graph_->GetDfsOrder()) {
1171 BasicBlock* candidate = mir_graph_->GetBasicBlock(dfs_id);
1172 if (candidate->block_type == kDalvikByteCode) {
1173 bb = candidate;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001174 break;
1175 }
1176 }
1177 if (bb == NULL) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001178 return false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001179 }
1180 DCHECK_EQ(bb->start_offset, 0);
1181 DCHECK(bb->first_mir_insn != NULL);
1182
1183 // Get the first instruction.
1184 MIR* mir = bb->first_mir_insn;
1185
1186 // Free temp registers and reset redundant store tracking.
1187 ResetRegPool();
1188 ResetDefTracking();
buzbeeba574512014-05-12 15:13:16 -07001189 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001190
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001191 return GenSpecialCase(bb, mir, special);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001192}
1193
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001194void Mir2Lir::MethodMIR2LIR() {
buzbeea61f4952013-08-23 14:27:06 -07001195 cu_->NewTimingSplit("MIR2LIR");
1196
Brian Carlstrom7940e442013-07-12 13:46:57 -07001197 // Hold the labels of each block.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +00001198 block_label_list_ = arena_->AllocArray<LIR>(mir_graph_->GetNumBlocks(), kArenaAllocLIR);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001199
buzbee56c71782013-09-05 17:13:19 -07001200 PreOrderDfsIterator iter(mir_graph_);
buzbee252254b2013-09-08 16:20:53 -07001201 BasicBlock* curr_bb = iter.Next();
1202 BasicBlock* next_bb = iter.Next();
1203 while (curr_bb != NULL) {
1204 MethodBlockCodeGen(curr_bb);
1205 // If the fall_through block is no longer laid out consecutively, drop in a branch.
buzbee0d829482013-10-11 15:24:55 -07001206 BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
1207 if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
1208 OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
buzbee252254b2013-09-08 16:20:53 -07001209 }
1210 curr_bb = next_bb;
1211 do {
1212 next_bb = iter.Next();
1213 } while ((next_bb != NULL) && (next_bb->block_type == kDead));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001214 }
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001215 HandleSlowPaths();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001216}
1217
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001218//
1219// LIR Slow Path
1220//
1221
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001222LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001223 m2l_->SetCurrentDexPc(current_dex_pc_);
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001224 LIR* target = m2l_->NewLIR0(opcode);
Vladimir Marko3bc86152014-03-13 14:11:28 +00001225 fromfast_->target = target;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001226 return target;
1227}
Vladimir Marko3bc86152014-03-13 14:11:28 +00001228
Andreas Gampe4b537a82014-06-30 22:24:53 -07001229
1230void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
1231 bool fail, bool report)
1232 const {
1233 if (rs.Valid()) {
1234 if (ref == RefCheck::kCheckRef) {
1235 if (cu_->target64 && !rs.Is64Bit()) {
1236 if (fail) {
1237 CHECK(false) << "Reg storage not 64b for ref.";
1238 } else if (report) {
1239 LOG(WARNING) << "Reg storage not 64b for ref.";
1240 }
1241 }
1242 }
1243 if (wide == WidenessCheck::kCheckWide) {
1244 if (!rs.Is64Bit()) {
1245 if (fail) {
1246 CHECK(false) << "Reg storage not 64b for wide.";
1247 } else if (report) {
1248 LOG(WARNING) << "Reg storage not 64b for wide.";
1249 }
1250 }
1251 }
1252 // A tighter check would be nice, but for now soft-float will not check float at all.
1253 if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
1254 if (!rs.IsFloat()) {
1255 if (fail) {
1256 CHECK(false) << "Reg storage not float for fp.";
1257 } else if (report) {
1258 LOG(WARNING) << "Reg storage not float for fp.";
1259 }
1260 }
1261 } else if (fp == FPCheck::kCheckNotFP) {
1262 if (rs.IsFloat()) {
1263 if (fail) {
1264 CHECK(false) << "Reg storage float for not-fp.";
1265 } else if (report) {
1266 LOG(WARNING) << "Reg storage float for not-fp.";
1267 }
1268 }
1269 }
1270 }
1271}
1272
1273void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
1274 // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
1275 // will be stored.
1276 CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
1277 rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
1278}
1279
Serban Constantinescu63999682014-07-15 17:44:21 +01001280size_t Mir2Lir::GetInstructionOffset(LIR* lir) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001281 UNUSED(lir);
1282 UNIMPLEMENTED(FATAL) << "Unsupported GetInstructionOffset()";
1283 UNREACHABLE();
Serban Constantinescu63999682014-07-15 17:44:21 +01001284}
1285
Serguei Katkov717a3e42014-11-13 17:19:42 +06001286void Mir2Lir::InToRegStorageMapping::Initialize(ShortyIterator* shorty,
1287 InToRegStorageMapper* mapper) {
1288 DCHECK(mapper != nullptr);
1289 DCHECK(shorty != nullptr);
1290 max_mapped_in_ = -1;
1291 has_arguments_on_stack_ = false;
1292 while (shorty->Next()) {
1293 ShortyArg arg = shorty->GetArg();
1294 RegStorage reg = mapper->GetNextReg(arg);
1295 if (reg.Valid()) {
1296 mapping_.Put(count_, reg);
1297 max_mapped_in_ = count_;
1298 // If the VR is wide and was mapped as wide then account for it.
1299 if (arg.IsWide() && reg.Is64Bit()) {
1300 max_mapped_in_++;
1301 }
1302 } else {
1303 has_arguments_on_stack_ = true;
1304 }
1305 count_ += arg.IsWide() ? 2 : 1;
1306 }
1307 initialized_ = true;
1308}
1309
1310RegStorage Mir2Lir::InToRegStorageMapping::Get(int in_position) {
1311 DCHECK(IsInitialized());
1312 DCHECK_LT(in_position, count_);
1313 auto res = mapping_.find(in_position);
1314 return res != mapping_.end() ? res->second : RegStorage::InvalidReg();
1315}
1316
Brian Carlstrom7940e442013-07-12 13:46:57 -07001317} // namespace art