/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler_internals.h"
#include "local_value_numbering.h"
#include "dataflow_iterator-inl.h"

namespace art {

static unsigned int Predecessors(BasicBlock* bb) {
  return bb->predecessors->Size();
}

/* Set up a constant value for opcodes that have the DF_SETS_CONST attribute */
void MIRGraph::SetConstant(int32_t ssa_reg, int value) {
  is_constant_v_->SetBit(ssa_reg);
  constant_values_[ssa_reg] = value;
}

void MIRGraph::SetConstantWide(int ssa_reg, int64_t value) {
  is_constant_v_->SetBit(ssa_reg);
  constant_values_[ssa_reg] = Low32Bits(value);
  constant_values_[ssa_reg + 1] = High32Bits(value);
}

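/*
 * Propagate constants within a basic block: record values defined by the CONST
 * family of opcodes and forward them through simple moves.  Arithmetic on
 * constants is not folded yet (see the TODO below).
 */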
void MIRGraph::DoConstantPropogation(BasicBlock* bb) {
  MIR* mir;

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    uint64_t df_attributes = oat_data_flow_attributes_[mir->dalvikInsn.opcode];

    DecodedInstruction* d_insn = &mir->dalvikInsn;

    if (!(df_attributes & DF_HAS_DEFS)) continue;

    /* Handle instructions that set up constants directly */
    if (df_attributes & DF_SETS_CONST) {
      if (df_attributes & DF_DA) {
        int32_t vB = static_cast<int32_t>(d_insn->vB);
        switch (d_insn->opcode) {
          case Instruction::CONST_4:
          case Instruction::CONST_16:
          case Instruction::CONST:
            SetConstant(mir->ssa_rep->defs[0], vB);
            break;
          case Instruction::CONST_HIGH16:
            SetConstant(mir->ssa_rep->defs[0], vB << 16);
            break;
          case Instruction::CONST_WIDE_16:
          case Instruction::CONST_WIDE_32:
            SetConstantWide(mir->ssa_rep->defs[0], static_cast<int64_t>(vB));
            break;
          case Instruction::CONST_WIDE:
            SetConstantWide(mir->ssa_rep->defs[0], d_insn->vB_wide);
            break;
          case Instruction::CONST_WIDE_HIGH16:
            SetConstantWide(mir->ssa_rep->defs[0], static_cast<int64_t>(vB) << 48);
            break;
          default:
            break;
        }
      }
    } else if (df_attributes & DF_IS_MOVE) {
      int i;

      for (i = 0; i < mir->ssa_rep->num_uses; i++) {
        if (!is_constant_v_->IsBitSet(mir->ssa_rep->uses[i])) break;
      }
      /* Move a register holding a constant to another register */
      if (i == mir->ssa_rep->num_uses) {
        SetConstant(mir->ssa_rep->defs[0], constant_values_[mir->ssa_rep->uses[0]]);
        if (df_attributes & DF_A_WIDE) {
          SetConstant(mir->ssa_rep->defs[1], constant_values_[mir->ssa_rep->uses[1]]);
        }
      }
    }
  }
  /* TODO: implement code to handle arithmetic operations */
}

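/*
 * Allocate the constant-tracking bit vector and value table, then run constant
 * propagation over every basic block.
 */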
void MIRGraph::PropagateConstants() {
  is_constant_v_ = new (arena_) ArenaBitVector(arena_, GetNumSSARegs(), false);
  constant_values_ = static_cast<int*>(arena_->Alloc(sizeof(int) * GetNumSSARegs(),
                                                     ArenaAllocator::kAllocDFInfo));
  AllNodesIterator iter(this);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    DoConstantPropogation(bb);
  }
}

/* Advance to next strictly dominated MIR node in an extended basic block */
MIR* MIRGraph::AdvanceMIR(BasicBlock** p_bb, MIR* mir) {
  BasicBlock* bb = *p_bb;
  if (mir != NULL) {
    mir = mir->next;
    if (mir == NULL) {
      bb = GetBasicBlock(bb->fall_through);
      if ((bb == NULL) || Predecessors(bb) != 1) {
        mir = NULL;
      } else {
        *p_bb = bb;
        mir = bb->first_mir_insn;
      }
    }
  }
  return mir;
}

/*
 * To be used at an invoke mir.  If the logically next mir node represents
 * a move-result, return it.  Else, return NULL.  If a move-result exists,
 * it is required to immediately follow the invoke with no intervening
 * opcodes or incoming arcs.  However, if the result of the invoke is not
 * used, a move-result may not be present.
 */
MIR* MIRGraph::FindMoveResult(BasicBlock* bb, MIR* mir) {
  BasicBlock* tbb = bb;
  mir = AdvanceMIR(&tbb, mir);
  while (mir != NULL) {
    int opcode = mir->dalvikInsn.opcode;
    if ((mir->dalvikInsn.opcode == Instruction::MOVE_RESULT) ||
        (mir->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) ||
        (mir->dalvikInsn.opcode == Instruction::MOVE_RESULT_WIDE)) {
      break;
    }
    // Keep going if pseudo op, otherwise terminate
    if (opcode < kNumPackedOpcodes) {
      mir = NULL;
    } else {
      mir = AdvanceMIR(&tbb, mir);
    }
  }
  return mir;
}

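/*
 * Return the block that strictly follows bb within its extended basic block:
 * follow a simple unconditional branch or fallthrough, but only into a block
 * with a single predecessor.  Returns NULL when the extended block ends.
 */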
BasicBlock* MIRGraph::NextDominatedBlock(BasicBlock* bb) {
  if (bb->block_type == kDead) {
    return NULL;
  }
  DCHECK((bb->block_type == kEntryBlock) || (bb->block_type == kDalvikByteCode)
      || (bb->block_type == kExitBlock));
  BasicBlock* bb_taken = GetBasicBlock(bb->taken);
  BasicBlock* bb_fall_through = GetBasicBlock(bb->fall_through);
  if (((bb_fall_through == NULL) && (bb_taken != NULL)) &&
      ((bb_taken->block_type == kDalvikByteCode) || (bb_taken->block_type == kExitBlock))) {
    // Follow simple unconditional branches.
    bb = bb_taken;
  } else {
    // Follow simple fallthrough
    bb = (bb_taken != NULL) ? NULL : bb_fall_through;
  }
  if (bb == NULL || (Predecessors(bb) != 1)) {
    return NULL;
  }
  DCHECK((bb->block_type == kDalvikByteCode) || (bb->block_type == kExitBlock));
  return bb;
}

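/* Return the Phi instruction in bb (if any) that has ssa_name as an operand. */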
static MIR* FindPhi(BasicBlock* bb, int ssa_name) {
  for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    if (static_cast<int>(mir->dalvikInsn.opcode) == kMirOpPhi) {
      for (int i = 0; i < mir->ssa_rep->num_uses; i++) {
        if (mir->ssa_rep->uses[i] == ssa_name) {
          return mir;
        }
      }
    }
  }
  return NULL;
}

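/* Classify a MIR for select-pattern matching: simple move, constant load, goto, or none. */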
static SelectInstructionKind SelectKind(MIR* mir) {
  switch (mir->dalvikInsn.opcode) {
    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      return kSelectMove;
    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      return kSelectConst;
    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      return kSelectGoto;
    default:
      return kSelectNone;
  }
}

int MIRGraph::GetSSAUseCount(int s_reg) {
  return raw_use_counts_.Get(s_reg);
}


/* Do some MIR-level extended basic block optimizations */
bool MIRGraph::BasicBlockOpt(BasicBlock* bb) {
  if (bb->block_type == kDead) {
    return true;
  }
  int num_temps = 0;
  bool use_lvn = bb->use_lvn;
  UniquePtr<LocalValueNumbering> local_valnum;
  if (use_lvn) {
    local_valnum.reset(new LocalValueNumbering(cu_));
  }
  while (bb != NULL) {
    for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
      // TUNING: use the returned value number for CSE.
      if (use_lvn) {
        local_valnum->GetValueNumber(mir);
      }
      // Look for interesting opcodes, skip otherwise
      Instruction::Code opcode = mir->dalvikInsn.opcode;
      switch (opcode) {
        case Instruction::CMPL_FLOAT:
        case Instruction::CMPL_DOUBLE:
        case Instruction::CMPG_FLOAT:
        case Instruction::CMPG_DOUBLE:
        case Instruction::CMP_LONG:
          if ((cu_->disable_opt & (1 << kBranchFusing)) != 0) {
            // Bitcode doesn't allow this optimization.
            break;
          }
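          // Fuse the compare with the conditional branch that consumes it.  Illustrative
          // example: "cmp-long vX, vA, vB; if-gez vX, :t" collapses into a single
          // kMirOpFusedCmpLong MIR whose condition code is carried in arg[0].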
          if (mir->next != NULL) {
            MIR* mir_next = mir->next;
            Instruction::Code br_opcode = mir_next->dalvikInsn.opcode;
            ConditionCode ccode = kCondNv;
            switch (br_opcode) {
              case Instruction::IF_EQZ:
                ccode = kCondEq;
                break;
              case Instruction::IF_NEZ:
                ccode = kCondNe;
                break;
              case Instruction::IF_LTZ:
                ccode = kCondLt;
                break;
              case Instruction::IF_GEZ:
                ccode = kCondGe;
                break;
              case Instruction::IF_GTZ:
                ccode = kCondGt;
                break;
              case Instruction::IF_LEZ:
                ccode = kCondLe;
                break;
              default:
                break;
            }
            // Make sure result of cmp is used by next insn and nowhere else
            if ((ccode != kCondNv) &&
                (mir->ssa_rep->defs[0] == mir_next->ssa_rep->uses[0]) &&
                (GetSSAUseCount(mir->ssa_rep->defs[0]) == 1)) {
              mir_next->dalvikInsn.arg[0] = ccode;
              switch (opcode) {
                case Instruction::CMPL_FLOAT:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmplFloat);
                  break;
                case Instruction::CMPL_DOUBLE:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmplDouble);
                  break;
                case Instruction::CMPG_FLOAT:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmpgFloat);
                  break;
                case Instruction::CMPG_DOUBLE:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmpgDouble);
                  break;
                case Instruction::CMP_LONG:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmpLong);
                  break;
                default: LOG(ERROR) << "Unexpected opcode: " << opcode;
              }
              mir->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpNop);
              mir_next->ssa_rep->num_uses = mir->ssa_rep->num_uses;
              mir_next->ssa_rep->uses = mir->ssa_rep->uses;
              mir_next->ssa_rep->fp_use = mir->ssa_rep->fp_use;
              mir_next->ssa_rep->num_defs = 0;
              mir->ssa_rep->num_uses = 0;
              mir->ssa_rep->num_defs = 0;
            }
          }
          break;
        case Instruction::GOTO:
        case Instruction::GOTO_16:
        case Instruction::GOTO_32:
        case Instruction::IF_EQ:
        case Instruction::IF_NE:
        case Instruction::IF_LT:
        case Instruction::IF_GE:
        case Instruction::IF_GT:
        case Instruction::IF_LE:
        case Instruction::IF_EQZ:
        case Instruction::IF_NEZ:
        case Instruction::IF_LTZ:
        case Instruction::IF_GEZ:
        case Instruction::IF_GTZ:
        case Instruction::IF_LEZ:
          // If we've got a backwards branch to return, no need to suspend check.
          if ((IsBackedge(bb, bb->taken) && GetBasicBlock(bb->taken)->dominates_return) ||
              (IsBackedge(bb, bb->fall_through) &&
               GetBasicBlock(bb->fall_through)->dominates_return)) {
            mir->optimization_flags |= MIR_IGNORE_SUSPEND_CHECK;
            if (cu_->verbose) {
              LOG(INFO) << "Suppressed suspend check on branch to return at 0x" << std::hex
                        << mir->offset;
            }
          }
          break;
        default:
          break;
      }
      // Is this the select pattern?
      // TODO: flesh out support for Mips and X86.  NOTE: llvm's select op doesn't quite work here.
      // TUNING: expand to support IF_xx compare & branches
      if (!(cu_->compiler_backend == kPortable) && (cu_->instruction_set == kThumb2) &&
          ((mir->dalvikInsn.opcode == Instruction::IF_EQZ) ||
           (mir->dalvikInsn.opcode == Instruction::IF_NEZ))) {
        BasicBlock* ft = GetBasicBlock(bb->fall_through);
        DCHECK(ft != NULL);
        BasicBlock* ft_ft = GetBasicBlock(ft->fall_through);
        BasicBlock* ft_tk = GetBasicBlock(ft->taken);

        BasicBlock* tk = GetBasicBlock(bb->taken);
        DCHECK(tk != NULL);
        BasicBlock* tk_ft = GetBasicBlock(tk->fall_through);
        BasicBlock* tk_tk = GetBasicBlock(tk->taken);

        /*
         * In the select pattern, the taken edge goes to a block that unconditionally
         * transfers to the rejoin block and the fall_through edge goes to a block that
         * unconditionally falls through to the rejoin block.
         */
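        /*
         * Illustrative sketch of the shape being matched (register numbers arbitrary,
         * not actual bytecode layout):
         *
         *   bb:   if-eqz vCond, :tk   <- rewritten to kMirOpSelect below
         *   ft:   const v0, #0        <- single move/const, falls through to join
         *   tk:   const v0, #1
         *         goto :join          <- single move/const plus a goto
         *   join: Phi(v0, v0) ...     <- merged away or pruned below
         */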
        if ((tk_ft == NULL) && (ft_tk == NULL) && (tk_tk == ft_ft) &&
            (Predecessors(tk) == 1) && (Predecessors(ft) == 1)) {
          /*
           * Okay - we have the basic diamond shape.  At the very least, we can eliminate the
           * suspend check on the taken-taken branch back to the join point.
           */
          if (SelectKind(tk->last_mir_insn) == kSelectGoto) {
            tk->last_mir_insn->optimization_flags |= (MIR_IGNORE_SUSPEND_CHECK);
          }
          // Are the block bodies something we can handle?
          if ((ft->first_mir_insn == ft->last_mir_insn) &&
              (tk->first_mir_insn != tk->last_mir_insn) &&
              (tk->first_mir_insn->next == tk->last_mir_insn) &&
              ((SelectKind(ft->first_mir_insn) == kSelectMove) ||
               (SelectKind(ft->first_mir_insn) == kSelectConst)) &&
              (SelectKind(ft->first_mir_insn) == SelectKind(tk->first_mir_insn)) &&
              (SelectKind(tk->last_mir_insn) == kSelectGoto)) {
            // Almost there.  Are the instructions targeting the same vreg?
            MIR* if_true = tk->first_mir_insn;
            MIR* if_false = ft->first_mir_insn;
            // It's possible that the target of the select isn't used - skip those (rare) cases.
            MIR* phi = FindPhi(tk_tk, if_true->ssa_rep->defs[0]);
            if ((phi != NULL) && (if_true->dalvikInsn.vA == if_false->dalvikInsn.vA)) {
              /*
               * We'll convert the IF_EQZ/IF_NEZ to a SELECT.  We need to find the
               * Phi node in the merge block and delete it (while using the SSA name
               * of the merge as the target of the SELECT).  Delete both taken and
               * fallthrough blocks, and set fallthrough to merge block.
               * NOTE: not updating other dataflow info (no longer used at this point).
               * If this changes, need to update i_dom, etc. here (and in CombineBlocks).
               */
              if (opcode == Instruction::IF_NEZ) {
                // Normalize.
                MIR* tmp_mir = if_true;
                if_true = if_false;
                if_false = tmp_mir;
              }
              mir->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpSelect);
              bool const_form = (SelectKind(if_true) == kSelectConst);
              if ((SelectKind(if_true) == kSelectMove)) {
                if (IsConst(if_true->ssa_rep->uses[0]) &&
                    IsConst(if_false->ssa_rep->uses[0])) {
                  const_form = true;
                  if_true->dalvikInsn.vB = ConstantValue(if_true->ssa_rep->uses[0]);
                  if_false->dalvikInsn.vB = ConstantValue(if_false->ssa_rep->uses[0]);
                }
              }
              if (const_form) {
                // "true" set val in vB
                mir->dalvikInsn.vB = if_true->dalvikInsn.vB;
                // "false" set val in vC
                mir->dalvikInsn.vC = if_false->dalvikInsn.vB;
              } else {
                DCHECK_EQ(SelectKind(if_true), kSelectMove);
                DCHECK_EQ(SelectKind(if_false), kSelectMove);
                int* src_ssa =
                    static_cast<int*>(arena_->Alloc(sizeof(int) * 3, ArenaAllocator::kAllocDFInfo));
                src_ssa[0] = mir->ssa_rep->uses[0];
                src_ssa[1] = if_true->ssa_rep->uses[0];
                src_ssa[2] = if_false->ssa_rep->uses[0];
                mir->ssa_rep->uses = src_ssa;
                mir->ssa_rep->num_uses = 3;
              }
              mir->ssa_rep->num_defs = 1;
              mir->ssa_rep->defs =
                  static_cast<int*>(arena_->Alloc(sizeof(int) * 1, ArenaAllocator::kAllocDFInfo));
              mir->ssa_rep->fp_def =
                  static_cast<bool*>(arena_->Alloc(sizeof(bool) * 1, ArenaAllocator::kAllocDFInfo));
              mir->ssa_rep->fp_def[0] = if_true->ssa_rep->fp_def[0];
              // Match type of uses to def.
              mir->ssa_rep->fp_use =
                  static_cast<bool*>(arena_->Alloc(sizeof(bool) * mir->ssa_rep->num_uses,
                                                   ArenaAllocator::kAllocDFInfo));
              for (int i = 0; i < mir->ssa_rep->num_uses; i++) {
                mir->ssa_rep->fp_use[i] = mir->ssa_rep->fp_def[0];
              }
              /*
               * There is usually a Phi node in the join block for our two cases.  If the
               * Phi node only contains our two cases as input, we will use the result
               * SSA name of the Phi node as our select result and delete the Phi.  If
               * the Phi node has more than two operands, we will arbitrarily use the SSA
               * name of the "true" path, delete the SSA name of the "false" path from the
               * Phi node (and fix up the incoming arc list).
               */
              if (phi->ssa_rep->num_uses == 2) {
                mir->ssa_rep->defs[0] = phi->ssa_rep->defs[0];
                phi->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpNop);
              } else {
                int dead_def = if_false->ssa_rep->defs[0];
                int live_def = if_true->ssa_rep->defs[0];
                mir->ssa_rep->defs[0] = live_def;
                BasicBlockId* incoming = phi->meta.phi_incoming;
                for (int i = 0; i < phi->ssa_rep->num_uses; i++) {
                  if (phi->ssa_rep->uses[i] == live_def) {
                    incoming[i] = bb->id;
                  }
                }
                for (int i = 0; i < phi->ssa_rep->num_uses; i++) {
                  if (phi->ssa_rep->uses[i] == dead_def) {
                    int last_slot = phi->ssa_rep->num_uses - 1;
                    phi->ssa_rep->uses[i] = phi->ssa_rep->uses[last_slot];
                    incoming[i] = incoming[last_slot];
                  }
                }
              }
              phi->ssa_rep->num_uses--;
              bb->taken = NullBasicBlockId;
              tk->block_type = kDead;
              for (MIR* tmir = ft->first_mir_insn; tmir != NULL; tmir = tmir->next) {
                tmir->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpNop);
              }
            }
          }
        }
      }
    }
    bb = ((cu_->disable_opt & (1 << kSuppressExceptionEdges)) != 0) ? NextDominatedBlock(bb) : NULL;
  }

  if (num_temps > cu_->num_compiler_temps) {
    cu_->num_compiler_temps = num_temps;
  }
  return true;
}

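/* Allocate the per-block bit vector used to track null-check state at block exit. */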
void MIRGraph::NullCheckEliminationInit(struct BasicBlock* bb) {
  if (bb->data_flow_info != NULL) {
    bb->data_flow_info->ending_null_check_v =
        new (arena_) ArenaBitVector(arena_, GetNumSSARegs(), false, kBitMapNullCheck);
  }
}

/* Collect stats on number of checks removed */
void MIRGraph::CountChecks(struct BasicBlock* bb) {
  if (bb->data_flow_info != NULL) {
    for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
      if (mir->ssa_rep == NULL) {
        continue;
      }
      uint64_t df_attributes = oat_data_flow_attributes_[mir->dalvikInsn.opcode];
      if (df_attributes & DF_HAS_NULL_CHKS) {
        checkstats_->null_checks++;
        if (mir->optimization_flags & MIR_IGNORE_NULL_CHECK) {
          checkstats_->null_checks_eliminated++;
        }
      }
      if (df_attributes & DF_HAS_RANGE_CHKS) {
        checkstats_->range_checks++;
        if (mir->optimization_flags & MIR_IGNORE_RANGE_CHECK) {
          checkstats_->range_checks_eliminated++;
        }
      }
    }
  }
}

/* Try to make common case the fallthrough path */
bool MIRGraph::LayoutBlocks(BasicBlock* bb) {
  // TODO: For now, just looking for direct throws.  Consider generalizing for profile feedback
  if (!bb->explicit_throw) {
    return false;
  }
  BasicBlock* walker = bb;
  while (true) {
    // Check termination conditions
    if ((walker->block_type == kEntryBlock) || (Predecessors(walker) != 1)) {
      break;
    }
    BasicBlock* prev = GetBasicBlock(walker->predecessors->Get(0));
    if (prev->conditional_branch) {
      if (GetBasicBlock(prev->fall_through) == walker) {
        // Already done - return
        break;
      }
      DCHECK_EQ(walker, GetBasicBlock(prev->taken));
      // Got one.  Flip it and exit
      Instruction::Code opcode = prev->last_mir_insn->dalvikInsn.opcode;
      switch (opcode) {
        case Instruction::IF_EQ: opcode = Instruction::IF_NE; break;
        case Instruction::IF_NE: opcode = Instruction::IF_EQ; break;
        case Instruction::IF_LT: opcode = Instruction::IF_GE; break;
        case Instruction::IF_GE: opcode = Instruction::IF_LT; break;
        case Instruction::IF_GT: opcode = Instruction::IF_LE; break;
        case Instruction::IF_LE: opcode = Instruction::IF_GT; break;
        case Instruction::IF_EQZ: opcode = Instruction::IF_NEZ; break;
        case Instruction::IF_NEZ: opcode = Instruction::IF_EQZ; break;
        case Instruction::IF_LTZ: opcode = Instruction::IF_GEZ; break;
        case Instruction::IF_GEZ: opcode = Instruction::IF_LTZ; break;
        case Instruction::IF_GTZ: opcode = Instruction::IF_LEZ; break;
        case Instruction::IF_LEZ: opcode = Instruction::IF_GTZ; break;
        default: LOG(FATAL) << "Unexpected opcode " << opcode;
      }
      prev->last_mir_insn->dalvikInsn.opcode = opcode;
      BasicBlockId t_bb = prev->taken;
      prev->taken = prev->fall_through;
      prev->fall_through = t_bb;
      break;
    }
    walker = prev;
  }
  return false;
}

/* Combine any basic blocks terminated by instructions that we now know can't throw */
bool MIRGraph::CombineBlocks(struct BasicBlock* bb) {
  // Loop here to allow combining a sequence of blocks
  while (true) {
    // Check termination conditions
    if ((bb->first_mir_insn == NULL)
        || (bb->data_flow_info == NULL)
        || (bb->block_type == kExceptionHandling)
        || (bb->block_type == kExitBlock)
        || (bb->block_type == kDead)
        || (bb->taken == NullBasicBlockId)
        || (GetBasicBlock(bb->taken)->block_type != kExceptionHandling)
        || (bb->successor_block_list_type != kNotUsed)
        || (static_cast<int>(bb->last_mir_insn->dalvikInsn.opcode) != kMirOpCheck)) {
      break;
    }

    // Test the kMirOpCheck instruction
    MIR* mir = bb->last_mir_insn;
    // Grab the attributes from the paired opcode
    MIR* throw_insn = mir->meta.throw_insn;
    uint64_t df_attributes = oat_data_flow_attributes_[throw_insn->dalvikInsn.opcode];
    bool can_combine = true;
    if (df_attributes & DF_HAS_NULL_CHKS) {
      can_combine &= ((throw_insn->optimization_flags & MIR_IGNORE_NULL_CHECK) != 0);
    }
    if (df_attributes & DF_HAS_RANGE_CHKS) {
      can_combine &= ((throw_insn->optimization_flags & MIR_IGNORE_RANGE_CHECK) != 0);
    }
    if (!can_combine) {
      break;
    }
    // OK - got one.  Combine
    BasicBlock* bb_next = GetBasicBlock(bb->fall_through);
    DCHECK(!bb_next->catch_entry);
    DCHECK_EQ(Predecessors(bb_next), 1U);
    // Overwrite the kOpCheck insn with the paired opcode
    DCHECK_EQ(bb_next->first_mir_insn, throw_insn);
    *bb->last_mir_insn = *throw_insn;
    // Use the successor info from the next block
    bb->successor_block_list_type = bb_next->successor_block_list_type;
    bb->successor_blocks = bb_next->successor_blocks;
    // Use the ending block linkage from the next block
    bb->fall_through = bb_next->fall_through;
    GetBasicBlock(bb->taken)->block_type = kDead;  // Kill the unused exception block
    bb->taken = bb_next->taken;
    // Include the rest of the instructions
    bb->last_mir_insn = bb_next->last_mir_insn;
    /*
     * If lower-half of pair of blocks to combine contained a return, move the flag
     * to the newly combined block.
     */
    bb->terminated_by_return = bb_next->terminated_by_return;

    /*
     * NOTE: we aren't updating all dataflow info here.  Should either make sure this pass
     * happens after uses of i_dominated, dom_frontier or update the dataflow info here.
     */

    // Kill bb_next and remap now-dead id to parent
    bb_next->block_type = kDead;
    block_id_map_.Overwrite(bb_next->id, bb->id);

    // Now, loop back and see if we can keep going
  }
  return false;
}

/*
 * Eliminate unnecessary null checks for a basic block.  Also, while we're doing
 * an iterative walk, go ahead and perform type and size inference.
 */
bool MIRGraph::EliminateNullChecksAndInferTypes(struct BasicBlock* bb) {
  if (bb->data_flow_info == NULL) return false;
  bool infer_changed = false;
  bool do_nce = ((cu_->disable_opt & (1 << kNullCheckElimination)) == 0);

  if (do_nce) {
    /*
     * Set initial state.  Be conservative with catch
     * blocks and start with no assumptions about null check
     * status (except for "this").
     */
    if ((bb->block_type == kEntryBlock) | bb->catch_entry) {
      temp_ssa_register_v_->ClearAllBits();
      // Assume all ins are objects.
      for (uint16_t in_reg = cu_->num_dalvik_registers - cu_->num_ins;
           in_reg < cu_->num_dalvik_registers; in_reg++) {
        temp_ssa_register_v_->SetBit(in_reg);
      }
      if ((cu_->access_flags & kAccStatic) == 0) {
        // If non-static method, mark "this" as non-null
        int this_reg = cu_->num_dalvik_registers - cu_->num_ins;
        temp_ssa_register_v_->ClearBit(this_reg);
      }
    } else if (bb->predecessors->Size() == 1) {
      BasicBlock* pred_bb = GetBasicBlock(bb->predecessors->Get(0));
      temp_ssa_register_v_->Copy(pred_bb->data_flow_info->ending_null_check_v);
      if (pred_bb->block_type == kDalvikByteCode) {
        // Check to see if predecessor had an explicit null-check.
        MIR* last_insn = pred_bb->last_mir_insn;
        Instruction::Code last_opcode = last_insn->dalvikInsn.opcode;
        if (last_opcode == Instruction::IF_EQZ) {
          if (pred_bb->fall_through == bb->id) {
            // This block is the fall-through of an IF_EQZ, so the tested register (vA)
            // can't be null here.
            temp_ssa_register_v_->ClearBit(last_insn->ssa_rep->uses[0]);
          }
        } else if (last_opcode == Instruction::IF_NEZ) {
          if (pred_bb->taken == bb->id) {
            // This block is the taken target of an IF_NEZ, so the tested register (vA)
            // can't be null here.
            temp_ssa_register_v_->ClearBit(last_insn->ssa_rep->uses[0]);
          }
        }
      }
    } else {
      // Starting state is union of all incoming arcs
      GrowableArray<BasicBlockId>::Iterator iter(bb->predecessors);
      BasicBlock* pred_bb = GetBasicBlock(iter.Next());
      DCHECK(pred_bb != NULL);
      temp_ssa_register_v_->Copy(pred_bb->data_flow_info->ending_null_check_v);
      while (true) {
        pred_bb = GetBasicBlock(iter.Next());
        if (!pred_bb) break;
        if ((pred_bb->data_flow_info == NULL) ||
            (pred_bb->data_flow_info->ending_null_check_v == NULL)) {
          continue;
        }
        temp_ssa_register_v_->Union(pred_bb->data_flow_info->ending_null_check_v);
      }
    }
    // At this point, temp_ssa_register_v_ marks the sregs that may still hold null
    // and therefore still need a null check before being dereferenced.
  }

  // Walk through the instruction in the block, updating as necessary
  for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    if (mir->ssa_rep == NULL) {
      continue;
    }

    // Propagate type info.
    infer_changed = InferTypeAndSize(bb, mir, infer_changed);
    if (!do_nce) {
      continue;
    }

    uint64_t df_attributes = oat_data_flow_attributes_[mir->dalvikInsn.opcode];

    // Might need a null check?
    if (df_attributes & DF_HAS_NULL_CHKS) {
      int src_idx;
      if (df_attributes & DF_NULL_CHK_1) {
        src_idx = 1;
      } else if (df_attributes & DF_NULL_CHK_2) {
        src_idx = 2;
      } else {
        src_idx = 0;
      }
      int src_sreg = mir->ssa_rep->uses[src_idx];
      if (!temp_ssa_register_v_->IsBitSet(src_sreg)) {
        // Eliminate the null check.
        mir->optimization_flags |= MIR_IGNORE_NULL_CHECK;
      } else {
        // Do the null check.
        mir->optimization_flags &= ~MIR_IGNORE_NULL_CHECK;
        // Mark s_reg as null-checked
        temp_ssa_register_v_->ClearBit(src_sreg);
      }
    }

    if ((df_attributes & DF_A_WIDE) ||
        (df_attributes & (DF_REF_A | DF_SETS_CONST | DF_NULL_TRANSFER)) == 0) {
      continue;
    }

    /*
     * First, mark all object definitions as requiring null check.
     * Note: we can't tell if a CONST definition might be used as an object, so treat
     * them all as object definitions.
     */
    if (((df_attributes & (DF_DA | DF_REF_A)) == (DF_DA | DF_REF_A)) ||
        (df_attributes & DF_SETS_CONST)) {
      temp_ssa_register_v_->SetBit(mir->ssa_rep->defs[0]);
    }

    // Now, remove mark from all object definitions we know are non-null.
    if (df_attributes & DF_NON_NULL_DST) {
      // Mark target of NEW* as non-null
      temp_ssa_register_v_->ClearBit(mir->ssa_rep->defs[0]);
    }

    // Mark non-null returns from invoke-style NEW*
    if (df_attributes & DF_NON_NULL_RET) {
      MIR* next_mir = mir->next;
      // Next should be a MOVE_RESULT_OBJECT
      if (next_mir &&
          next_mir->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) {
        // Mark as null checked
        temp_ssa_register_v_->ClearBit(next_mir->ssa_rep->defs[0]);
      } else {
        if (next_mir) {
          LOG(WARNING) << "Unexpected opcode following new: " << next_mir->dalvikInsn.opcode;
        } else if (bb->fall_through != NullBasicBlockId) {
          // Look in next basic block
          struct BasicBlock* next_bb = GetBasicBlock(bb->fall_through);
          for (MIR* tmir = next_bb->first_mir_insn; tmir != NULL;
               tmir = tmir->next) {
            if (static_cast<int>(tmir->dalvikInsn.opcode) >= static_cast<int>(kMirOpFirst)) {
              continue;
            }
            // First non-pseudo should be MOVE_RESULT_OBJECT
            if (tmir->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) {
              // Mark as null checked
              temp_ssa_register_v_->ClearBit(tmir->ssa_rep->defs[0]);
            } else {
              LOG(WARNING) << "Unexpected op after new: " << tmir->dalvikInsn.opcode;
            }
            break;
          }
        }
      }
    }

    /*
     * Propagate nullcheck state on register copies (including
     * Phi pseudo copies).  For the latter, nullcheck state is
     * the "or" of all the Phi's operands.
     */
    if (df_attributes & (DF_NULL_TRANSFER_0 | DF_NULL_TRANSFER_N)) {
      int tgt_sreg = mir->ssa_rep->defs[0];
      int operands = (df_attributes & DF_NULL_TRANSFER_0) ? 1 :
          mir->ssa_rep->num_uses;
      bool needs_null_check = false;
      for (int i = 0; i < operands; i++) {
        needs_null_check |= temp_ssa_register_v_->IsBitSet(mir->ssa_rep->uses[i]);
      }
      if (needs_null_check) {
        temp_ssa_register_v_->SetBit(tgt_sreg);
      } else {
        temp_ssa_register_v_->ClearBit(tgt_sreg);
      }
    }
  }

  // Did anything change?
  bool nce_changed = do_nce && !temp_ssa_register_v_->Equal(bb->data_flow_info->ending_null_check_v);
  if (nce_changed) {
    bb->data_flow_info->ending_null_check_v->Copy(temp_ssa_register_v_);
  }
  return infer_changed | nce_changed;
}

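/*
 * Driver: allocate per-block null-check state (unless null check elimination is
 * disabled), then iterate EliminateNullChecksAndInferTypes over the CFG until
 * nothing changes.
 */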
void MIRGraph::NullCheckEliminationAndTypeInference() {
  DCHECK(temp_ssa_register_v_ != NULL);
  if ((cu_->disable_opt & (1 << kNullCheckElimination)) == 0) {
    AllNodesIterator iter(this);
    for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
      NullCheckEliminationInit(bb);
    }
  }
  RepeatingPreOrderDfsIterator iter2(this);
  bool change = false;
  for (BasicBlock* bb = iter2.Next(change); bb != NULL; bb = iter2.Next(change)) {
    change = EliminateNullChecksAndInferTypes(bb);
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/4_post_nce_cfg/", false);
  }
}

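/* Driver for CombineBlocks(): walk the CFG in pre-order, merging check-split blocks. */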
void MIRGraph::BasicBlockCombine() {
  if ((cu_->disable_opt & (1 << kSuppressExceptionEdges)) != 0) {
    PreOrderDfsIterator iter(this);
    for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
      CombineBlocks(bb);
    }
    if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
      DumpCFG("/sdcard/5_post_bbcombine_cfg/", false);
    }
  }
}

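/* Driver for LayoutBlocks(): try to make the common (non-throwing) case the fallthrough path. */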
void MIRGraph::CodeLayout() {
  if (cu_->enable_debug & (1 << kDebugVerifyDataflow)) {
    VerifyDataflow();
  }
  AllNodesIterator iter(this);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    LayoutBlocks(bb);
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/2_post_layout_cfg/", true);
  }
}

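/* Gather and log statistics on how many null and range checks were eliminated. */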
void MIRGraph::DumpCheckStats() {
  Checkstats* stats =
      static_cast<Checkstats*>(arena_->Alloc(sizeof(Checkstats), ArenaAllocator::kAllocDFInfo));
  checkstats_ = stats;
  AllNodesIterator iter(this);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    CountChecks(bb);
  }
  if (stats->null_checks > 0) {
    float eliminated = static_cast<float>(stats->null_checks_eliminated);
    float checks = static_cast<float>(stats->null_checks);
    LOG(INFO) << "Null Checks: " << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " "
              << stats->null_checks_eliminated << " of " << stats->null_checks << " -> "
              << (eliminated/checks) * 100.0 << "%";
  }
  if (stats->range_checks > 0) {
    float eliminated = static_cast<float>(stats->range_checks_eliminated);
    float checks = static_cast<float>(stats->range_checks);
    LOG(INFO) << "Range Checks: " << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " "
              << stats->range_checks_eliminated << " of " << stats->range_checks << " -> "
              << (eliminated/checks) * 100.0 << "%";
  }
}

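/*
 * Record bb as the head of an extended basic block, mark every block it strictly
 * dominates as visited, and propagate the dominates_return / use_lvn flags across
 * the whole extended block.
 */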
bool MIRGraph::BuildExtendedBBList(struct BasicBlock* bb) {
  if (bb->visited) return false;
  if (!((bb->block_type == kEntryBlock) || (bb->block_type == kDalvikByteCode)
      || (bb->block_type == kExitBlock))) {
    // Ignore special blocks
    bb->visited = true;
    return false;
  }
  // Must be head of extended basic block.
  BasicBlock* start_bb = bb;
  extended_basic_blocks_.push_back(bb->id);
  bool terminated_by_return = false;
  bool do_local_value_numbering = false;
  // Visit blocks strictly dominated by this head.
  while (bb != NULL) {
    bb->visited = true;
    terminated_by_return |= bb->terminated_by_return;
    do_local_value_numbering |= bb->use_lvn;
    bb = NextDominatedBlock(bb);
  }
  if (terminated_by_return || do_local_value_numbering) {
    // Do lvn for all blocks in this extended set.
    bb = start_bb;
    while (bb != NULL) {
      bb->use_lvn = do_local_value_numbering;
      bb->dominates_return = terminated_by_return;
      bb = NextDominatedBlock(bb);
    }
  }
  return false;  // Not iterative - return value will be ignored
}


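/* Top-level driver for the MIR-level basic block optimizations above. */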
void MIRGraph::BasicBlockOptimization() {
  if (!(cu_->disable_opt & (1 << kBBOpt))) {
    DCHECK_EQ(cu_->num_compiler_temps, 0);
    if ((cu_->disable_opt & (1 << kSuppressExceptionEdges)) != 0) {
      ClearAllVisitedFlags();
      PreOrderDfsIterator iter2(this);
      for (BasicBlock* bb = iter2.Next(); bb != NULL; bb = iter2.Next()) {
        BuildExtendedBBList(bb);
      }
      // Perform extended basic block optimizations.
      for (unsigned int i = 0; i < extended_basic_blocks_.size(); i++) {
        BasicBlockOpt(GetBasicBlock(extended_basic_blocks_[i]));
      }
    }
  } else {
    PreOrderDfsIterator iter(this);
    for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
      BasicBlockOpt(bb);
    }
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/6_post_bbo_cfg/", false);
  }
}

}  // namespace art