/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler_internals.h"
#include "local_value_numbering.h"
#include "dataflow_iterator-inl.h"

namespace art {

static unsigned int Predecessors(BasicBlock* bb) {
  return bb->predecessors->Size();
}

/* Set up a constant value for opcodes that have the DF_SETS_CONST attribute */
void MIRGraph::SetConstant(int32_t ssa_reg, int value) {
  is_constant_v_->SetBit(ssa_reg);
  constant_values_[ssa_reg] = value;
}

void MIRGraph::SetConstantWide(int ssa_reg, int64_t value) {
  is_constant_v_->SetBit(ssa_reg);
  constant_values_[ssa_reg] = Low32Bits(value);
  constant_values_[ssa_reg + 1] = High32Bits(value);
}

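/*
 * Record compile-time constants for one basic block: instructions that define a
 * constant directly (DF_SETS_CONST) and moves whose sources are all known constants
 * are noted in is_constant_v_ / constant_values_.
 */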
void MIRGraph::DoConstantPropogation(BasicBlock* bb) {
  MIR* mir;

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    int df_attributes = oat_data_flow_attributes_[mir->dalvikInsn.opcode];

    DecodedInstruction *d_insn = &mir->dalvikInsn;

    if (!(df_attributes & DF_HAS_DEFS)) continue;

    /* Handle instructions that set up constants directly */
    if (df_attributes & DF_SETS_CONST) {
      if (df_attributes & DF_DA) {
        int32_t vB = static_cast<int32_t>(d_insn->vB);
        switch (d_insn->opcode) {
          case Instruction::CONST_4:
          case Instruction::CONST_16:
          case Instruction::CONST:
            SetConstant(mir->ssa_rep->defs[0], vB);
            break;
          case Instruction::CONST_HIGH16:
            SetConstant(mir->ssa_rep->defs[0], vB << 16);
            break;
          case Instruction::CONST_WIDE_16:
          case Instruction::CONST_WIDE_32:
            SetConstantWide(mir->ssa_rep->defs[0], static_cast<int64_t>(vB));
            break;
          case Instruction::CONST_WIDE:
            SetConstantWide(mir->ssa_rep->defs[0], d_insn->vB_wide);
            break;
          case Instruction::CONST_WIDE_HIGH16:
            SetConstantWide(mir->ssa_rep->defs[0], static_cast<int64_t>(vB) << 48);
            break;
          default:
            break;
        }
      }
    } else if (df_attributes & DF_IS_MOVE) {
      int i;

      for (i = 0; i < mir->ssa_rep->num_uses; i++) {
        if (!is_constant_v_->IsBitSet(mir->ssa_rep->uses[i])) break;
      }
      /* Move a register holding a constant to another register */
      if (i == mir->ssa_rep->num_uses) {
        SetConstant(mir->ssa_rep->defs[0], constant_values_[mir->ssa_rep->uses[0]]);
        if (df_attributes & DF_A_WIDE) {
          SetConstant(mir->ssa_rep->defs[1], constant_values_[mir->ssa_rep->uses[1]]);
        }
      }
    }
  }
  /* TODO: implement code to handle arithmetic operations */
}

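/* Allocate the constant-tracking structures, then run constant propagation over every block */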
void MIRGraph::PropagateConstants() {
  is_constant_v_ = new (arena_) ArenaBitVector(arena_, GetNumSSARegs(), false);
  constant_values_ = static_cast<int*>(arena_->NewMem(sizeof(int) * GetNumSSARegs(), true,
                                                      ArenaAllocator::kAllocDFInfo));
  AllNodesIterator iter(this, false /* not iterative */);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    DoConstantPropogation(bb);
  }
}

/* Advance to next strictly dominated MIR node in an extended basic block */
static MIR* AdvanceMIR(BasicBlock** p_bb, MIR* mir) {
  BasicBlock* bb = *p_bb;
  if (mir != NULL) {
    mir = mir->next;
    if (mir == NULL) {
      bb = bb->fall_through;
      if ((bb == NULL) || Predecessors(bb) != 1) {
        mir = NULL;
      } else {
        *p_bb = bb;
        mir = bb->first_mir_insn;
      }
    }
  }
  return mir;
}

/*
 * To be used at an invoke mir. If the logically next mir node represents
 * a move-result, return it. Else, return NULL. If a move-result exists,
 * it is required to immediately follow the invoke with no intervening
 * opcodes or incoming arcs. However, if the result of the invoke is not
 * used, a move-result may not be present.
 */
MIR* MIRGraph::FindMoveResult(BasicBlock* bb, MIR* mir) {
  BasicBlock* tbb = bb;
  mir = AdvanceMIR(&tbb, mir);
  while (mir != NULL) {
    int opcode = mir->dalvikInsn.opcode;
    if ((mir->dalvikInsn.opcode == Instruction::MOVE_RESULT) ||
        (mir->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) ||
        (mir->dalvikInsn.opcode == Instruction::MOVE_RESULT_WIDE)) {
      break;
    }
    // Keep going if pseudo op, otherwise terminate
    if (opcode < kNumPackedOpcodes) {
      mir = NULL;
    } else {
      mir = AdvanceMIR(&tbb, mir);
    }
  }
  return mir;
}

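/*
 * Return the block strictly dominated by bb: the sole successor reached by an
 * unconditional branch or a plain fall-through that has exactly one predecessor,
 * or NULL if there is no such block.
 */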
static BasicBlock* NextDominatedBlock(BasicBlock* bb) {
  if (bb->block_type == kDead) {
    return NULL;
  }
  DCHECK((bb->block_type == kEntryBlock) || (bb->block_type == kDalvikByteCode)
      || (bb->block_type == kExitBlock));
  if (((bb->taken != NULL) && (bb->fall_through == NULL)) &&
      ((bb->taken->block_type == kDalvikByteCode) || (bb->taken->block_type == kExitBlock))) {
    // Follow simple unconditional branches.
    bb = bb->taken;
  } else {
    // Follow simple fallthrough
    bb = (bb->taken != NULL) ? NULL : bb->fall_through;
  }
  if (bb == NULL || (Predecessors(bb) != 1)) {
    return NULL;
  }
  DCHECK((bb->block_type == kDalvikByteCode) || (bb->block_type == kExitBlock));
  return bb;
}

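/* Return the Phi node in bb that has ssa_name as an operand, or NULL if there is none */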
static MIR* FindPhi(BasicBlock* bb, int ssa_name) {
  for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    if (static_cast<int>(mir->dalvikInsn.opcode) == kMirOpPhi) {
      for (int i = 0; i < mir->ssa_rep->num_uses; i++) {
        if (mir->ssa_rep->uses[i] == ssa_name) {
          return mir;
        }
      }
    }
  }
  return NULL;
}

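/* Classify a MIR as a move, a constant load, or a goto for select-pattern matching */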
static SelectInstructionKind SelectKind(MIR* mir) {
  switch (mir->dalvikInsn.opcode) {
    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      return kSelectMove;
    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      return kSelectConst;
    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      return kSelectGoto;
    default:
      return kSelectNone;
  }
}

int MIRGraph::GetSSAUseCount(int s_reg) {
  return raw_use_counts_.Get(s_reg);
}

/* Do some MIR-level extended basic block optimizations */
bool MIRGraph::BasicBlockOpt(BasicBlock* bb) {
  if (bb->block_type == kDead) {
    return true;
  }
  int num_temps = 0;
  LocalValueNumbering local_valnum(cu_);
  while (bb != NULL) {
    for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
      // TUNING: use the returned value number for CSE.
      local_valnum.GetValueNumber(mir);
      // Look for interesting opcodes, skip otherwise
      Instruction::Code opcode = mir->dalvikInsn.opcode;
      switch (opcode) {
        case Instruction::CMPL_FLOAT:
        case Instruction::CMPL_DOUBLE:
        case Instruction::CMPG_FLOAT:
        case Instruction::CMPG_DOUBLE:
        case Instruction::CMP_LONG:
          if ((cu_->disable_opt & (1 << kBranchFusing)) != 0) {
            // Bitcode doesn't allow this optimization.
            break;
          }
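          // Try to fuse this compare with an IF_xxZ that immediately follows it and is the
          // sole consumer of its result.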
          if (mir->next != NULL) {
            MIR* mir_next = mir->next;
            Instruction::Code br_opcode = mir_next->dalvikInsn.opcode;
            ConditionCode ccode = kCondNv;
            switch (br_opcode) {
              case Instruction::IF_EQZ:
                ccode = kCondEq;
                break;
              case Instruction::IF_NEZ:
                ccode = kCondNe;
                break;
              case Instruction::IF_LTZ:
                ccode = kCondLt;
                break;
              case Instruction::IF_GEZ:
                ccode = kCondGe;
                break;
              case Instruction::IF_GTZ:
                ccode = kCondGt;
                break;
              case Instruction::IF_LEZ:
                ccode = kCondLe;
                break;
              default:
                break;
            }
            // Make sure result of cmp is used by next insn and nowhere else
            if ((ccode != kCondNv) &&
                (mir->ssa_rep->defs[0] == mir_next->ssa_rep->uses[0]) &&
                (GetSSAUseCount(mir->ssa_rep->defs[0]) == 1)) {
              mir_next->dalvikInsn.arg[0] = ccode;
              switch (opcode) {
                case Instruction::CMPL_FLOAT:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmplFloat);
                  break;
                case Instruction::CMPL_DOUBLE:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmplDouble);
                  break;
                case Instruction::CMPG_FLOAT:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmpgFloat);
                  break;
                case Instruction::CMPG_DOUBLE:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmpgDouble);
                  break;
                case Instruction::CMP_LONG:
                  mir_next->dalvikInsn.opcode =
                      static_cast<Instruction::Code>(kMirOpFusedCmpLong);
                  break;
                default: LOG(ERROR) << "Unexpected opcode: " << opcode;
              }
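              // Hand the compare's operands to the fused branch and turn the compare into a nop.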
              mir->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpNop);
              mir_next->ssa_rep->num_uses = mir->ssa_rep->num_uses;
              mir_next->ssa_rep->uses = mir->ssa_rep->uses;
              mir_next->ssa_rep->fp_use = mir->ssa_rep->fp_use;
              mir_next->ssa_rep->num_defs = 0;
              mir->ssa_rep->num_uses = 0;
              mir->ssa_rep->num_defs = 0;
            }
          }
          break;
        case Instruction::GOTO:
        case Instruction::GOTO_16:
        case Instruction::GOTO_32:
        case Instruction::IF_EQ:
        case Instruction::IF_NE:
        case Instruction::IF_LT:
        case Instruction::IF_GE:
        case Instruction::IF_GT:
        case Instruction::IF_LE:
        case Instruction::IF_EQZ:
        case Instruction::IF_NEZ:
        case Instruction::IF_LTZ:
        case Instruction::IF_GEZ:
        case Instruction::IF_GTZ:
        case Instruction::IF_LEZ:
          // If we've got a backwards branch to return, no need to suspend check.
          if ((IsBackedge(bb, bb->taken) && bb->taken->dominates_return) ||
              (IsBackedge(bb, bb->fall_through) && bb->fall_through->dominates_return)) {
            mir->optimization_flags |= MIR_IGNORE_SUSPEND_CHECK;
            if (cu_->verbose) {
              LOG(INFO) << "Suppressed suspend check on branch to return at 0x" << std::hex
                        << mir->offset;
            }
          }
          break;
        default:
          break;
      }
      // Is this the select pattern?
      // TODO: flesh out support for Mips and X86. NOTE: llvm's select op doesn't quite work here.
      // TUNING: expand to support IF_xx compare & branches
      if (!(cu_->compiler_backend == kPortable) && (cu_->instruction_set == kThumb2) &&
          ((mir->dalvikInsn.opcode == Instruction::IF_EQZ) ||
           (mir->dalvikInsn.opcode == Instruction::IF_NEZ))) {
        BasicBlock* ft = bb->fall_through;
        DCHECK(ft != NULL);
        BasicBlock* ft_ft = ft->fall_through;
        BasicBlock* ft_tk = ft->taken;

        BasicBlock* tk = bb->taken;
        DCHECK(tk != NULL);
        BasicBlock* tk_ft = tk->fall_through;
        BasicBlock* tk_tk = tk->taken;

        /*
         * In the select pattern, the taken edge goes to a block that unconditionally
         * transfers to the rejoin block and the fall_through edge goes to a block that
         * unconditionally falls through to the rejoin block.
         */
        if ((tk_ft == NULL) && (ft_tk == NULL) && (tk_tk == ft_ft) &&
            (Predecessors(tk) == 1) && (Predecessors(ft) == 1)) {
          /*
           * Okay - we have the basic diamond shape. At the very least, we can eliminate the
           * suspend check on the taken-taken branch back to the join point.
           */
          if (SelectKind(tk->last_mir_insn) == kSelectGoto) {
            tk->last_mir_insn->optimization_flags |= (MIR_IGNORE_SUSPEND_CHECK);
          }
          // Are the block bodies something we can handle?
          if ((ft->first_mir_insn == ft->last_mir_insn) &&
              (tk->first_mir_insn != tk->last_mir_insn) &&
              (tk->first_mir_insn->next == tk->last_mir_insn) &&
              ((SelectKind(ft->first_mir_insn) == kSelectMove) ||
               (SelectKind(ft->first_mir_insn) == kSelectConst)) &&
              (SelectKind(ft->first_mir_insn) == SelectKind(tk->first_mir_insn)) &&
              (SelectKind(tk->last_mir_insn) == kSelectGoto)) {
            // Almost there. Are the instructions targeting the same vreg?
            MIR* if_true = tk->first_mir_insn;
            MIR* if_false = ft->first_mir_insn;
            // It's possible that the target of the select isn't used - skip those (rare) cases.
            MIR* phi = FindPhi(tk_tk, if_true->ssa_rep->defs[0]);
            if ((phi != NULL) && (if_true->dalvikInsn.vA == if_false->dalvikInsn.vA)) {
              /*
               * We'll convert the IF_EQZ/IF_NEZ to a SELECT. We need to find the
               * Phi node in the merge block and delete it (while using the SSA name
               * of the merge as the target of the SELECT). Delete both taken and
               * fallthrough blocks, and set fallthrough to merge block.
               * NOTE: not updating other dataflow info (no longer used at this point).
               * If this changes, need to update i_dom, etc. here (and in CombineBlocks).
               */
              if (opcode == Instruction::IF_NEZ) {
                // Normalize.
                MIR* tmp_mir = if_true;
                if_true = if_false;
                if_false = tmp_mir;
              }
              mir->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpSelect);
              bool const_form = (SelectKind(if_true) == kSelectConst);
              if ((SelectKind(if_true) == kSelectMove)) {
                if (IsConst(if_true->ssa_rep->uses[0]) &&
                    IsConst(if_false->ssa_rep->uses[0])) {
                  const_form = true;
                  if_true->dalvikInsn.vB = ConstantValue(if_true->ssa_rep->uses[0]);
                  if_false->dalvikInsn.vB = ConstantValue(if_false->ssa_rep->uses[0]);
                }
              }
              if (const_form) {
                // "true" set val in vB
                mir->dalvikInsn.vB = if_true->dalvikInsn.vB;
                // "false" set val in vC
                mir->dalvikInsn.vC = if_false->dalvikInsn.vB;
              } else {
                DCHECK_EQ(SelectKind(if_true), kSelectMove);
                DCHECK_EQ(SelectKind(if_false), kSelectMove);
                int* src_ssa =
                    static_cast<int*>(arena_->NewMem(sizeof(int) * 3, false,
                                                     ArenaAllocator::kAllocDFInfo));
                src_ssa[0] = mir->ssa_rep->uses[0];
                src_ssa[1] = if_true->ssa_rep->uses[0];
                src_ssa[2] = if_false->ssa_rep->uses[0];
                mir->ssa_rep->uses = src_ssa;
                mir->ssa_rep->num_uses = 3;
              }
              mir->ssa_rep->num_defs = 1;
              mir->ssa_rep->defs =
                  static_cast<int*>(arena_->NewMem(sizeof(int) * 1, false,
                                                   ArenaAllocator::kAllocDFInfo));
              mir->ssa_rep->fp_def =
                  static_cast<bool*>(arena_->NewMem(sizeof(bool) * 1, false,
                                                    ArenaAllocator::kAllocDFInfo));
              mir->ssa_rep->fp_def[0] = if_true->ssa_rep->fp_def[0];
              // Match type of uses to def.
              mir->ssa_rep->fp_use =
                  static_cast<bool*>(arena_->NewMem(sizeof(bool) * mir->ssa_rep->num_uses, false,
                                                    ArenaAllocator::kAllocDFInfo));
              for (int i = 0; i < mir->ssa_rep->num_uses; i++) {
                mir->ssa_rep->fp_use[i] = mir->ssa_rep->fp_def[0];
              }
              /*
               * There is usually a Phi node in the join block for our two cases. If the
               * Phi node only contains our two cases as input, we will use the result
               * SSA name of the Phi node as our select result and delete the Phi. If
               * the Phi node has more than two operands, we will arbitrarily use the SSA
               * name of the "true" path, delete the SSA name of the "false" path from the
               * Phi node (and fix up the incoming arc list).
               */
              if (phi->ssa_rep->num_uses == 2) {
                mir->ssa_rep->defs[0] = phi->ssa_rep->defs[0];
                phi->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpNop);
              } else {
                int dead_def = if_false->ssa_rep->defs[0];
                int live_def = if_true->ssa_rep->defs[0];
                mir->ssa_rep->defs[0] = live_def;
                int* incoming = reinterpret_cast<int*>(phi->dalvikInsn.vB);
                for (int i = 0; i < phi->ssa_rep->num_uses; i++) {
                  if (phi->ssa_rep->uses[i] == live_def) {
                    incoming[i] = bb->id;
                  }
                }
                for (int i = 0; i < phi->ssa_rep->num_uses; i++) {
                  if (phi->ssa_rep->uses[i] == dead_def) {
                    int last_slot = phi->ssa_rep->num_uses - 1;
                    phi->ssa_rep->uses[i] = phi->ssa_rep->uses[last_slot];
                    incoming[i] = incoming[last_slot];
                  }
                }
              }
              phi->ssa_rep->num_uses--;
              bb->taken = NULL;
              tk->block_type = kDead;
              for (MIR* tmir = ft->first_mir_insn; tmir != NULL; tmir = tmir->next) {
                tmir->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpNop);
              }
            }
          }
        }
      }
    }
    bb = NextDominatedBlock(bb);
  }

  if (num_temps > cu_->num_compiler_temps) {
    cu_->num_compiler_temps = num_temps;
  }
  return true;
}

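/*
 * Allocate the per-block bit vector that records which SSA names are known to be
 * null-checked at the end of the block.
 */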
void MIRGraph::NullCheckEliminationInit(struct BasicBlock* bb) {
  if (bb->data_flow_info != NULL) {
    bb->data_flow_info->ending_null_check_v =
        new (arena_) ArenaBitVector(arena_, GetNumSSARegs(), false, kBitMapNullCheck);
  }
}

/* Collect stats on number of checks removed */
void MIRGraph::CountChecks(struct BasicBlock* bb) {
  if (bb->data_flow_info != NULL) {
    for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
      if (mir->ssa_rep == NULL) {
        continue;
      }
      int df_attributes = oat_data_flow_attributes_[mir->dalvikInsn.opcode];
      if (df_attributes & DF_HAS_NULL_CHKS) {
        checkstats_->null_checks++;
        if (mir->optimization_flags & MIR_IGNORE_NULL_CHECK) {
          checkstats_->null_checks_eliminated++;
        }
      }
      if (df_attributes & DF_HAS_RANGE_CHKS) {
        checkstats_->range_checks++;
        if (mir->optimization_flags & MIR_IGNORE_RANGE_CHECK) {
          checkstats_->range_checks_eliminated++;
        }
      }
    }
  }
}

/* Try to make common case the fallthrough path */
static bool LayoutBlocks(struct BasicBlock* bb) {
  // TODO: For now, just looking for direct throws. Consider generalizing for profile feedback
  if (!bb->explicit_throw) {
    return false;
  }
  BasicBlock* walker = bb;
  while (true) {
    // Check termination conditions
    if ((walker->block_type == kEntryBlock) || (Predecessors(walker) != 1)) {
      break;
    }
    BasicBlock* prev = walker->predecessors->Get(0);
    if (prev->conditional_branch) {
      if (prev->fall_through == walker) {
        // Already done - return
        break;
      }
      DCHECK_EQ(walker, prev->taken);
      // Got one. Flip it and exit
      Instruction::Code opcode = prev->last_mir_insn->dalvikInsn.opcode;
      switch (opcode) {
        case Instruction::IF_EQ: opcode = Instruction::IF_NE; break;
        case Instruction::IF_NE: opcode = Instruction::IF_EQ; break;
        case Instruction::IF_LT: opcode = Instruction::IF_GE; break;
        case Instruction::IF_GE: opcode = Instruction::IF_LT; break;
        case Instruction::IF_GT: opcode = Instruction::IF_LE; break;
        case Instruction::IF_LE: opcode = Instruction::IF_GT; break;
        case Instruction::IF_EQZ: opcode = Instruction::IF_NEZ; break;
        case Instruction::IF_NEZ: opcode = Instruction::IF_EQZ; break;
        case Instruction::IF_LTZ: opcode = Instruction::IF_GEZ; break;
        case Instruction::IF_GEZ: opcode = Instruction::IF_LTZ; break;
        case Instruction::IF_GTZ: opcode = Instruction::IF_LEZ; break;
        case Instruction::IF_LEZ: opcode = Instruction::IF_GTZ; break;
        default: LOG(FATAL) << "Unexpected opcode " << opcode;
      }
      prev->last_mir_insn->dalvikInsn.opcode = opcode;
      BasicBlock* t_bb = prev->taken;
      prev->taken = prev->fall_through;
      prev->fall_through = t_bb;
      break;
    }
    walker = prev;
  }
  return false;
}

/* Combine any basic blocks terminated by instructions that we now know can't throw */
bool MIRGraph::CombineBlocks(struct BasicBlock* bb) {
  // Loop here to allow combining a sequence of blocks
  while (true) {
    // Check termination conditions
    if ((bb->first_mir_insn == NULL)
        || (bb->data_flow_info == NULL)
        || (bb->block_type == kExceptionHandling)
        || (bb->block_type == kExitBlock)
        || (bb->block_type == kDead)
        || ((bb->taken == NULL) || (bb->taken->block_type != kExceptionHandling))
        || (bb->successor_block_list.block_list_type != kNotUsed)
        || (static_cast<int>(bb->last_mir_insn->dalvikInsn.opcode) != kMirOpCheck)) {
      break;
    }

    // Test the kMirOpCheck instruction
    MIR* mir = bb->last_mir_insn;
    // Grab the attributes from the paired opcode
    MIR* throw_insn = mir->meta.throw_insn;
    int df_attributes = oat_data_flow_attributes_[throw_insn->dalvikInsn.opcode];
    bool can_combine = true;
    if (df_attributes & DF_HAS_NULL_CHKS) {
      can_combine &= ((throw_insn->optimization_flags & MIR_IGNORE_NULL_CHECK) != 0);
    }
    if (df_attributes & DF_HAS_RANGE_CHKS) {
      can_combine &= ((throw_insn->optimization_flags & MIR_IGNORE_RANGE_CHECK) != 0);
    }
    if (!can_combine) {
      break;
    }
    // OK - got one. Combine
    BasicBlock* bb_next = bb->fall_through;
    DCHECK(!bb_next->catch_entry);
    DCHECK_EQ(Predecessors(bb_next), 1U);
    MIR* t_mir = bb->last_mir_insn->prev;
    // Overwrite the kOpCheck insn with the paired opcode
    DCHECK_EQ(bb_next->first_mir_insn, throw_insn);
    *bb->last_mir_insn = *throw_insn;
    bb->last_mir_insn->prev = t_mir;
    // Use the successor info from the next block
    bb->successor_block_list = bb_next->successor_block_list;
    // Use the ending block linkage from the next block
    bb->fall_through = bb_next->fall_through;
    bb->taken->block_type = kDead;  // Kill the unused exception block
    bb->taken = bb_next->taken;
    // Include the rest of the instructions
    bb->last_mir_insn = bb_next->last_mir_insn;
    /*
     * If lower-half of pair of blocks to combine contained a return, move the flag
     * to the newly combined block.
     */
    bb->terminated_by_return = bb_next->terminated_by_return;

    /*
     * NOTE: we aren't updating all dataflow info here. Should either make sure this pass
     * happens after uses of i_dominated, dom_frontier or update the dataflow info here.
     */

    // Kill bb_next and remap now-dead id to parent
    bb_next->block_type = kDead;
    block_id_map_.Overwrite(bb_next->id, bb->id);

    // Now, loop back and see if we can keep going
  }
  return false;
}

/* Eliminate unnecessary null checks for a basic block. */
bool MIRGraph::EliminateNullChecks(struct BasicBlock* bb) {
  if (bb->data_flow_info == NULL) return false;

  /*
   * Set initial state. Be conservative with catch
   * blocks and start with no assumptions about null check
   * status (except for "this").
   */
  if ((bb->block_type == kEntryBlock) | bb->catch_entry) {
    temp_ssa_register_v_->ClearAllBits();
    if ((cu_->access_flags & kAccStatic) == 0) {
      // If non-static method, mark "this" as non-null
      int this_reg = cu_->num_dalvik_registers - cu_->num_ins;
      temp_ssa_register_v_->SetBit(this_reg);
    }
  } else if (bb->predecessors->Size() == 1) {
    BasicBlock* pred_bb = bb->predecessors->Get(0);
    temp_ssa_register_v_->Copy(pred_bb->data_flow_info->ending_null_check_v);
    if (pred_bb->block_type == kDalvikByteCode) {
      // Check to see if predecessor had an explicit null-check.
      MIR* last_insn = pred_bb->last_mir_insn;
      Instruction::Code last_opcode = last_insn->dalvikInsn.opcode;
      if (last_opcode == Instruction::IF_EQZ) {
        if (pred_bb->fall_through == bb) {
          // On the fall-through edge of an IF_EQZ, the tested register (vA) can't be null.
          temp_ssa_register_v_->SetBit(last_insn->ssa_rep->uses[0]);
        }
      } else if (last_opcode == Instruction::IF_NEZ) {
        if (pred_bb->taken == bb) {
          // On the taken edge of an IF_NEZ, the tested register (vA) can't be null.
          temp_ssa_register_v_->SetBit(last_insn->ssa_rep->uses[0]);
        }
      }
    }
  } else {
    // Starting state is intersection of all incoming arcs
    GrowableArray<BasicBlock*>::Iterator iter(bb->predecessors);
    BasicBlock* pred_bb = iter.Next();
    DCHECK(pred_bb != NULL);
    temp_ssa_register_v_->Copy(pred_bb->data_flow_info->ending_null_check_v);
    while (true) {
      pred_bb = iter.Next();
      if (!pred_bb) break;
      if ((pred_bb->data_flow_info == NULL) ||
          (pred_bb->data_flow_info->ending_null_check_v == NULL)) {
        continue;
      }
      temp_ssa_register_v_->Intersect(pred_bb->data_flow_info->ending_null_check_v);
    }
  }

  // Walk through the instructions in the block, updating as necessary
  for (MIR* mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    if (mir->ssa_rep == NULL) {
      continue;
    }
    int df_attributes = oat_data_flow_attributes_[mir->dalvikInsn.opcode];

    // Mark target of NEW* as non-null
    if (df_attributes & DF_NON_NULL_DST) {
      temp_ssa_register_v_->SetBit(mir->ssa_rep->defs[0]);
    }

    // Mark non-null returns from invoke-style NEW*
    if (df_attributes & DF_NON_NULL_RET) {
      MIR* next_mir = mir->next;
      // Next should be a MOVE_RESULT_OBJECT
      if (next_mir &&
          next_mir->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) {
        // Mark as null checked
        temp_ssa_register_v_->SetBit(next_mir->ssa_rep->defs[0]);
      } else {
        if (next_mir) {
          LOG(WARNING) << "Unexpected opcode following new: " << next_mir->dalvikInsn.opcode;
        } else if (bb->fall_through) {
          // Look in next basic block
          struct BasicBlock* next_bb = bb->fall_through;
          for (MIR* tmir = next_bb->first_mir_insn; tmir != NULL;
               tmir = tmir->next) {
            if (static_cast<int>(tmir->dalvikInsn.opcode) >= static_cast<int>(kMirOpFirst)) {
              continue;
            }
            // First non-pseudo should be MOVE_RESULT_OBJECT
            if (tmir->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) {
              // Mark as null checked
              temp_ssa_register_v_->SetBit(tmir->ssa_rep->defs[0]);
            } else {
              LOG(WARNING) << "Unexpected op after new: " << tmir->dalvikInsn.opcode;
            }
            break;
          }
        }
      }
    }

    /*
     * Propagate nullcheck state on register copies (including
     * Phi pseudo copies). For the latter, nullcheck state is
     * the "and" of all the Phi's operands.
     */
    if (df_attributes & (DF_NULL_TRANSFER_0 | DF_NULL_TRANSFER_N)) {
      int tgt_sreg = mir->ssa_rep->defs[0];
      int operands = (df_attributes & DF_NULL_TRANSFER_0) ? 1 :
          mir->ssa_rep->num_uses;
      bool null_checked = true;
      for (int i = 0; i < operands; i++) {
        null_checked &= temp_ssa_register_v_->IsBitSet(mir->ssa_rep->uses[i]);
      }
      if (null_checked) {
        temp_ssa_register_v_->SetBit(tgt_sreg);
      }
    }

    // Already nullchecked?
    if ((df_attributes & DF_HAS_NULL_CHKS) && !(mir->optimization_flags & MIR_IGNORE_NULL_CHECK)) {
      int src_idx;
      if (df_attributes & DF_NULL_CHK_1) {
        src_idx = 1;
      } else if (df_attributes & DF_NULL_CHK_2) {
        src_idx = 2;
      } else {
        src_idx = 0;
      }
      int src_sreg = mir->ssa_rep->uses[src_idx];
      if (temp_ssa_register_v_->IsBitSet(src_sreg)) {
        // Eliminate the null check
        mir->optimization_flags |= MIR_IGNORE_NULL_CHECK;
      } else {
        // Mark s_reg as null-checked
        temp_ssa_register_v_->SetBit(src_sreg);
      }
    }
  }

  // Did anything change?
  bool changed = !temp_ssa_register_v_->Equal(bb->data_flow_info->ending_null_check_v);
  if (changed) {
    bb->data_flow_info->ending_null_check_v->Copy(temp_ssa_register_v_);
  }
  return changed;
}

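/*
 * Driver for null check elimination: initialize per-block state, then iterate
 * EliminateNullChecks over the blocks in pre-order until no block's exit state changes.
 */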
void MIRGraph::NullCheckElimination() {
  if (!(cu_->disable_opt & (1 << kNullCheckElimination))) {
    DCHECK(temp_ssa_register_v_ != NULL);
    AllNodesIterator iter(this, false /* not iterative */);
    for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
      NullCheckEliminationInit(bb);
    }
    PreOrderDfsIterator iter2(this, true /* iterative */);
    bool change = false;
    for (BasicBlock* bb = iter2.Next(change); bb != NULL; bb = iter2.Next(change)) {
      change = EliminateNullChecks(bb);
    }
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/4_post_nce_cfg/", false);
  }
}

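/* Apply CombineBlocks to every block, merging blocks split by instructions that can no longer throw */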
void MIRGraph::BasicBlockCombine() {
  PreOrderDfsIterator iter(this, false /* not iterative */);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    CombineBlocks(bb);
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/5_post_bbcombine_cfg/", false);
  }
}

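/* Optionally verify dataflow, then flip conditional branches so that explicit-throw paths are not the fall-through case */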
void MIRGraph::CodeLayout() {
  if (cu_->enable_debug & (1 << kDebugVerifyDataflow)) {
    VerifyDataflow();
  }
  AllNodesIterator iter(this, false /* not iterative */);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    LayoutBlocks(bb);
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/2_post_layout_cfg/", true);
  }
}

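/* Gather and log per-method statistics on eliminated null and range checks */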
void MIRGraph::DumpCheckStats() {
  Checkstats* stats =
      static_cast<Checkstats*>(arena_->NewMem(sizeof(Checkstats), true,
                                              ArenaAllocator::kAllocDFInfo));
  checkstats_ = stats;
  AllNodesIterator iter(this, false /* not iterative */);
  for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
    CountChecks(bb);
  }
  if (stats->null_checks > 0) {
    float eliminated = static_cast<float>(stats->null_checks_eliminated);
    float checks = static_cast<float>(stats->null_checks);
    LOG(INFO) << "Null Checks: " << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " "
              << stats->null_checks_eliminated << " of " << stats->null_checks << " -> "
              << (eliminated/checks) * 100.0 << "%";
  }
  if (stats->range_checks > 0) {
    float eliminated = static_cast<float>(stats->range_checks_eliminated);
    float checks = static_cast<float>(stats->range_checks);
    LOG(INFO) << "Range Checks: " << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " "
              << stats->range_checks_eliminated << " of " << stats->range_checks << " -> "
              << (eliminated/checks) * 100.0 << "%";
  }
}

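/*
 * Mark the members of the extended basic block headed by bb as visited, record the head
 * in extended_basic_blocks_, and set dominates_return on every member if any member
 * ends in a return.
 */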
bool MIRGraph::BuildExtendedBBList(struct BasicBlock* bb) {
  if (bb->visited) return false;
  if (!((bb->block_type == kEntryBlock) || (bb->block_type == kDalvikByteCode)
      || (bb->block_type == kExitBlock))) {
    // Ignore special blocks
    bb->visited = true;
    return false;
  }
  // Must be head of extended basic block.
  BasicBlock* start_bb = bb;
  extended_basic_blocks_.push_back(bb);
  bool terminated_by_return = false;
  // Visit blocks strictly dominated by this head.
  while (bb != NULL) {
    bb->visited = true;
    terminated_by_return |= bb->terminated_by_return;
    bb = NextDominatedBlock(bb);
  }
  if (terminated_by_return) {
    // This extended basic block contains a return, so mark all members.
    bb = start_bb;
    while (bb != NULL) {
      bb->dominates_return = true;
      bb = NextDominatedBlock(bb);
    }
  }
  return false;  // Not iterative - return value will be ignored
}

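/*
 * Driver for MIR-level basic block optimization: build the list of extended basic
 * block heads in pre-order, then run BasicBlockOpt on each head.
 */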
void MIRGraph::BasicBlockOptimization() {
  if (!(cu_->disable_opt & (1 << kBBOpt))) {
    DCHECK_EQ(cu_->num_compiler_temps, 0);
    ClearAllVisitedFlags();
    PreOrderDfsIterator iter2(this, false /* not iterative */);
    for (BasicBlock* bb = iter2.Next(); bb != NULL; bb = iter2.Next()) {
      BuildExtendedBBList(bb);
    }
    // Perform extended basic block optimizations.
    for (unsigned int i = 0; i < extended_basic_blocks_.size(); i++) {
      BasicBlockOpt(extended_basic_blocks_[i]);
    }
  }
  if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
    DumpCFG("/sdcard/6_post_bbo_cfg/", false);
  }
}

}  // namespace art