/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex/quick/mir_to_lir-inl.h"

namespace art {

#define DEBUG_OPT(X)

#define LOAD_STORE_CHECK_REG_DEP(mask, check) (mask.Intersects(*check->u.m.def_mask))

/* Check RAW, WAR, and WAW dependencies on the register operands */
#define CHECK_REG_DEP(use, def, check) ((def.Intersects(*check->u.m.use_mask)) || \
                                        (use.Union(def).Intersects(*check->u.m.def_mask)))
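
/*
 * Illustration (hypothetical instructions, not from this file): for
 *   ldr r0, [r1]      <- candidate (use = {r1}, def = {r0})
 *   add r1, r0, #4    <- check
 * the check instruction reads the candidate's def (RAW on r0) and writes one
 * of its uses (WAR on r1), so CHECK_REG_DEP reports a dependency and the
 * scan must stop.
 */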

/* Load Store Elimination filter:
 * - Wide Load/Store
 * - Exclusive Load/Store
 * - Quad operand Load/Store
 * - List Load/Store
 * - IT blocks
 * - Branch
 * - Dmb
 * These forms touch multiple registers or carry ordering side effects that
 * the single-register alias tracking below cannot model safely.
 */
#define LOAD_STORE_FILTER(flags) ((flags & (IS_QUAD_OP|IS_STORE)) == (IS_QUAD_OP|IS_STORE) || \
                                  (flags & (IS_QUAD_OP|IS_LOAD)) == (IS_QUAD_OP|IS_LOAD) || \
                                  (flags & REG_USE012) == REG_USE012 || \
                                  (flags & REG_DEF01) == REG_DEF01 || \
                                  (flags & REG_DEF_LIST0) || \
                                  (flags & REG_USE_LIST0) || \
                                  (flags & IS_VOLATILE) || \
                                  (flags & IS_BRANCH) || \
                                  (flags & IS_IT))

/* Scheduler heuristics */
#define MAX_HOIST_DISTANCE 20
#define LDLD_DISTANCE 4
#define LD_LATENCY 2
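
/*
 * How the hoisting pass below uses these constants, roughly: a load is
 * hoisted across at most MAX_HOIST_DISTANCE preceding instructions; when the
 * backward scan is stopped by another load, the hoisted load is kept at
 * least LDLD_DISTANCE slots away from it; and LD_LATENCY is the assumed load
 * latency, so the search for an insertion slot ends once it is within
 * LD_LATENCY slots of the consumer.
 */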

static bool IsDalvikRegisterClobbered(LIR* lir1, LIR* lir2) {
  int reg1Lo = DECODE_ALIAS_INFO_REG(lir1->flags.alias_info);
  int reg1Hi = reg1Lo + DECODE_ALIAS_INFO_WIDE(lir1->flags.alias_info);
  int reg2Lo = DECODE_ALIAS_INFO_REG(lir2->flags.alias_info);
  int reg2Hi = reg2Lo + DECODE_ALIAS_INFO_WIDE(lir2->flags.alias_info);

  return (reg1Lo == reg2Lo) || (reg1Lo == reg2Hi) || (reg1Hi == reg2Lo);
}
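
/*
 * Illustration (assuming DECODE_ALIAS_INFO_WIDE yields 1 for a wide access
 * and 0 otherwise): a wide access to Dalvik registers v4/v5 covers [4, 5]
 * and a narrow access to v5 covers [5, 5], so the two clobber each other via
 * reg1Hi == reg2Lo; narrow accesses to v4 and v5 do not overlap.
 */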

/* Convert a more expensive instruction (i.e. a load) into a move */
void Mir2Lir::ConvertMemOpIntoMove(LIR* orig_lir, RegStorage dest, RegStorage src) {
  /* Insert a move to replace the load */
  LIR* move_lir;
  move_lir = OpRegCopyNoInsert(dest, src);
  move_lir->dalvik_offset = orig_lir->dalvik_offset;
  /*
   * Insert the converted instruction after the original since the
   * optimization is scanning in the top-down order and the new instruction
   * will need to be re-checked (e.g. the new dest clobbers the src used in
   * this_lir).
   */
  InsertLIRAfter(orig_lir, move_lir);
}

void Mir2Lir::DumpDependentInsnPair(LIR* check_lir, LIR* this_lir, const char* type) {
  LOG(INFO) << type;
  LOG(INFO) << "Check LIR:";
  DumpLIRInsn(check_lir, 0);
  LOG(INFO) << "This LIR:";
  DumpLIRInsn(this_lir, 0);
}

inline void Mir2Lir::EliminateLoad(LIR* lir, int reg_id) {
  CHECK(RegStorage::SameRegType(lir->operands[0], reg_id));
  RegStorage dest_reg, src_reg;

  /* Same register - the load simply becomes a nop */
  if (lir->operands[0] == reg_id) {
    NopLIR(lir);
    return;
  }

  /* Different register - convert to a move, then nop the load */
  switch (reg_id & RegStorage::kShapeTypeMask) {
    case RegStorage::k32BitSolo | RegStorage::kCoreRegister:
      dest_reg = RegStorage::Solo32(lir->operands[0]);
      src_reg = RegStorage::Solo32(reg_id);
      break;
    case RegStorage::k64BitSolo | RegStorage::kCoreRegister:
      dest_reg = RegStorage::Solo64(lir->operands[0]);
      src_reg = RegStorage::Solo64(reg_id);
      break;
    case RegStorage::k32BitSolo | RegStorage::kFloatingPoint:
      dest_reg = RegStorage::FloatSolo32(lir->operands[0]);
      src_reg = RegStorage::FloatSolo32(reg_id);
      break;
    case RegStorage::k64BitSolo | RegStorage::kFloatingPoint:
      dest_reg = RegStorage::FloatSolo64(lir->operands[0]);
      src_reg = RegStorage::FloatSolo64(reg_id);
      break;
    default:
      LOG(INFO) << "Load Store: Unsupported register type!";
      return;
  }
  ConvertMemOpIntoMove(lir, dest_reg, src_reg);
  NopLIR(lir);
  return;
}

/*
 * Perform a top-down walk, from the first to the last instruction in the
 * superblock, to eliminate redundant loads and stores.
 *
 * An earlier load can eliminate a later load iff
 *   1) They are must-aliases
 *   2) The native register is not clobbered in between
 *   3) The memory location is not written to in between
 *
 * An earlier store can eliminate a later load iff
 *   1) They are must-aliases
 *   2) The native register is not clobbered in between
 *   3) The memory location is not written to in between
 *
 * An earlier store can eliminate a later store iff
 *   1) They are must-aliases
 *   2) The memory location is not written to in between
 */
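
/*
 * A hypothetical ARM-flavoured example of the first rule (registers and
 * offsets are illustrative only):
 *
 *   ldr r0, [r5, #16]   <- this_lir
 *   add r2, r2, #1      <- independent, scanning continues
 *   ldr r1, [r5, #16]   <- must-alias load: EliminateLoad() rewrites it as
 *                          "mov r1, r0" and nops the original load
 */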
void Mir2Lir::ApplyLoadStoreElimination(LIR* head_lir, LIR* tail_lir) {
  LIR* this_lir, *check_lir;
  std::vector<int> alias_list;

  if (head_lir == tail_lir) {
    return;
  }

  for (this_lir = head_lir; this_lir != tail_lir; this_lir = NEXT_LIR(this_lir)) {
    if (this_lir->flags.is_nop || IsPseudoLirOp(this_lir->opcode)) {
      continue;
    }

    uint64_t target_flags = GetTargetInstFlags(this_lir->opcode);
    /* Target LIR - skip if the instruction is:
     * - filtered out by LOAD_STORE_FILTER (wide, exclusive, quad, list,
     *   IT block, branch, dmb)
     * - both a load and a store
     * - neither a load nor a store
     */
    if (LOAD_STORE_FILTER(target_flags) ||
        ((target_flags & (IS_LOAD | IS_STORE)) == (IS_LOAD | IS_STORE)) ||
        !(target_flags & (IS_LOAD | IS_STORE))) {
      continue;
    }
    int native_reg_id = this_lir->operands[0];
    int dest_reg_id = this_lir->operands[1];
    bool is_this_lir_load = target_flags & IS_LOAD;
    ResourceMask this_mem_mask = kEncodeMem.Intersection(this_lir->u.m.use_mask->Union(
        *this_lir->u.m.def_mask));

    /* Memory region - only literal pool, Dalvik register, and heap references are handled */
    if (!this_mem_mask.Intersects(kEncodeLiteral.Union(kEncodeDalvikReg)) &&
        (!this_mem_mask.Intersects(kEncodeLiteral.Union(kEncodeHeapRef)))) {
      continue;
    }

    /* Skip instructions that redefine their own address register */
    if (this_lir->u.m.def_mask->Intersects(*this_lir->u.m.use_mask)) {
      continue;
    }

    ResourceMask stop_def_reg_mask = this_lir->u.m.def_mask->Without(kEncodeMem);
    ResourceMask stop_use_reg_mask = this_lir->u.m.use_mask->Without(kEncodeMem);

    /* The ARM backend can load/store PC */
    ResourceMask uses_pc = GetPCUseDefEncoding();
    if (uses_pc.Intersects(this_lir->u.m.use_mask->Union(*this_lir->u.m.def_mask))) {
      continue;
    }

    /* Initialize the alias list */
    alias_list.clear();
    ResourceMask alias_reg_list_mask = kEncodeNone;
    if (!this_mem_mask.Intersects(kEncodeLiteral)) {
      alias_list.push_back(dest_reg_id);
      SetupRegMask(&alias_reg_list_mask, dest_reg_id);
    }

    /* Scan through the BB for possible elimination candidates */
    for (check_lir = NEXT_LIR(this_lir); check_lir != tail_lir; check_lir = NEXT_LIR(check_lir)) {
      if (check_lir->flags.is_nop || IsPseudoLirOp(check_lir->opcode)) {
        continue;
      }

      if (uses_pc.Intersects(check_lir->u.m.use_mask->Union(*check_lir->u.m.def_mask))) {
        break;
      }

      ResourceMask check_mem_mask = kEncodeMem.Intersection(check_lir->u.m.use_mask->Union(
          *check_lir->u.m.def_mask));
      ResourceMask alias_mem_mask = this_mem_mask.Intersection(check_mem_mask);
      uint64_t check_flags = GetTargetInstFlags(check_lir->opcode);
      bool stop_here = false;
      bool pass_over = false;

      /* Check LIR - stop the scan if the instruction is filtered out by
       * LOAD_STORE_FILTER:
       * - Wide Load/Store
       * - Branch
       * - Dmb
       * - Exclusive load/store
       * - IT blocks
       * - Quad loads/stores
       */
      if (LOAD_STORE_FILTER(check_flags)) {
        stop_here = true;
        /* Possible alias or result of an earlier pass */
      } else if (check_flags & IS_MOVE) {
        /* Iterate by index: push_back may reallocate the vector and would
         * invalidate range-for iterators. */
        size_t list_size = alias_list.size();
        for (size_t i = 0; i < list_size; i++) {
          if (RegStorage::RegNum(check_lir->operands[1]) == RegStorage::RegNum(alias_list[i])) {
            pass_over = true;
            alias_list.push_back(check_lir->operands[0]);
            SetupRegMask(&alias_reg_list_mask, check_lir->operands[0]);
          }
        }
        /* Memory regions */
      } else if (!alias_mem_mask.Equals(kEncodeNone)) {
        DCHECK((check_flags & IS_LOAD) || (check_flags & IS_STORE));
        bool is_check_lir_load = check_flags & IS_LOAD;
        bool reg_compatible = RegStorage::SameRegType(check_lir->operands[0], native_reg_id);

        if (alias_mem_mask.Equals(kEncodeLiteral)) {
          CHECK(check_flags & IS_LOAD);
          /* Same value && same register type */
          if (reg_compatible && (this_lir->target == check_lir->target)) {
            DEBUG_OPT(DumpDependentInsnPair(check_lir, this_lir, "LITERAL"));
            EliminateLoad(check_lir, native_reg_id);
          }
        } else if ((alias_mem_mask.Equals(kEncodeDalvikReg) ||
                    alias_mem_mask.Equals(kEncodeHeapRef)) &&
                   alias_reg_list_mask.Intersects((check_lir->u.m.use_mask)->Without(kEncodeMem))) {
          bool same_offset = (GetInstructionOffset(this_lir) == GetInstructionOffset(check_lir));
          if (same_offset && !is_check_lir_load) {
            if (check_lir->operands[0] != native_reg_id) {
              DEBUG_OPT(DumpDependentInsnPair(check_lir, this_lir, "STORE STOP"));
              stop_here = true;
              break;
            }
          }

          if (reg_compatible && same_offset &&
              ((is_this_lir_load && is_check_lir_load)   /* LDR - LDR */ ||
               (!is_this_lir_load && is_check_lir_load)  /* STR - LDR */ ||
               (!is_this_lir_load && !is_check_lir_load) /* STR - STR */)) {
            DEBUG_OPT(DumpDependentInsnPair(check_lir, this_lir, "LOAD STORE"));
            EliminateLoad(check_lir, native_reg_id);
          }
        } else {
          /* Unsupported memory region */
        }
      }

      if (pass_over) {
        continue;
      }

      if (stop_here == false) {
        bool stop_alias = LOAD_STORE_CHECK_REG_DEP(alias_reg_list_mask, check_lir);
        if (stop_alias) {
          /* Remove every clobbered alias from the list. Iterate by index:
           * erasing inside a range-for would invalidate its iterators. */
          for (size_t i = 0; i < alias_list.size();) {
            int reg = alias_list[i];
            ResourceMask alias_reg_mask = kEncodeNone;
            SetupRegMask(&alias_reg_mask, reg);
            if (LOAD_STORE_CHECK_REG_DEP(alias_reg_mask, check_lir)) {
              ClearRegMask(&alias_reg_list_mask, reg);
              alias_list.erase(alias_list.begin() + i);
            } else {
              i++;
            }
          }
        }
        ResourceMask stop_search_mask = stop_def_reg_mask.Union(stop_use_reg_mask);
        stop_search_mask = stop_search_mask.Union(alias_reg_list_mask);
        stop_here = LOAD_STORE_CHECK_REG_DEP(stop_search_mask, check_lir);
        if (stop_here) {
          break;
        }
      } else {
        break;
      }
    }
  }
}

/*
 * Walk the superblock from its second instruction onward and, for each
 * interesting load, scan backwards to try to hoist it to an earlier slot.
 */
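
/*
 * A hypothetical example (illustrative registers only): in
 *
 *   add r2, r3, r4
 *   add r5, r5, #1
 *   ldr r0, [r6, #8]
 *   add r1, r0, r2
 *
 * the ldr depends on neither add, so it can be hoisted above both, giving
 * the load extra slots to complete before r0 is consumed.
 */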
void Mir2Lir::ApplyLoadHoisting(LIR* head_lir, LIR* tail_lir) {
  LIR* this_lir, *check_lir;
  /*
   * Store the list of independent instructions that the load can be hoisted
   * past. The best place to insert is decided later.
   */
  LIR* prev_inst_list[MAX_HOIST_DISTANCE];

  /* Empty block */
  if (head_lir == tail_lir) {
    return;
  }

  /* Start from the second instruction */
  for (this_lir = NEXT_LIR(head_lir); this_lir != tail_lir; this_lir = NEXT_LIR(this_lir)) {
    if (IsPseudoLirOp(this_lir->opcode)) {
      continue;
    }

    uint64_t target_flags = GetTargetInstFlags(this_lir->opcode);
    /* Skip non-interesting instructions */
    if (!(target_flags & IS_LOAD) ||
        (this_lir->flags.is_nop == true) ||
        ((target_flags & (REG_DEF0 | REG_DEF1)) == (REG_DEF0 | REG_DEF1)) ||
        ((target_flags & (IS_STORE | IS_LOAD)) == (IS_STORE | IS_LOAD))) {
      continue;
    }

    ResourceMask stop_use_all_mask = *this_lir->u.m.use_mask;

    /*
     * Branches for null/range checks are marked with the true resource
     * bits, and loads to Dalvik registers, constant pools, and non-alias
     * locations are safe to be hoisted. So only mark the heap references
     * conservatively here.
     *
     * Note: on x86(-64) and Arm64 this will add kEncodeNone.
     * TODO: Sanity check. LoadStoreElimination uses kBranchBit to fake a PC.
     */
    if (stop_use_all_mask.HasBit(ResourceMask::kHeapRef)) {
      stop_use_all_mask.SetBits(GetPCUseDefEncoding());
    }

    /* Similar to the above, but only check for pure register dependencies */
    ResourceMask stop_use_reg_mask = stop_use_all_mask.Without(kEncodeMem);
    ResourceMask stop_def_reg_mask = this_lir->u.m.def_mask->Without(kEncodeMem);

    int next_slot = 0;
    bool stop_here = false;

    /* Try to hoist the load to a good spot */
    for (check_lir = PREV_LIR(this_lir); check_lir != head_lir; check_lir = PREV_LIR(check_lir)) {
      /*
       * Skip already dead instructions (whose dataflow information is
       * outdated and misleading).
       */
      if (check_lir->flags.is_nop) {
        continue;
      }

      ResourceMask check_mem_mask = check_lir->u.m.def_mask->Intersection(kEncodeMem);
      ResourceMask alias_condition = stop_use_all_mask.Intersection(check_mem_mask);
      stop_here = false;

      /* Potential WAR alias seen - check the exact relation */
      if (!check_mem_mask.Equals(kEncodeMem) && !alias_condition.Equals(kEncodeNone)) {
        /* We can fully disambiguate Dalvik references */
        if (alias_condition.Equals(kEncodeDalvikReg)) {
          /* Must alias or partially overlap */
          if ((check_lir->flags.alias_info == this_lir->flags.alias_info) ||
              IsDalvikRegisterClobbered(this_lir, check_lir)) {
            stop_here = true;
          }
          /* Conservatively treat all heap refs as may-alias */
        } else {
          DCHECK(alias_condition.Equals(kEncodeHeapRef));
          stop_here = true;
        }
        /* Memory content may be updated. Stop looking now. */
        if (stop_here) {
          prev_inst_list[next_slot++] = check_lir;
          break;
        }
      }

      if (stop_here == false) {
        stop_here = CHECK_REG_DEP(stop_use_reg_mask, stop_def_reg_mask,
                                  check_lir);
      }

      /*
       * Store the dependent or non-pseudo/independent instruction to the
       * list.
       */
      if (stop_here || !IsPseudoLirOp(check_lir->opcode)) {
        prev_inst_list[next_slot++] = check_lir;
        if (next_slot == MAX_HOIST_DISTANCE) {
          break;
        }
      }

      /* Found a new place to put the load - move it here */
      if (stop_here == true) {
        DEBUG_OPT(DumpDependentInsnPair(check_lir, this_lir, "HOIST STOP"));
        break;
      }
    }

    /*
     * Reached the top - use head_lir as the dependent marker as all labels
     * are barriers.
     */
    if (stop_here == false && next_slot < MAX_HOIST_DISTANCE) {
      prev_inst_list[next_slot++] = head_lir;
    }

    /*
     * At least one independent instruction is found. Scan in the reversed
     * direction to find a beneficial slot.
     */
    if (next_slot >= 2) {
      int first_slot = next_slot - 2;
      int slot;
      LIR* dep_lir = prev_inst_list[next_slot-1];
      /* If there is a ld-ld dependency, wait LDLD_DISTANCE cycles */
      if (!IsPseudoLirOp(dep_lir->opcode) &&
          (GetTargetInstFlags(dep_lir->opcode) & IS_LOAD)) {
        first_slot -= LDLD_DISTANCE;
      }
      /*
       * Make sure we check slot >= 0 since first_slot may be negative
       * when the loop is first entered.
       */
      for (slot = first_slot; slot >= 0; slot--) {
        LIR* cur_lir = prev_inst_list[slot];
        LIR* prev_lir = prev_inst_list[slot+1];

        /* Check the highest instruction */
        if (prev_lir->u.m.def_mask->Equals(kEncodeAll)) {
          /*
           * If the first instruction is a load, don't hoist anything
           * above it since it is unlikely to be beneficial.
           */
          if (GetTargetInstFlags(cur_lir->opcode) & IS_LOAD) {
            continue;
          }
          /*
           * If the remaining number of slots is less than LD_LATENCY,
           * insert the hoisted load here.
           */
          if (slot < LD_LATENCY) {
            break;
          }
        }

        // Don't look across a barrier label
        if ((prev_lir->opcode == kPseudoTargetLabel) ||
            (prev_lir->opcode == kPseudoSafepointPC) ||
            (prev_lir->opcode == kPseudoBarrier)) {
          break;
        }

        /*
         * Try to find two instructions with load/use dependency until
         * the remaining instructions are less than LD_LATENCY.
         */
        bool prev_is_load = IsPseudoLirOp(prev_lir->opcode) ? false :
            (GetTargetInstFlags(prev_lir->opcode) & IS_LOAD);
        if ((prev_is_load && (cur_lir->u.m.use_mask->Intersects(*prev_lir->u.m.def_mask))) ||
            (slot < LD_LATENCY)) {
          break;
        }
      }

      /* Found a slot to hoist to */
      if (slot >= 0) {
        LIR* cur_lir = prev_inst_list[slot];
        LIR* new_load_lir =
            static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
        *new_load_lir = *this_lir;
        /*
         * Insertion is guaranteed to succeed since cur_lir
         * is never the first LIR on the list
         */
        InsertLIRBefore(cur_lir, new_load_lir);
        NopLIR(this_lir);
      }
    }
  }
}

void Mir2Lir::ApplyLocalOptimizations(LIR* head_lir, LIR* tail_lir) {
  if (!(cu_->disable_opt & (1 << kLoadStoreElimination))) {
    ApplyLoadStoreElimination(head_lir, tail_lir);
  }
  if (!(cu_->disable_opt & (1 << kLoadHoisting))) {
    ApplyLoadHoisting(head_lir, tail_lir);
  }
}
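
/*
 * Expected usage (a sketch; the actual call site is the per-block code
 * generator, which passes the first and last LIR of each superblock):
 *
 *   ApplyLocalOptimizations(block_head_lir, last_lir_insn_);
 *
 * Either pass can be suppressed through cu_->disable_opt via the
 * kLoadStoreElimination / kLoadHoisting bits checked above.
 */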

}  // namespace art