/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#include "load_store_elimination.h"

#include <iostream>

#include "side_effects_analysis.h"
21
22namespace art {
23
// Forward declaration; defined below.
class ReferenceInfo;

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
// Methods exceeding the cap are skipped by the pass (enforced elsewhere in this file).
constexpr size_t kMaxNumberOfHeapLocations = 32;
29
30// A ReferenceInfo contains additional info about a reference such as
31// whether it's a singleton, returned, etc.
// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
 public:
  // Analyzes `reference` eagerly: classifies it as singleton / returned by
  // walking all of its uses once, right here in the constructor.
  // `pos` is the index of this info in HeapLocationCollector's ref_info_array_.
  ReferenceInfo(HInstruction* reference, size_t pos) : reference_(reference), position_(pos) {
    is_singleton_ = true;
    is_singleton_and_not_returned_ = true;
    if (!reference_->IsNewInstance() && !reference_->IsNewArray()) {
      // For references not allocated in the method, don't assume anything.
      is_singleton_ = false;
      is_singleton_and_not_returned_ = false;
      return;
    }

    // Visit all uses to determine if this reference can spread into the heap,
    // a method call, etc.
    for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
      HInstruction* user = use.GetUser();
      DCHECK(!user->IsNullCheck()) << "NullCheck should have been eliminated";
      if (user->IsBoundType()) {
        // BoundType shouldn't normally be necessary for a NewInstance.
        // Just be conservative for the uncommon cases.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      // The InputAt() index checked for each setter is the position of the
      // *value* operand; only escaping as a stored value (not as the base
      // object being stored into) kills singleton-ness.
      if (user->IsPhi() || user->IsSelect() || user->IsInvoke() ||
          (user->IsInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsStaticFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsUnresolvedStaticFieldSet() && (reference_ == user->InputAt(0))) ||
          (user->IsArraySet() && (reference_ == user->InputAt(2)))) {
        // reference_ is merged to HPhi/HSelect, passed to a callee, or stored to heap.
        // reference_ isn't the only name that can refer to its value anymore.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if (user->IsReturn()) {
        // Still a singleton inside the method, but it escapes to the caller.
        is_singleton_and_not_returned_ = false;
      }
    }
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed to not have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and not returned to the caller.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndNotReturned() const {
    return is_singleton_and_not_returned_;
  }

 private:
  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.
  bool is_singleton_;      // can only be referred to by a single name in the method.
  bool is_singleton_and_not_returned_;  // reference_ is singleton and not returned to caller.

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};
103
104// A heap location is a reference-offset/index pair that a value can be loaded from
105// or stored to.
// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to. Exactly one of offset/index is valid: field accesses have a
// valid offset and a null index; array accesses have kInvalidFieldOffset and a
// non-null index.
class HeapLocation : public ArenaObject<kArenaAllocMisc> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;

  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               size_t offset,
               HInstruction* index,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        offset_(offset),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        value_killed_by_loop_side_effects_(true) {
    DCHECK(ref_info != nullptr);
    // Enforce the field-XOR-array invariant described on the class.
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
    if (ref_info->IsSingleton() && !IsArrayElement()) {
      // Assume this location's value cannot be killed by loop side effects
      // until proven otherwise.
      value_killed_by_loop_side_effects_ = false;
    }
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }

  // Returns the definition of declaring class' dex index.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArrayElement() const {
    return index_ != nullptr;
  }

  bool IsValueKilledByLoopSideEffects() const {
    return value_killed_by_loop_side_effects_;
  }

  void SetValueKilledByLoopSideEffects(bool val) {
    value_killed_by_loop_side_effects_ = val;
  }

 private:
  ReferenceInfo* const ref_info_;  // reference for instance/static field or array access.
  const size_t offset_;            // offset of static/instance field.
  HInstruction* const index_;      // index of an array element.
  const int16_t declaring_class_def_index_;  // declaring class's def's dex index.
  bool value_killed_by_loop_side_effects_;   // value of this location may be killed by loop
                                             // side effects because this location is stored
                                             // into inside a loop.

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};
165
166static HInstruction* HuntForOriginalReference(HInstruction* ref) {
167 DCHECK(ref != nullptr);
168 while (ref->IsNullCheck() || ref->IsBoundType()) {
169 ref = ref->InputAt(0);
170 }
171 return ref;
172}
173
174// A HeapLocationCollector collects all relevant heap locations and keeps
175// an aliasing matrix for all locations.
// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations. It is an HGraphVisitor: run it over
// the graph (reverse post order), then call BuildAliasingMatrix() before
// querying MayAlias().
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for pair-wise
  // aliasing matrix of 8 heap locations.
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  explicit HeapLocationCollector(HGraph* graph)
      : HGraphVisitor(graph),
        ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        aliasing_matrix_(graph->GetArena(),
                         kInitialAliasingMatrixBitVectorSize,
                         true,  // expandable bit vector.
                         kArenaAllocLSE),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false),
        may_deoptimize_(false) {}

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  // Linear scan over all collected references; returns nullptr when `ref`
  // has not been recorded. The number of references is capped by
  // kMaxNumberOfHeapLocations, so the scan stays cheap.
  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Returns whether this method may be deoptimized.
  // Currently we don't have meta data support for deoptimizing
  // a method that eliminates allocations/stores.
  bool MayDeoptimize() const {
    return may_deoptimize_;
  }

  // Find and return the heap location index in heap_locations_, or
  // kHeapLocationNotFound if no exact (ref_info, offset, index, class) match.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               size_t offset,
                               HInstruction* index,
                               int16_t declaring_class_def_index) const {
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  // Only the upper triangle of the matrix is stored, hence the ordering below.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  // Conservative pairwise aliasing check on the references themselves:
  // a singleton aliases only itself; otherwise fall back to the
  // allocation-ordering argument above.
  bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
        !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is an array element.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
      HInstruction* array_index1 = loc1->GetIndex();
      HInstruction* array_index2 = loc2->GetIndex();
      DCHECK(array_index1 != nullptr);
      DCHECK(array_index2 != nullptr);
      if (array_index1->IsIntConstant() &&
          array_index2->IsIntConstant() &&
          array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
        // Different constant indices do not alias.
        return false;
      }
    }
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  // Record a ReferenceInfo for reference-typed instructions only; other types
  // cannot be aliasing roots.
  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != Primitive::kPrimNot) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
                                        size_t offset,
                                        HInstruction* index,
                                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (GetGraph()->GetArena())
          HeapLocation(ref_info, offset, index, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
      return heap_loc;
    }
    return heap_locations_[heap_location_idx];
  }

  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array, HInstruction* index) {
    GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
        index, HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
    if (instruction->GetBlock()->GetLoopInformation() != nullptr) {
      // A store inside a loop can kill the location's value across iterations.
      location->SetValueKilledByLoopSideEffects(true);
    }
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    has_heap_stores_ = true;
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
    CreateReferenceInfoForReferenceType(new_instance);
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitSelect(HSelect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) OVERRIDE {
    may_deoptimize_ = true;
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
    has_monitor_operations_ = true;
  }

  ArenaVector<ReferenceInfo*> ref_info_array_;   // All references used for heap accesses.
  ArenaVector<HeapLocation*> heap_locations_;    // All heap locations.
  ArenaBitVector aliasing_matrix_;    // aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there is no heap stores, LSE acts as GVN with better
                            // alias analysis and won't be as effective.
  bool has_volatile_;       // If there are volatile field accesses.
  bool has_monitor_operations_;    // If there are monitor operations.
  bool may_deoptimize_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
485
// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
// A heap location can be set to kUnknownHeapValue when:
// - initially set a value.
// - killed due to aliasing, merging, invocation, or loop side effects.
// Note: these sentinels are only ever compared by pointer value, never dereferenced.
static HInstruction* const kUnknownHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));

// Default heap value after an allocation.
// A heap location can be set to that value right after an allocation.
static HInstruction* const kDefaultHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
497
498class LSEVisitor : public HGraphVisitor {
499 public:
  // `heap_locations_collector` must already have visited the graph so its
  // heap locations and aliasing info are complete; `side_effects` provides
  // per-loop write information used by HandleLoopSideEffects().
  LSEVisitor(HGraph* graph,
             const HeapLocationCollector& heap_locations_collector,
             const SideEffectsAnalysis& side_effects)
      : HGraphVisitor(graph),
        heap_location_collector_(heap_locations_collector),
        side_effects_(side_effects),
        // One tracked-value vector per basic block, each entry initialized to
        // kUnknownHeapValue.
        heap_values_for_(graph->GetBlocks().size(),
                         ArenaVector<HInstruction*>(heap_locations_collector.
                                                        GetNumberOfHeapLocations(),
                                                    kUnknownHeapValue,
                                                    graph->GetArena()->Adapter(kArenaAllocLSE)),
                         graph->GetArena()->Adapter(kArenaAllocLSE)),
        removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
  }
517
518 void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800519 // Populate the heap_values array for this block.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700520 // TODO: try to reuse the heap_values array from one predecessor if possible.
521 if (block->IsLoopHeader()) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800522 HandleLoopSideEffects(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700523 } else {
524 MergePredecessorValues(block);
525 }
526 HGraphVisitor::VisitBasicBlock(block);
527 }
528
  // Remove recorded instructions that should be eliminated.
  // Loads are replaced with their recorded substitutes; stores queued in
  // possibly_removed_stores_ are deleted outright.
  void RemoveInstructions() {
    size_t size = removed_loads_.size();
    DCHECK_EQ(size, substitute_instructions_for_loads_.size());
    for (size_t i = 0; i < size; i++) {
      HInstruction* load = removed_loads_[i];
      DCHECK(load != nullptr);
      DCHECK(load->IsInstanceFieldGet() ||
             load->IsStaticFieldGet() ||
             load->IsArrayGet());
      HInstruction* substitute = substitute_instructions_for_loads_[i];
      DCHECK(substitute != nullptr);
      // Keep tracing substitute till one that's not removed.
      // A substitute may itself be a load that is being eliminated, so follow
      // the chain until it reaches an instruction that will stay in the graph.
      HInstruction* sub_sub = FindSubstitute(substitute);
      while (sub_sub != substitute) {
        substitute = sub_sub;
        sub_sub = FindSubstitute(substitute);
      }
      load->ReplaceWith(substitute);
      load->GetBlock()->RemoveInstruction(load);
    }

    // At this point, stores in possibly_removed_stores_ can be safely removed.
    size = possibly_removed_stores_.size();
    for (size_t i = 0; i < size; i++) {
      HInstruction* store = possibly_removed_stores_[i];
      DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
      store->GetBlock()->RemoveInstruction(store);
    }

    // TODO: remove unnecessary allocations.
    // Eliminate instructions in singleton_new_instances_ that:
    // - don't have uses,
    // - don't have finalizers,
    // - are instantiable and accessible,
    // - have no/separate clinit check.
  }
566
567 private:
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800568 // If heap_values[index] is an instance field store, need to keep the store.
569 // This is necessary if a heap value is killed due to merging, or loop side
570 // effects (which is essentially merging also), since a load later from the
571 // location won't be eliminated.
572 void KeepIfIsStore(HInstruction* heap_value) {
573 if (heap_value == kDefaultHeapValue ||
574 heap_value == kUnknownHeapValue ||
575 !heap_value->IsInstanceFieldSet()) {
576 return;
577 }
578 auto idx = std::find(possibly_removed_stores_.begin(),
579 possibly_removed_stores_.end(), heap_value);
580 if (idx != possibly_removed_stores_.end()) {
581 // Make sure the store is kept.
582 possibly_removed_stores_.erase(idx);
583 }
584 }
585
  // Seeds the heap values for a loop header: inherit from the pre-header,
  // then kill any location the loop's side effects may write.
  void HandleLoopSideEffects(HBasicBlock* block) {
    DCHECK(block->IsLoopHeader());
    int block_id = block->GetBlockId();
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];

    // Don't eliminate loads in irreducible loops. This is safe for singletons, because
    // they are always used by the non-eliminated loop-phi.
    if (block->GetLoopInformation()->IsIrreducible()) {
      if (kIsDebugBuild) {
        for (size_t i = 0; i < heap_values.size(); i++) {
          DCHECK_EQ(heap_values[i], kUnknownHeapValue);
        }
      }
      return;
    }

    HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
    ArenaVector<HInstruction*>& pre_header_heap_values =
        heap_values_for_[pre_header->GetBlockId()];

    // Inherit the values from pre-header.
    for (size_t i = 0; i < heap_values.size(); i++) {
      heap_values[i] = pre_header_heap_values[i];
    }

    // We do a single pass in reverse post order. For loops, use the side effects as a hint
    // to see if the heap values should be killed.
    if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
      for (size_t i = 0; i < heap_values.size(); i++) {
        HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
        ReferenceInfo* ref_info = location->GetReferenceInfo();
        if (!ref_info->IsSingleton() || location->IsValueKilledByLoopSideEffects()) {
          // heap value is killed by loop side effects (stored into directly, or due to
          // aliasing).
          // The pre-header value may itself be a tentatively-removed store;
          // a later load can't be eliminated now, so the store must be kept.
          KeepIfIsStore(pre_header_heap_values[i]);
          heap_values[i] = kUnknownHeapValue;
        } else {
          // A singleton's field that's not stored into inside a loop is invariant throughout
          // the loop.
        }
      }
    }
  }
629
  // Seeds the heap values for a non-loop-header block: a location keeps a
  // known value only if every predecessor agrees on it; otherwise it becomes
  // kUnknownHeapValue and each predecessor's pending store must be kept.
  void MergePredecessorValues(HBasicBlock* block) {
    const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
    if (predecessors.size() == 0) {
      return;
    }
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* pred0_value = heap_values_for_[predecessors[0]->GetBlockId()][i];
      heap_values[i] = pred0_value;
      if (pred0_value != kUnknownHeapValue) {
        for (size_t j = 1; j < predecessors.size(); j++) {
          HInstruction* pred_value = heap_values_for_[predecessors[j]->GetBlockId()][i];
          if (pred_value != pred0_value) {
            // Disagreement among predecessors: the merged value is unknown.
            heap_values[i] = kUnknownHeapValue;
            break;
          }
        }
      }

      if (heap_values[i] == kUnknownHeapValue) {
        // Keep the last store in each predecessor since future loads cannot be eliminated.
        for (size_t j = 0; j < predecessors.size(); j++) {
          ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessors[j]->GetBlockId()];
          KeepIfIsStore(pred_values[i]);
        }
      }
    }
  }
658
659 // `instruction` is being removed. Try to see if the null check on it
660 // can be removed. This can happen if the same value is set in two branches
661 // but not in dominators. Such as:
662 // int[] a = foo();
663 // if () {
664 // a[0] = 2;
665 // } else {
666 // a[0] = 2;
667 // }
668 // // a[0] can now be replaced with constant 2, and the null check on it can be removed.
669 void TryRemovingNullCheck(HInstruction* instruction) {
670 HInstruction* prev = instruction->GetPrevious();
671 if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
672 // Previous instruction is a null check for this instruction. Remove the null check.
673 prev->ReplaceWith(prev->InputAt(0));
674 prev->GetBlock()->RemoveInstruction(prev);
675 }
676 }
677
678 HInstruction* GetDefaultValue(Primitive::Type type) {
679 switch (type) {
680 case Primitive::kPrimNot:
681 return GetGraph()->GetNullConstant();
682 case Primitive::kPrimBoolean:
683 case Primitive::kPrimByte:
684 case Primitive::kPrimChar:
685 case Primitive::kPrimShort:
686 case Primitive::kPrimInt:
687 return GetGraph()->GetIntConstant(0);
688 case Primitive::kPrimLong:
689 return GetGraph()->GetLongConstant(0);
690 case Primitive::kPrimFloat:
691 return GetGraph()->GetFloatConstant(0);
692 case Primitive::kPrimDouble:
693 return GetGraph()->GetDoubleConstant(0);
694 default:
695 UNREACHABLE();
696 }
697 }
698
  // Common handler for field/array loads: if the location's value is already
  // known, record the load for elimination; otherwise record the load itself
  // as the location's value (GVN-like behavior).
  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      // Freshly allocated location: the load yields the type's zero/null.
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton since it's from the same field
      // that a "removed" store puts the value. That store must be to a singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if (heap_value == kUnknownHeapValue) {
      // Load isn't eliminated. Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    } else {
      if (Primitive::PrimitiveKind(heap_value->GetType())
              != Primitive::PrimitiveKind(instruction->GetType())) {
        // The only situation where the same heap location has different type is when
        // we do an array get on an instruction that originates from the null constant
        // (the null could be behind a field access, an array access, a null check or
        // a bound type).
        // In order to stay properly typed on primitive types, we do not eliminate
        // the array gets.
        if (kIsDebugBuild) {
          DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
          DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
        }
        return;
      }
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
    }
  }
751
752 bool Equal(HInstruction* heap_value, HInstruction* value) {
753 if (heap_value == value) {
754 return true;
755 }
756 if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
757 return true;
758 }
759 return false;
760 }
761
  // Handles a heap store (instance/static field set or array set). Records the
  // stored value for the heap location identified by (ref, offset, index,
  // declaring_class_def_index), marks stores that may be removable, and kills
  // possibly-aliasing heap locations.
  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For array element, don't eliminate stores since it can be easily aliased
      // with non-constant index.
    } else if (!heap_location_collector_.MayDeoptimize() &&
               ref_info->IsSingletonAndNotReturned()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // instruction is a store in the loop, so the loop must do a write.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());
          // If it's a singleton, IsValueKilledByLoopSideEffects() must be true.
          DCHECK(!ref_info->IsSingleton() ||
                 heap_location_collector_.GetHeapLocation(idx)->IsValueKilledByLoopSideEffects());

          if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. Value stored to it isn't needed at
            // the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      // Tentatively mark the store as removable. It is taken back out of this
      // list later if its value turns out to be observable.
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }
850
851 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
852 HInstruction* obj = instruction->InputAt(0);
853 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
854 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
855 VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
856 }
857
858 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
859 HInstruction* obj = instruction->InputAt(0);
860 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
861 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
862 HInstruction* value = instruction->InputAt(1);
863 VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
864 }
865
866 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
867 HInstruction* cls = instruction->InputAt(0);
868 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
869 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
870 VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
871 }
872
873 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
874 HInstruction* cls = instruction->InputAt(0);
875 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
876 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
877 HInstruction* value = instruction->InputAt(1);
878 VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
879 }
880
881 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
882 HInstruction* array = instruction->InputAt(0);
883 HInstruction* index = instruction->InputAt(1);
884 VisitGetLocation(instruction,
885 array,
886 HeapLocation::kInvalidFieldOffset,
887 index,
888 HeapLocation::kDeclaringClassDefIndexForArrays);
889 }
890
891 void VisitArraySet(HArraySet* instruction) OVERRIDE {
892 HInstruction* array = instruction->InputAt(0);
893 HInstruction* index = instruction->InputAt(1);
894 HInstruction* value = instruction->InputAt(2);
895 VisitSetLocation(instruction,
896 array,
897 HeapLocation::kInvalidFieldOffset,
898 index,
899 HeapLocation::kDeclaringClassDefIndexForArrays,
900 value);
901 }
902
903 void HandleInvoke(HInstruction* invoke) {
904 ArenaVector<HInstruction*>& heap_values =
905 heap_values_for_[invoke->GetBlock()->GetBlockId()];
906 for (size_t i = 0; i < heap_values.size(); i++) {
907 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
908 if (ref_info->IsSingleton()) {
909 // Singleton references cannot be seen by the callee.
910 } else {
911 heap_values[i] = kUnknownHeapValue;
912 }
913 }
914 }
915
  // Static/direct calls may touch any non-singleton heap location.
  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
919
  // Virtual calls may touch any non-singleton heap location.
  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
923
  // Interface calls may touch any non-singleton heap location.
  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
927
  // Unresolved calls may touch any non-singleton heap location.
  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
931
  // A class initialization check can run arbitrary <clinit> code; treat it
  // like an invocation.
  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    HandleInvoke(clinit);
  }
935
  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }
940
  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }
945
  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }
950
  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }
955
956 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
957 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
958 if (ref_info == nullptr) {
959 // new_instance isn't used for field accesses. No need to process it.
960 return;
961 }
962 if (!heap_location_collector_.MayDeoptimize() &&
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800963 ref_info->IsSingletonAndNotReturned() &&
964 !new_instance->IsFinalizable() &&
965 !new_instance->CanThrow()) {
966 // TODO: add new_instance to singleton_new_instances_ and enable allocation elimination.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700967 }
968 ArenaVector<HInstruction*>& heap_values =
969 heap_values_for_[new_instance->GetBlock()->GetBlockId()];
970 for (size_t i = 0; i < heap_values.size(); i++) {
971 HInstruction* ref =
972 heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
973 size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
974 if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
975 // Instance fields except the header fields are set to default heap values.
976 heap_values[i] = kDefaultHeapValue;
977 }
978 }
979 }
980
981 // Find an instruction's substitute if it should be removed.
982 // Return the same instruction if it should not be removed.
983 HInstruction* FindSubstitute(HInstruction* instruction) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800984 size_t size = removed_loads_.size();
Mingyao Yang8df69d42015-10-22 15:40:58 -0700985 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800986 if (removed_loads_[i] == instruction) {
987 return substitute_instructions_for_loads_[i];
Mingyao Yang8df69d42015-10-22 15:40:58 -0700988 }
989 }
990 return instruction;
991 }
992
  // Heap locations and aliasing info collected for the graph (not owned).
  const HeapLocationCollector& heap_location_collector_;
  // Per-loop side-effect summaries, used to tell when loop iterations can
  // kill a tracked heap value (not owned).
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;

  // We record the instructions that should be eliminated but may be
  // used by heap locations. They'll be removed in the end.
  // removed_loads_[i] is replaced by substitute_instructions_for_loads_[i].
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  // Allocations proven removable (see TODO in VisitNewInstance).
  ArenaVector<HInstruction*> singleton_new_instances_;
1009
1010 DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
1011};
1012
1013void LoadStoreElimination::Run() {
David Brazdil8993caf2015-12-07 10:04:40 +00001014 if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -07001015 // Debugger may set heap values or trigger deoptimization of callers.
David Brazdil8993caf2015-12-07 10:04:40 +00001016 // Try/catch support not implemented yet.
Mingyao Yang8df69d42015-10-22 15:40:58 -07001017 // Skip this optimization.
1018 return;
1019 }
1020 HeapLocationCollector heap_location_collector(graph_);
1021 for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
1022 heap_location_collector.VisitBasicBlock(it.Current());
1023 }
1024 if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
1025 // Bail out if there are too many heap locations to deal with.
1026 return;
1027 }
1028 if (!heap_location_collector.HasHeapStores()) {
1029 // Without heap stores, this pass would act mostly as GVN on heap accesses.
1030 return;
1031 }
1032 if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
1033 // Don't do load/store elimination if the method has volatile field accesses or
1034 // monitor operations, for now.
1035 // TODO: do it right.
1036 return;
1037 }
1038 heap_location_collector.BuildAliasingMatrix();
1039 LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
1040 for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
1041 lse_visitor.VisitBasicBlock(it.Current());
1042 }
1043 lse_visitor.RemoveInstructions();
1044}
1045
1046} // namespace art