/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "load_store_elimination.h"
#include "side_effects_analysis.h"

#include <iostream>

namespace art {

class ReferenceInfo;

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;

// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
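// For example, a reference created by a NewInstance/NewArray that is only used
// to access its own fields/elements, and is never merged into a phi/select,
// passed to a call, stored into the heap, or returned, is a singleton.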
class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
 public:
  ReferenceInfo(HInstruction* reference, size_t pos) : reference_(reference), position_(pos) {
    is_singleton_ = true;
    is_singleton_and_not_returned_ = true;
    if (!reference_->IsNewInstance() && !reference_->IsNewArray()) {
      // For references not allocated in the method, don't assume anything.
      is_singleton_ = false;
      is_singleton_and_not_returned_ = false;
      return;
    }

    // Visit all uses to determine if this reference can spread into the heap,
    // a method call, etc.
    for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
      HInstruction* user = use.GetUser();
      DCHECK(!user->IsNullCheck()) << "NullCheck should have been eliminated";
      if (user->IsBoundType()) {
        // BoundType shouldn't normally be necessary for a NewInstance.
        // Just be conservative for the uncommon cases.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if (user->IsPhi() || user->IsSelect() || user->IsInvoke() ||
          (user->IsInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsStaticFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsUnresolvedStaticFieldSet() && (reference_ == user->InputAt(0))) ||
          (user->IsArraySet() && (reference_ == user->InputAt(2)))) {
        // reference_ is merged to HPhi/HSelect, passed to a callee, or stored to heap.
        // reference_ isn't the only name that can refer to its value anymore.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if (user->IsReturn()) {
        is_singleton_and_not_returned_ = false;
      }
    }
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed to not have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and not returned to the caller.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndNotReturned() const {
    return is_singleton_and_not_returned_;
  }

 private:
  HInstruction* const reference_;
  const size_t position_;     // position in HeapLocationCollector's ref_info_array_.
  bool is_singleton_;         // can only be referred to by a single name in the method.
  bool is_singleton_and_not_returned_;  // reference_ is singleton and not returned to caller.

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
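// For example, an instance field access o.f and an array access a[i] are described as
// (ref_info(o), offset_of(f), nullptr, declaring_class_def_index_of(f)) and
// (ref_info(a), kInvalidFieldOffset, i, kDeclaringClassDefIndexForArrays) respectively.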
class HeapLocation : public ArenaObject<kArenaAllocMisc> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;

  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               size_t offset,
               HInstruction* index,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        offset_(offset),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        value_killed_by_loop_side_effects_(true) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
    if (ref_info->IsSingleton() && !IsArrayElement()) {
      // Assume this location's value cannot be killed by loop side effects
      // until proven otherwise.
      value_killed_by_loop_side_effects_ = false;
    }
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }

  // Returns the dex def index of the declaring class.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArrayElement() const {
    return index_ != nullptr;
  }

  bool IsValueKilledByLoopSideEffects() const {
    return value_killed_by_loop_side_effects_;
  }

  void SetValueKilledByLoopSideEffects(bool val) {
    value_killed_by_loop_side_effects_ = val;
  }

 private:
  ReferenceInfo* const ref_info_;      // reference for instance/static field or array access.
  const size_t offset_;                // offset of static/instance field.
  HInstruction* const index_;          // index of an array element.
  const int16_t declaring_class_def_index_;  // declaring class's def's dex index.
  bool value_killed_by_loop_side_effects_;   // value of this location may be killed by loop
                                             // side effects because this location is stored
                                             // into inside a loop.

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};

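// Skip NullCheck/BoundType wrappers to find the original reference an access is
// based on, so that accesses through such wrappers map to the same heap location.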
static HInstruction* HuntForOriginalReference(HInstruction* ref) {
  DCHECK(ref != nullptr);
  while (ref->IsNullCheck() || ref->IsBoundType()) {
    ref = ref->InputAt(0);
  }
  return ref;
}

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for a pair-wise
  // aliasing matrix of 8 heap locations.
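  // (8 locations yield 8 * 7 / 2 = 28 distinct pairs, which fit in one 32-bit word.)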
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  explicit HeapLocationCollector(HGraph* graph)
      : HGraphVisitor(graph),
        ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        aliasing_matrix_(graph->GetArena(),
                         kInitialAliasingMatrixBitVectorSize,
                         true,
                         kArenaAllocLSE),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false),
        may_deoptimize_(false) {}

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Returns whether this method may be deoptimized.
  // Currently we don't have metadata support for deoptimizing
  // a method whose allocations/stores have been eliminated.
  bool MayDeoptimize() const {
    return may_deoptimize_;
  }

  // Find and return the heap location index in heap_locations_.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               size_t offset,
                               HInstruction* index,
                               int16_t declaring_class_def_index) const {
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
               !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
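    // For example, with 4 locations the pairs map to bit positions
    // (0,1)->0, (0,2)->1, (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5.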
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is an array element.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
      HInstruction* array_index1 = loc1->GetIndex();
      HInstruction* array_index2 = loc2->GetIndex();
      DCHECK(array_index1 != nullptr);
      DCHECK(array_index2 != nullptr);
      if (array_index1->IsIntConstant() &&
          array_index2->IsIntConstant() &&
          array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
        // Different constant indices do not alias.
        return false;
      }
    }
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != Primitive::kPrimNot) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
                                        size_t offset,
                                        HInstruction* index,
                                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (GetGraph()->GetArena())
          HeapLocation(ref_info, offset, index, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
      return heap_loc;
    }
    return heap_locations_[heap_location_idx];
  }

  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array, HInstruction* index) {
    GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
        index, HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
    if (instruction->GetBlock()->GetLoopInformation() != nullptr) {
      location->SetValueKilledByLoopSideEffects(true);
    }
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    has_heap_stores_ = true;
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
    CreateReferenceInfoForReferenceType(new_instance);
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitSelect(HSelect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) OVERRIDE {
    may_deoptimize_ = true;
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
    has_monitor_operations_ = true;
  }

  ArenaVector<ReferenceInfo*> ref_info_array_;   // All references used for heap accesses.
  ArenaVector<HeapLocation*> heap_locations_;    // All heap locations.
  ArenaBitVector aliasing_matrix_;    // aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there are no heap stores, LSE acts as GVN with better
                            // alias analysis and won't be as effective.
  bool has_volatile_;       // If there are volatile field accesses.
  bool has_monitor_operations_;    // If there are monitor operations.
  bool may_deoptimize_;     // Only true for HDeoptimize with single-frame deoptimization.

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};

// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
// A heap location can be set to kUnknownHeapValue when:
// - it is initialized, or
// - its value is killed due to aliasing, merging, invocation, or loop side effects.
static HInstruction* const kUnknownHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));

// Default heap value after an allocation.
// A heap location can be set to that value right after an allocation.
static HInstruction* const kDefaultHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
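// Note: both markers are sentinel pointer values that are only ever compared
// against; they are never dereferenced as real HInstructions.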

class LSEVisitor : public HGraphVisitor {
 public:
  LSEVisitor(HGraph* graph,
             const HeapLocationCollector& heap_locations_collector,
             const SideEffectsAnalysis& side_effects)
      : HGraphVisitor(graph),
        heap_location_collector_(heap_locations_collector),
        side_effects_(side_effects),
        heap_values_for_(graph->GetBlocks().size(),
                         ArenaVector<HInstruction*>(heap_locations_collector.
                                                        GetNumberOfHeapLocations(),
                                                    kUnknownHeapValue,
                                                    graph->GetArena()->Adapter(kArenaAllocLSE)),
                         graph->GetArena()->Adapter(kArenaAllocLSE)),
        removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
  }

  void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
    // Populate the heap_values array for this block.
    // TODO: try to reuse the heap_values array from one predecessor if possible.
    if (block->IsLoopHeader()) {
      HandleLoopSideEffects(block);
    } else {
      MergePredecessorValues(block);
    }
    HGraphVisitor::VisitBasicBlock(block);
  }

  // Remove recorded instructions that should be eliminated.
  void RemoveInstructions() {
    size_t size = removed_loads_.size();
    DCHECK_EQ(size, substitute_instructions_for_loads_.size());
    for (size_t i = 0; i < size; i++) {
      HInstruction* load = removed_loads_[i];
      DCHECK(load != nullptr);
      DCHECK(load->IsInstanceFieldGet() ||
             load->IsStaticFieldGet() ||
             load->IsArrayGet());
      HInstruction* substitute = substitute_instructions_for_loads_[i];
      DCHECK(substitute != nullptr);
      // Keep tracing the substitute until reaching one that's not removed.
      HInstruction* sub_sub = FindSubstitute(substitute);
      while (sub_sub != substitute) {
        substitute = sub_sub;
        sub_sub = FindSubstitute(substitute);
      }
      load->ReplaceWith(substitute);
      load->GetBlock()->RemoveInstruction(load);
    }

    // At this point, stores in possibly_removed_stores_ can be safely removed.
    for (size_t i = 0, e = possibly_removed_stores_.size(); i < e; i++) {
      HInstruction* store = possibly_removed_stores_[i];
      DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
      store->GetBlock()->RemoveInstruction(store);
    }

    // Eliminate allocations that are not used.
    for (size_t i = 0, e = singleton_new_instances_.size(); i < e; i++) {
      HInstruction* new_instance = singleton_new_instances_[i];
      if (!new_instance->HasNonEnvironmentUses()) {
        new_instance->RemoveEnvironmentUsers();
        new_instance->GetBlock()->RemoveInstruction(new_instance);
      }
    }
  }

 private:
  // If heap_values[index] is an instance field store, we need to keep the store.
  // This is necessary if a heap value is killed due to merging, or loop side
  // effects (which is essentially also merging), since a later load from the
  // location won't be eliminated.
  void KeepIfIsStore(HInstruction* heap_value) {
    if (heap_value == kDefaultHeapValue ||
        heap_value == kUnknownHeapValue ||
        !heap_value->IsInstanceFieldSet()) {
      return;
    }
    auto idx = std::find(possibly_removed_stores_.begin(),
        possibly_removed_stores_.end(), heap_value);
    if (idx != possibly_removed_stores_.end()) {
      // Make sure the store is kept.
      possibly_removed_stores_.erase(idx);
    }
  }

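  // Set up the heap values of a loop header block: inherit the pre-header's values
  // and conservatively kill those that the loop's side effects may overwrite,
  // keeping the corresponding pre-header stores alive.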
  void HandleLoopSideEffects(HBasicBlock* block) {
    DCHECK(block->IsLoopHeader());
    int block_id = block->GetBlockId();
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];

    // Don't eliminate loads in irreducible loops. This is safe for singletons, because
    // they are always used by the non-eliminated loop-phi.
    if (block->GetLoopInformation()->IsIrreducible()) {
      if (kIsDebugBuild) {
        for (size_t i = 0; i < heap_values.size(); i++) {
          DCHECK_EQ(heap_values[i], kUnknownHeapValue);
        }
      }
      return;
    }

    HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
    ArenaVector<HInstruction*>& pre_header_heap_values =
        heap_values_for_[pre_header->GetBlockId()];

    // Inherit the values from pre-header.
    for (size_t i = 0; i < heap_values.size(); i++) {
      heap_values[i] = pre_header_heap_values[i];
    }

    // We do a single pass in reverse post order. For loops, use the side effects as a hint
    // to see if the heap values should be killed.
    if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
      for (size_t i = 0; i < heap_values.size(); i++) {
        HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
        ReferenceInfo* ref_info = location->GetReferenceInfo();
        if (!ref_info->IsSingleton() || location->IsValueKilledByLoopSideEffects()) {
          // heap value is killed by loop side effects (stored into directly, or due to
          // aliasing).
          KeepIfIsStore(pre_header_heap_values[i]);
          heap_values[i] = kUnknownHeapValue;
        } else {
          // A singleton's field that's not stored into inside a loop is invariant throughout
          // the loop.
        }
      }
    }
  }

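  // Merge the heap values of all predecessors at the start of `block`: a location
  // keeps a known value only if every predecessor agrees on it; otherwise it becomes
  // kUnknownHeapValue and the last store in each predecessor is kept (see KeepIfIsStore).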
  void MergePredecessorValues(HBasicBlock* block) {
    const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
    if (predecessors.size() == 0) {
      return;
    }
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* pred0_value = heap_values_for_[predecessors[0]->GetBlockId()][i];
      heap_values[i] = pred0_value;
      if (pred0_value != kUnknownHeapValue) {
        for (size_t j = 1; j < predecessors.size(); j++) {
          HInstruction* pred_value = heap_values_for_[predecessors[j]->GetBlockId()][i];
          if (pred_value != pred0_value) {
            heap_values[i] = kUnknownHeapValue;
            break;
          }
        }
      }

      if (heap_values[i] == kUnknownHeapValue) {
        // Keep the last store in each predecessor since future loads cannot be eliminated.
        for (size_t j = 0; j < predecessors.size(); j++) {
          ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessors[j]->GetBlockId()];
          KeepIfIsStore(pred_values[i]);
        }
      }
    }
  }

  // `instruction` is being removed. Try to see if the null check on it
  // can be removed. This can happen if the same value is set in two branches
  // but not in dominators. Such as:
  //   int[] a = foo();
  //   if () {
  //     a[0] = 2;
  //   } else {
  //     a[0] = 2;
  //   }
  //   // a[0] can now be replaced with constant 2, and the null check on it can be removed.
  void TryRemovingNullCheck(HInstruction* instruction) {
    HInstruction* prev = instruction->GetPrevious();
    if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
      // Previous instruction is a null check for this instruction. Remove the null check.
      prev->ReplaceWith(prev->InputAt(0));
      prev->GetBlock()->RemoveInstruction(prev);
    }
  }

  HInstruction* GetDefaultValue(Primitive::Type type) {
    switch (type) {
      case Primitive::kPrimNot:
        return GetGraph()->GetNullConstant();
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        return GetGraph()->GetIntConstant(0);
      case Primitive::kPrimLong:
        return GetGraph()->GetLongConstant(0);
      case Primitive::kPrimFloat:
        return GetGraph()->GetFloatConstant(0);
      case Primitive::kPrimDouble:
        return GetGraph()->GetDoubleConstant(0);
      default:
        UNREACHABLE();
    }
  }

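  // Handle a load from the heap location identified by (ref, offset, index,
  // declaring_class_def_index): if the location's value is known (a default value
  // or a previously seen store/load), record the load for later substitution;
  // otherwise remember the load itself as the location's value, which acts like
  // GVN with better aliasing analysis.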
  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton since it's from the same field
      // into which a "removed" store put the value. That store must be to a singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if (heap_value == kUnknownHeapValue) {
      // Load isn't eliminated. Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    } else {
      if (Primitive::PrimitiveKind(heap_value->GetType())
              != Primitive::PrimitiveKind(instruction->GetType())) {
        // The only situation where the same heap location has different type is when
        // we do an array get on an instruction that originates from the null constant
        // (the null could be behind a field access, an array access, a null check or
        // a bound type).
        // In order to stay properly typed on primitive types, we do not eliminate
        // the array gets.
        if (kIsDebugBuild) {
          DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
          DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
        }
        return;
      }
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
    }
  }

  bool Equal(HInstruction* heap_value, HInstruction* value) {
    if (heap_value == value) {
      return true;
    }
    if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
      return true;
    }
    return false;
  }

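  // Handle a store to the heap location identified by (ref, offset, index,
  // declaring_class_def_index): record the store as possibly removable if it writes
  // the value the location already holds, or if it writes into a field of a
  // non-returned singleton; then update the tracked value and kill aliased locations.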
  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For array elements, don't eliminate stores since they can easily be aliased
      // with non-constant indices.
    } else if (!heap_location_collector_.MayDeoptimize() &&
               ref_info->IsSingletonAndNotReturned()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // instruction is a store in the loop so the loop must do writes.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());
          // If it's a singleton, IsValueKilledByLoopSideEffects() must be true.
          DCHECK(!ref_info->IsSingleton() ||
                 heap_location_collector_.GetHeapLocation(idx)->IsValueKilledByLoopSideEffects());

          if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. Value stored to it isn't needed at
            // the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    HInstruction* obj = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HInstruction* obj = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    HInstruction* value = instruction->InputAt(1);
    VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    HInstruction* cls = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    HInstruction* cls = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    HInstruction* value = instruction->InputAt(1);
    VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
  }

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    VisitGetLocation(instruction,
                     array,
                     HeapLocation::kInvalidFieldOffset,
                     index,
                     HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    HInstruction* value = instruction->InputAt(2);
    VisitSetLocation(instruction,
                     array,
                     HeapLocation::kInvalidFieldOffset,
                     index,
                     HeapLocation::kDeclaringClassDefIndexForArrays,
                     value);
  }

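  // An invocation (or anything treated conservatively like one) may read or write
  // any heap location visible to the callee, so the values of non-singleton
  // locations are forgotten. Singleton references cannot escape to the callee.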
  void HandleInvoke(HInstruction* invoke) {
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[invoke->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
      if (ref_info->IsSingleton()) {
        // Singleton references cannot be seen by the callee.
      } else {
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    HandleInvoke(clinit);
  }

  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
    if (ref_info == nullptr) {
      // new_instance isn't used for field accesses. No need to process it.
      return;
    }
    if (!heap_location_collector_.MayDeoptimize() &&
        ref_info->IsSingletonAndNotReturned() &&
        !new_instance->IsFinalizable() &&
        !new_instance->NeedsAccessCheck()) {
      singleton_new_instances_.push_back(new_instance);
    }
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[new_instance->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* ref =
          heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
      size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
      if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
        // Instance fields except the header fields are set to default heap values.
        heap_values[i] = kDefaultHeapValue;
      }
    }
  }

  // Find an instruction's substitute if it should be removed.
  // Return the same instruction if it should not be removed.
  HInstruction* FindSubstitute(HInstruction* instruction) {
    size_t size = removed_loads_.size();
    for (size_t i = 0; i < size; i++) {
      if (removed_loads_[i] == instruction) {
        return substitute_instructions_for_loads_[i];
      }
    }
    return instruction;
  }

  const HeapLocationCollector& heap_location_collector_;
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;

  // We record the instructions that should be eliminated but may still be used
  // as heap values. They'll be removed in the end.
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  ArenaVector<HInstruction*> singleton_new_instances_;

  DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
};

void LoadStoreElimination::Run() {
  if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
    // Debugger may set heap values or trigger deoptimization of callers.
    // Try/catch support not implemented yet.
    // Skip this optimization.
    return;
  }
  HeapLocationCollector heap_location_collector(graph_);
  for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
    heap_location_collector.VisitBasicBlock(it.Current());
  }
  if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
    // Bail out if there are too many heap locations to deal with.
    return;
  }
  if (!heap_location_collector.HasHeapStores()) {
    // Without heap stores, this pass would act mostly as GVN on heap accesses.
    return;
  }
  if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
    // Don't do load/store elimination if the method has volatile field accesses or
    // monitor operations, for now.
    // TODO: do it right.
    return;
  }
  heap_location_collector.BuildAliasingMatrix();
  LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
  for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
    lse_visitor.VisitBasicBlock(it.Current());
  }
  lse_visitor.RemoveInstructions();
}

}  // namespace art