/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "load_store_elimination.h"
18#include "side_effects_analysis.h"
19
20#include <iostream>
21
22namespace art {
23
class ReferenceInfo;

// Cap on the number of heap locations tracked, to avoid pathological
// time/space consumption; most methods stay well below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;
29
30// A ReferenceInfo contains additional info about a reference such as
31// whether it's a singleton, returned, etc.
32class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
33 public:
34 ReferenceInfo(HInstruction* reference, size_t pos) : reference_(reference), position_(pos) {
35 is_singleton_ = true;
36 is_singleton_and_not_returned_ = true;
37 if (!reference_->IsNewInstance() && !reference_->IsNewArray()) {
38 // For references not allocated in the method, don't assume anything.
39 is_singleton_ = false;
40 is_singleton_and_not_returned_ = false;
41 return;
42 }
43
44 // Visit all uses to determine if this reference can spread into the heap,
45 // a method call, etc.
Vladimir Marko46817b82016-03-29 12:21:58 +010046 for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
47 HInstruction* user = use.GetUser();
48 DCHECK(!user->IsNullCheck()) << "NullCheck should have been eliminated";
49 if (user->IsBoundType()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -070050 // BoundType shouldn't normally be necessary for a NewInstance.
51 // Just be conservative for the uncommon cases.
52 is_singleton_ = false;
53 is_singleton_and_not_returned_ = false;
54 return;
55 }
Vladimir Marko46817b82016-03-29 12:21:58 +010056 if (user->IsPhi() || user->IsSelect() || user->IsInvoke() ||
57 (user->IsInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
58 (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
59 (user->IsStaticFieldSet() && (reference_ == user->InputAt(1))) ||
60 (user->IsUnresolvedStaticFieldSet() && (reference_ == user->InputAt(0))) ||
61 (user->IsArraySet() && (reference_ == user->InputAt(2)))) {
Mingyao Yang40bcb932016-02-03 05:46:57 -080062 // reference_ is merged to HPhi/HSelect, passed to a callee, or stored to heap.
Mingyao Yang8df69d42015-10-22 15:40:58 -070063 // reference_ isn't the only name that can refer to its value anymore.
64 is_singleton_ = false;
65 is_singleton_and_not_returned_ = false;
66 return;
67 }
Nicolas Geoffrayb93a1652016-06-27 10:03:29 +010068 if ((user->IsUnresolvedInstanceFieldGet() && (reference_ == user->InputAt(0))) ||
69 (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(0)))) {
70 // The field is accessed in an unresolved way. We mark the object as a singleton to
71 // disable load/store optimizations on it.
72 // Note that we could optimize this case and still perform some optimizations until
73 // we hit the unresolved access, but disabling is the simplest.
74 is_singleton_ = false;
75 is_singleton_and_not_returned_ = false;
76 return;
77 }
Vladimir Marko46817b82016-03-29 12:21:58 +010078 if (user->IsReturn()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -070079 is_singleton_and_not_returned_ = false;
80 }
81 }
82 }
83
84 HInstruction* GetReference() const {
85 return reference_;
86 }
87
88 size_t GetPosition() const {
89 return position_;
90 }
91
92 // Returns true if reference_ is the only name that can refer to its value during
93 // the lifetime of the method. So it's guaranteed to not have any alias in
94 // the method (including its callees).
95 bool IsSingleton() const {
96 return is_singleton_;
97 }
98
99 // Returns true if reference_ is a singleton and not returned to the caller.
100 // The allocation and stores into reference_ may be eliminated for such cases.
101 bool IsSingletonAndNotReturned() const {
102 return is_singleton_and_not_returned_;
103 }
104
105 private:
106 HInstruction* const reference_;
107 const size_t position_; // position in HeapLocationCollector's ref_info_array_.
108 bool is_singleton_; // can only be referred to by a single name in the method.
109 bool is_singleton_and_not_returned_; // reference_ is singleton and not returned to caller.
110
111 DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
112};
113
114// A heap location is a reference-offset/index pair that a value can be loaded from
115// or stored to.
116class HeapLocation : public ArenaObject<kArenaAllocMisc> {
117 public:
118 static constexpr size_t kInvalidFieldOffset = -1;
119
120 // TODO: more fine-grained array types.
121 static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
122
123 HeapLocation(ReferenceInfo* ref_info,
124 size_t offset,
125 HInstruction* index,
126 int16_t declaring_class_def_index)
127 : ref_info_(ref_info),
128 offset_(offset),
129 index_(index),
Mingyao Yang803cbb92015-12-01 12:24:36 -0800130 declaring_class_def_index_(declaring_class_def_index),
131 value_killed_by_loop_side_effects_(true) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700132 DCHECK(ref_info != nullptr);
133 DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
134 (offset != kInvalidFieldOffset && index == nullptr));
Mingyao Yang803cbb92015-12-01 12:24:36 -0800135 if (ref_info->IsSingleton() && !IsArrayElement()) {
136 // Assume this location's value cannot be killed by loop side effects
137 // until proven otherwise.
138 value_killed_by_loop_side_effects_ = false;
139 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700140 }
141
142 ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
143 size_t GetOffset() const { return offset_; }
144 HInstruction* GetIndex() const { return index_; }
145
146 // Returns the definition of declaring class' dex index.
147 // It's kDeclaringClassDefIndexForArrays for an array element.
148 int16_t GetDeclaringClassDefIndex() const {
149 return declaring_class_def_index_;
150 }
151
152 bool IsArrayElement() const {
153 return index_ != nullptr;
154 }
155
Mingyao Yang803cbb92015-12-01 12:24:36 -0800156 bool IsValueKilledByLoopSideEffects() const {
157 return value_killed_by_loop_side_effects_;
158 }
159
160 void SetValueKilledByLoopSideEffects(bool val) {
161 value_killed_by_loop_side_effects_ = val;
162 }
163
Mingyao Yang8df69d42015-10-22 15:40:58 -0700164 private:
165 ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
166 const size_t offset_; // offset of static/instance field.
167 HInstruction* const index_; // index of an array element.
168 const int16_t declaring_class_def_index_; // declaring class's def's dex index.
Mingyao Yang803cbb92015-12-01 12:24:36 -0800169 bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
170 // side effects because this location is stored
Mingyao Yang0a845202016-10-14 16:26:08 -0700171 // into inside a loop. This gives
172 // better info on whether a singleton's location
173 // value may be killed by loop side effects.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700174
175 DISALLOW_COPY_AND_ASSIGN(HeapLocation);
176};
177
178static HInstruction* HuntForOriginalReference(HInstruction* ref) {
179 DCHECK(ref != nullptr);
180 while (ref->IsNullCheck() || ref->IsBoundType()) {
181 ref = ref->InputAt(0);
182 }
183 return ref;
184}
185
186// A HeapLocationCollector collects all relevant heap locations and keeps
187// an aliasing matrix for all locations.
188class HeapLocationCollector : public HGraphVisitor {
189 public:
190 static constexpr size_t kHeapLocationNotFound = -1;
191 // Start with a single uint32_t word. That's enough bits for pair-wise
192 // aliasing matrix of 8 heap locations.
193 static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
194
195 explicit HeapLocationCollector(HGraph* graph)
196 : HGraphVisitor(graph),
197 ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
198 heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
Vladimir Markof6a35de2016-03-21 12:01:50 +0000199 aliasing_matrix_(graph->GetArena(),
200 kInitialAliasingMatrixBitVectorSize,
201 true,
202 kArenaAllocLSE),
Mingyao Yang8df69d42015-10-22 15:40:58 -0700203 has_heap_stores_(false),
204 has_volatile_(false),
205 has_monitor_operations_(false),
206 may_deoptimize_(false) {}
207
208 size_t GetNumberOfHeapLocations() const {
209 return heap_locations_.size();
210 }
211
212 HeapLocation* GetHeapLocation(size_t index) const {
213 return heap_locations_[index];
214 }
215
216 ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
217 for (size_t i = 0; i < ref_info_array_.size(); i++) {
218 ReferenceInfo* ref_info = ref_info_array_[i];
219 if (ref_info->GetReference() == ref) {
220 DCHECK_EQ(i, ref_info->GetPosition());
221 return ref_info;
222 }
223 }
224 return nullptr;
225 }
226
227 bool HasHeapStores() const {
228 return has_heap_stores_;
229 }
230
231 bool HasVolatile() const {
232 return has_volatile_;
233 }
234
235 bool HasMonitorOps() const {
236 return has_monitor_operations_;
237 }
238
239 // Returns whether this method may be deoptimized.
240 // Currently we don't have meta data support for deoptimizing
241 // a method that eliminates allocations/stores.
242 bool MayDeoptimize() const {
243 return may_deoptimize_;
244 }
245
246 // Find and return the heap location index in heap_locations_.
247 size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
248 size_t offset,
249 HInstruction* index,
250 int16_t declaring_class_def_index) const {
251 for (size_t i = 0; i < heap_locations_.size(); i++) {
252 HeapLocation* loc = heap_locations_[i];
253 if (loc->GetReferenceInfo() == ref_info &&
254 loc->GetOffset() == offset &&
255 loc->GetIndex() == index &&
256 loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
257 return i;
258 }
259 }
260 return kHeapLocationNotFound;
261 }
262
263 // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
264 bool MayAlias(size_t index1, size_t index2) const {
265 if (index1 < index2) {
266 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
267 } else if (index1 > index2) {
268 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
269 } else {
270 DCHECK(false) << "index1 and index2 are expected to be different";
271 return true;
272 }
273 }
274
275 void BuildAliasingMatrix() {
276 const size_t number_of_locations = heap_locations_.size();
277 if (number_of_locations == 0) {
278 return;
279 }
280 size_t pos = 0;
281 // Compute aliasing info between every pair of different heap locations.
282 // Save the result in a matrix represented as a BitVector.
283 for (size_t i = 0; i < number_of_locations - 1; i++) {
284 for (size_t j = i + 1; j < number_of_locations; j++) {
285 if (ComputeMayAlias(i, j)) {
286 aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
287 }
288 pos++;
289 }
290 }
291 }
292
293 private:
294 // An allocation cannot alias with a name which already exists at the point
295 // of the allocation, such as a parameter or a load happening before the allocation.
296 bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
297 if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
298 // Any reference that can alias with the allocation must appear after it in the block/in
299 // the block's successors. In reverse post order, those instructions will be visited after
300 // the allocation.
301 return ref_info2->GetPosition() >= ref_info1->GetPosition();
302 }
303 return true;
304 }
305
306 bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
307 if (ref_info1 == ref_info2) {
308 return true;
309 } else if (ref_info1->IsSingleton()) {
310 return false;
311 } else if (ref_info2->IsSingleton()) {
312 return false;
313 } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
314 !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
315 return false;
316 }
317 return true;
318 }
319
320 // `index1` and `index2` are indices in the array of collected heap locations.
321 // Returns the position in the bit vector that tracks whether the two heap
322 // locations may alias.
323 size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
324 DCHECK(index2 > index1);
325 const size_t number_of_locations = heap_locations_.size();
326 // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
327 return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
328 }
329
330 // An additional position is passed in to make sure the calculated position is correct.
331 size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
332 size_t calculated_position = AliasingMatrixPosition(index1, index2);
333 DCHECK_EQ(calculated_position, position);
334 return calculated_position;
335 }
336
337 // Compute if two locations may alias to each other.
338 bool ComputeMayAlias(size_t index1, size_t index2) const {
339 HeapLocation* loc1 = heap_locations_[index1];
340 HeapLocation* loc2 = heap_locations_[index2];
341 if (loc1->GetOffset() != loc2->GetOffset()) {
342 // Either two different instance fields, or one is an instance
343 // field and the other is an array element.
344 return false;
345 }
346 if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
347 // Different types.
348 return false;
349 }
350 if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
351 return false;
352 }
353 if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
354 HInstruction* array_index1 = loc1->GetIndex();
355 HInstruction* array_index2 = loc2->GetIndex();
356 DCHECK(array_index1 != nullptr);
357 DCHECK(array_index2 != nullptr);
358 if (array_index1->IsIntConstant() &&
359 array_index2->IsIntConstant() &&
360 array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
361 // Different constant indices do not alias.
362 return false;
363 }
364 }
365 return true;
366 }
367
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800368 ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
369 ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700370 if (ref_info == nullptr) {
371 size_t pos = ref_info_array_.size();
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800372 ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700373 ref_info_array_.push_back(ref_info);
374 }
375 return ref_info;
376 }
377
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800378 void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
379 if (instruction->GetType() != Primitive::kPrimNot) {
380 return;
381 }
382 DCHECK(FindReferenceInfoOf(instruction) == nullptr);
383 GetOrCreateReferenceInfo(instruction);
384 }
385
Mingyao Yang8df69d42015-10-22 15:40:58 -0700386 HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
387 size_t offset,
388 HInstruction* index,
389 int16_t declaring_class_def_index) {
390 HInstruction* original_ref = HuntForOriginalReference(ref);
391 ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
392 size_t heap_location_idx = FindHeapLocationIndex(
393 ref_info, offset, index, declaring_class_def_index);
394 if (heap_location_idx == kHeapLocationNotFound) {
395 HeapLocation* heap_loc = new (GetGraph()->GetArena())
396 HeapLocation(ref_info, offset, index, declaring_class_def_index);
397 heap_locations_.push_back(heap_loc);
398 return heap_loc;
399 }
400 return heap_locations_[heap_location_idx];
401 }
402
Mingyao Yang803cbb92015-12-01 12:24:36 -0800403 HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700404 if (field_info.IsVolatile()) {
405 has_volatile_ = true;
406 }
407 const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
408 const size_t offset = field_info.GetFieldOffset().SizeValue();
Mingyao Yang803cbb92015-12-01 12:24:36 -0800409 return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700410 }
411
412 void VisitArrayAccess(HInstruction* array, HInstruction* index) {
413 GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
414 index, HeapLocation::kDeclaringClassDefIndexForArrays);
415 }
416
417 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800418 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800419 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700420 }
421
422 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
Mingyao Yang803cbb92015-12-01 12:24:36 -0800423 HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700424 has_heap_stores_ = true;
Mingyao Yang0a845202016-10-14 16:26:08 -0700425 if (location->GetReferenceInfo()->IsSingleton()) {
426 // A singleton's location value may be killed by loop side effects if it's
427 // defined before that loop, and it's stored into inside that loop.
428 HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
429 if (loop_info != nullptr) {
430 HInstruction* ref = location->GetReferenceInfo()->GetReference();
431 DCHECK(ref->IsNewInstance());
432 if (loop_info->IsDefinedOutOfTheLoop(ref)) {
433 // ref's location value may be killed by this loop's side effects.
434 location->SetValueKilledByLoopSideEffects(true);
435 } else {
436 // ref is defined inside this loop so this loop's side effects cannot
437 // kill its location value at the loop header since ref/its location doesn't
438 // exist yet at the loop header.
439 }
440 }
441 } else {
442 // For non-singletons, value_killed_by_loop_side_effects_ is inited to
443 // true.
444 DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
Mingyao Yang803cbb92015-12-01 12:24:36 -0800445 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700446 }
447
448 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800449 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800450 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700451 }
452
453 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800454 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700455 has_heap_stores_ = true;
456 }
457
458 // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
459 // since we cannot accurately track the fields.
460
461 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
462 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800463 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700464 }
465
466 void VisitArraySet(HArraySet* instruction) OVERRIDE {
467 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
468 has_heap_stores_ = true;
469 }
470
471 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
472 // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800473 CreateReferenceInfoForReferenceType(new_instance);
474 }
475
476 void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
477 CreateReferenceInfoForReferenceType(instruction);
478 }
479
480 void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
481 CreateReferenceInfoForReferenceType(instruction);
482 }
483
484 void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
485 CreateReferenceInfoForReferenceType(instruction);
486 }
487
488 void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
489 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700490 }
491
Mingyao Yang40bcb932016-02-03 05:46:57 -0800492 void VisitSelect(HSelect* instruction) OVERRIDE {
493 CreateReferenceInfoForReferenceType(instruction);
494 }
495
Mingyao Yang8df69d42015-10-22 15:40:58 -0700496 void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) OVERRIDE {
497 may_deoptimize_ = true;
498 }
499
500 void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
501 has_monitor_operations_ = true;
502 }
503
504 ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
505 ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
506 ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
507 bool has_heap_stores_; // If there is no heap stores, LSE acts as GVN with better
508 // alias analysis and won't be as effective.
509 bool has_volatile_; // If there are volatile field accesses.
510 bool has_monitor_operations_; // If there are monitor operations.
Mingyao Yang062157f2016-03-02 10:15:36 -0800511 bool may_deoptimize_; // Only true for HDeoptimize with single-frame deoptimization.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700512
513 DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
514};
515
516// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800517// A heap location can be set to kUnknownHeapValue when:
518// - initially set a value.
519// - killed due to aliasing, merging, invocation, or loop side effects.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700520static HInstruction* const kUnknownHeapValue =
521 reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800522
Mingyao Yang8df69d42015-10-22 15:40:58 -0700523// Default heap value after an allocation.
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800524// A heap location can be set to that value right after an allocation.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700525static HInstruction* const kDefaultHeapValue =
526 reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
527
528class LSEVisitor : public HGraphVisitor {
529 public:
530 LSEVisitor(HGraph* graph,
531 const HeapLocationCollector& heap_locations_collector,
532 const SideEffectsAnalysis& side_effects)
533 : HGraphVisitor(graph),
534 heap_location_collector_(heap_locations_collector),
535 side_effects_(side_effects),
536 heap_values_for_(graph->GetBlocks().size(),
537 ArenaVector<HInstruction*>(heap_locations_collector.
538 GetNumberOfHeapLocations(),
539 kUnknownHeapValue,
540 graph->GetArena()->Adapter(kArenaAllocLSE)),
541 graph->GetArena()->Adapter(kArenaAllocLSE)),
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800542 removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
543 substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
544 possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
Mingyao Yang8df69d42015-10-22 15:40:58 -0700545 singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
546 }
547
548 void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800549 // Populate the heap_values array for this block.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700550 // TODO: try to reuse the heap_values array from one predecessor if possible.
551 if (block->IsLoopHeader()) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800552 HandleLoopSideEffects(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700553 } else {
554 MergePredecessorValues(block);
555 }
556 HGraphVisitor::VisitBasicBlock(block);
557 }
558
559 // Remove recorded instructions that should be eliminated.
560 void RemoveInstructions() {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800561 size_t size = removed_loads_.size();
562 DCHECK_EQ(size, substitute_instructions_for_loads_.size());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700563 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800564 HInstruction* load = removed_loads_[i];
565 DCHECK(load != nullptr);
566 DCHECK(load->IsInstanceFieldGet() ||
567 load->IsStaticFieldGet() ||
568 load->IsArrayGet());
569 HInstruction* substitute = substitute_instructions_for_loads_[i];
570 DCHECK(substitute != nullptr);
571 // Keep tracing substitute till one that's not removed.
572 HInstruction* sub_sub = FindSubstitute(substitute);
573 while (sub_sub != substitute) {
574 substitute = sub_sub;
575 sub_sub = FindSubstitute(substitute);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700576 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800577 load->ReplaceWith(substitute);
578 load->GetBlock()->RemoveInstruction(load);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700579 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800580
581 // At this point, stores in possibly_removed_stores_ can be safely removed.
Mingyao Yang062157f2016-03-02 10:15:36 -0800582 for (size_t i = 0, e = possibly_removed_stores_.size(); i < e; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800583 HInstruction* store = possibly_removed_stores_[i];
584 DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
585 store->GetBlock()->RemoveInstruction(store);
586 }
587
Mingyao Yang062157f2016-03-02 10:15:36 -0800588 // Eliminate allocations that are not used.
589 for (size_t i = 0, e = singleton_new_instances_.size(); i < e; i++) {
590 HInstruction* new_instance = singleton_new_instances_[i];
591 if (!new_instance->HasNonEnvironmentUses()) {
592 new_instance->RemoveEnvironmentUsers();
593 new_instance->GetBlock()->RemoveInstruction(new_instance);
594 }
595 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700596 }
597
598 private:
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800599 // If heap_values[index] is an instance field store, need to keep the store.
600 // This is necessary if a heap value is killed due to merging, or loop side
601 // effects (which is essentially merging also), since a load later from the
602 // location won't be eliminated.
603 void KeepIfIsStore(HInstruction* heap_value) {
604 if (heap_value == kDefaultHeapValue ||
605 heap_value == kUnknownHeapValue ||
606 !heap_value->IsInstanceFieldSet()) {
607 return;
608 }
609 auto idx = std::find(possibly_removed_stores_.begin(),
610 possibly_removed_stores_.end(), heap_value);
611 if (idx != possibly_removed_stores_.end()) {
612 // Make sure the store is kept.
613 possibly_removed_stores_.erase(idx);
614 }
615 }
616
617 void HandleLoopSideEffects(HBasicBlock* block) {
618 DCHECK(block->IsLoopHeader());
619 int block_id = block->GetBlockId();
620 ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];
Nicolas Geoffray15bd2282016-01-05 15:55:41 +0000621
622 // Don't eliminate loads in irreducible loops. This is safe for singletons, because
623 // they are always used by the non-eliminated loop-phi.
624 if (block->GetLoopInformation()->IsIrreducible()) {
625 if (kIsDebugBuild) {
626 for (size_t i = 0; i < heap_values.size(); i++) {
627 DCHECK_EQ(heap_values[i], kUnknownHeapValue);
628 }
629 }
630 return;
631 }
632
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800633 HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
634 ArenaVector<HInstruction*>& pre_header_heap_values =
635 heap_values_for_[pre_header->GetBlockId()];
Nicolas Geoffray15bd2282016-01-05 15:55:41 +0000636
Mingyao Yang803cbb92015-12-01 12:24:36 -0800637 // Inherit the values from pre-header.
638 for (size_t i = 0; i < heap_values.size(); i++) {
639 heap_values[i] = pre_header_heap_values[i];
640 }
641
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800642 // We do a single pass in reverse post order. For loops, use the side effects as a hint
643 // to see if the heap values should be killed.
644 if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800645 for (size_t i = 0; i < heap_values.size(); i++) {
Mingyao Yang803cbb92015-12-01 12:24:36 -0800646 HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
647 ReferenceInfo* ref_info = location->GetReferenceInfo();
648 if (!ref_info->IsSingleton() || location->IsValueKilledByLoopSideEffects()) {
649 // heap value is killed by loop side effects (stored into directly, or due to
650 // aliasing).
651 KeepIfIsStore(pre_header_heap_values[i]);
652 heap_values[i] = kUnknownHeapValue;
653 } else {
654 // A singleton's field that's not stored into inside a loop is invariant throughout
655 // the loop.
656 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800657 }
658 }
659 }
660
Mingyao Yang8df69d42015-10-22 15:40:58 -0700661 void MergePredecessorValues(HBasicBlock* block) {
662 const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
663 if (predecessors.size() == 0) {
664 return;
665 }
666 ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
667 for (size_t i = 0; i < heap_values.size(); i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800668 HInstruction* pred0_value = heap_values_for_[predecessors[0]->GetBlockId()][i];
669 heap_values[i] = pred0_value;
670 if (pred0_value != kUnknownHeapValue) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700671 for (size_t j = 1; j < predecessors.size(); j++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800672 HInstruction* pred_value = heap_values_for_[predecessors[j]->GetBlockId()][i];
673 if (pred_value != pred0_value) {
674 heap_values[i] = kUnknownHeapValue;
Mingyao Yang8df69d42015-10-22 15:40:58 -0700675 break;
676 }
677 }
678 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800679
680 if (heap_values[i] == kUnknownHeapValue) {
681 // Keep the last store in each predecessor since future loads cannot be eliminated.
682 for (size_t j = 0; j < predecessors.size(); j++) {
683 ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessors[j]->GetBlockId()];
684 KeepIfIsStore(pred_values[i]);
685 }
686 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700687 }
688 }
689
690 // `instruction` is being removed. Try to see if the null check on it
691 // can be removed. This can happen if the same value is set in two branches
692 // but not in dominators. Such as:
693 // int[] a = foo();
694 // if () {
695 // a[0] = 2;
696 // } else {
697 // a[0] = 2;
698 // }
699 // // a[0] can now be replaced with constant 2, and the null check on it can be removed.
700 void TryRemovingNullCheck(HInstruction* instruction) {
701 HInstruction* prev = instruction->GetPrevious();
702 if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
703 // Previous instruction is a null check for this instruction. Remove the null check.
704 prev->ReplaceWith(prev->InputAt(0));
705 prev->GetBlock()->RemoveInstruction(prev);
706 }
707 }
708
709 HInstruction* GetDefaultValue(Primitive::Type type) {
710 switch (type) {
711 case Primitive::kPrimNot:
712 return GetGraph()->GetNullConstant();
713 case Primitive::kPrimBoolean:
714 case Primitive::kPrimByte:
715 case Primitive::kPrimChar:
716 case Primitive::kPrimShort:
717 case Primitive::kPrimInt:
718 return GetGraph()->GetIntConstant(0);
719 case Primitive::kPrimLong:
720 return GetGraph()->GetLongConstant(0);
721 case Primitive::kPrimFloat:
722 return GetGraph()->GetFloatConstant(0);
723 case Primitive::kPrimDouble:
724 return GetGraph()->GetDoubleConstant(0);
725 default:
726 UNREACHABLE();
727 }
728 }
729
  // Handles a heap load (field/array get) at `instruction` from the location
  // identified by (ref, offset, index, declaring_class_def_index).
  // If the tracked value of that location is known, the load is queued for
  // removal with a recorded substitute; otherwise the load instruction itself
  // becomes the location's value (GVN-like, but with aliasing analysis).
  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      // Location still holds its default (zero/null) value: substitute the
      // load with the corresponding constant.
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton since it's from the same field
      // that a "removed" store puts the value. That store must be to a singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if (heap_value == kUnknownHeapValue) {
      // Load isn't eliminated. Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    } else {
      if (Primitive::PrimitiveKind(heap_value->GetType())
              != Primitive::PrimitiveKind(instruction->GetType())) {
        // The only situation where the same heap location has different type is when
        // we do an array get on an instruction that originates from the null constant
        // (the null could be behind a field access, an array access, a null check or
        // a bound type).
        // In order to stay properly typed on primitive types, we do not eliminate
        // the array gets.
        if (kIsDebugBuild) {
          DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
          DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
        }
        return;
      }
      // Known value: queue the load for removal and try to also drop a
      // preceding null check that guarded it.
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
    }
  }
782
783 bool Equal(HInstruction* heap_value, HInstruction* value) {
784 if (heap_value == value) {
785 return true;
786 }
787 if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
788 return true;
789 }
790 return false;
791 }
792
  // Handles a heap store (field/array set) of `value` into the location
  // identified by (ref, offset, index, declaring_class_def_index).
  // The store is classified as same-value (definitely removable), possibly
  // redundant (removable unless a later load needs it), or necessary; the
  // tracked heap value is updated and aliasing locations are invalidated.
  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For array element, don't eliminate stores since it can be easily aliased
      // with non-constant index.
    } else if (!heap_location_collector_.MayDeoptimize() &&
               ref_info->IsSingletonAndNotReturned()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // `instruction` is a store inside the loop, so the loop must do writes.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());

          if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. Value stored to it isn't needed at
            // the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }
878
879 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
880 HInstruction* obj = instruction->InputAt(0);
881 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
882 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
883 VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
884 }
885
886 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
887 HInstruction* obj = instruction->InputAt(0);
888 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
889 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
890 HInstruction* value = instruction->InputAt(1);
891 VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
892 }
893
894 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
895 HInstruction* cls = instruction->InputAt(0);
896 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
897 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
898 VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
899 }
900
901 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
902 HInstruction* cls = instruction->InputAt(0);
903 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
904 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
905 HInstruction* value = instruction->InputAt(1);
906 VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
907 }
908
909 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
910 HInstruction* array = instruction->InputAt(0);
911 HInstruction* index = instruction->InputAt(1);
912 VisitGetLocation(instruction,
913 array,
914 HeapLocation::kInvalidFieldOffset,
915 index,
916 HeapLocation::kDeclaringClassDefIndexForArrays);
917 }
918
919 void VisitArraySet(HArraySet* instruction) OVERRIDE {
920 HInstruction* array = instruction->InputAt(0);
921 HInstruction* index = instruction->InputAt(1);
922 HInstruction* value = instruction->InputAt(2);
923 VisitSetLocation(instruction,
924 array,
925 HeapLocation::kInvalidFieldOffset,
926 index,
927 HeapLocation::kDeclaringClassDefIndexForArrays,
928 value);
929 }
930
931 void HandleInvoke(HInstruction* invoke) {
932 ArenaVector<HInstruction*>& heap_values =
933 heap_values_for_[invoke->GetBlock()->GetBlockId()];
934 for (size_t i = 0; i < heap_values.size(); i++) {
935 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
936 if (ref_info->IsSingleton()) {
937 // Singleton references cannot be seen by the callee.
938 } else {
939 heap_values[i] = kUnknownHeapValue;
940 }
941 }
942 }
943
  // The following visitors all funnel into HandleInvoke: each of these
  // instructions may run arbitrary code (or perform an unresolved access),
  // so non-singleton heap values must be invalidated conservatively.
  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  // A class initializer can run arbitrary code; treat like an invocation.
  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    HandleInvoke(clinit);
  }

  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }
983
  // Records a singleton allocation as a candidate for full elimination and
  // marks the new object's instance fields (past the object header) as
  // holding default (zero/null) heap values.
  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
    if (ref_info == nullptr) {
      // new_instance isn't used for field accesses. No need to process it.
      return;
    }
    if (!heap_location_collector_.MayDeoptimize() &&
        ref_info->IsSingletonAndNotReturned() &&
        !new_instance->IsFinalizable() &&
        !new_instance->NeedsAccessCheck()) {
      // Allocation may itself be removable if all its uses get eliminated.
      singleton_new_instances_.push_back(new_instance);
    }
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[new_instance->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* ref =
          heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
      size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
      if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
        // Instance fields except the header fields are set to default heap values.
        heap_values[i] = kDefaultHeapValue;
      }
    }
  }
1008
1009 // Find an instruction's substitute if it should be removed.
1010 // Return the same instruction if it should not be removed.
1011 HInstruction* FindSubstitute(HInstruction* instruction) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -08001012 size_t size = removed_loads_.size();
Mingyao Yang8df69d42015-10-22 15:40:58 -07001013 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -08001014 if (removed_loads_[i] == instruction) {
1015 return substitute_instructions_for_loads_[i];
Mingyao Yang8df69d42015-10-22 15:40:58 -07001016 }
1017 }
1018 return instruction;
1019 }
1020
  // Heap locations and aliasing information computed before this pass runs.
  const HeapLocationCollector& heap_location_collector_;
  // Per-loop side-effect summaries; used to check loop-write assumptions.
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;

  // We record the instructions that should be eliminated but may be
  // used by heap locations. They'll be removed in the end.
  // Parallel arrays: removed_loads_[i] is replaced by
  // substitute_instructions_for_loads_[i].
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  // Singleton allocations that may be removed entirely if all uses go away.
  ArenaVector<HInstruction*> singleton_new_instances_;

  DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
1039};
1040
1041void LoadStoreElimination::Run() {
David Brazdil8993caf2015-12-07 10:04:40 +00001042 if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -07001043 // Debugger may set heap values or trigger deoptimization of callers.
David Brazdil8993caf2015-12-07 10:04:40 +00001044 // Try/catch support not implemented yet.
Mingyao Yang8df69d42015-10-22 15:40:58 -07001045 // Skip this optimization.
1046 return;
1047 }
1048 HeapLocationCollector heap_location_collector(graph_);
Vladimir Marko2c45bc92016-10-25 16:54:12 +01001049 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1050 heap_location_collector.VisitBasicBlock(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -07001051 }
1052 if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
1053 // Bail out if there are too many heap locations to deal with.
1054 return;
1055 }
1056 if (!heap_location_collector.HasHeapStores()) {
1057 // Without heap stores, this pass would act mostly as GVN on heap accesses.
1058 return;
1059 }
1060 if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
1061 // Don't do load/store elimination if the method has volatile field accesses or
1062 // monitor operations, for now.
1063 // TODO: do it right.
1064 return;
1065 }
1066 heap_location_collector.BuildAliasingMatrix();
1067 LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
Vladimir Marko2c45bc92016-10-25 16:54:12 +01001068 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1069 lse_visitor.VisitBasicBlock(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -07001070 }
1071 lse_visitor.RemoveInstructions();
1072}
1073
1074} // namespace art