blob: 5b2cbf783d0970dfb89bbb9d3376ce48a3583775 [file] [log] [blame]
Mingyao Yang8df69d42015-10-22 15:40:58 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "load_store_elimination.h"
18#include "side_effects_analysis.h"
19
20#include <iostream>
21
22namespace art {
23
// Forward declaration; the full definition follows below.
class ReferenceInfo;

// A cap for the number of heap locations to prevent pathological time/space
// consumption. The heap-location count of most methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;
29
30// A ReferenceInfo contains additional info about a reference such as
31// whether it's a singleton, returned, etc.
32class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
33 public:
34 ReferenceInfo(HInstruction* reference, size_t pos) : reference_(reference), position_(pos) {
35 is_singleton_ = true;
36 is_singleton_and_not_returned_ = true;
37 if (!reference_->IsNewInstance() && !reference_->IsNewArray()) {
38 // For references not allocated in the method, don't assume anything.
39 is_singleton_ = false;
40 is_singleton_and_not_returned_ = false;
41 return;
42 }
43
44 // Visit all uses to determine if this reference can spread into the heap,
45 // a method call, etc.
Vladimir Marko46817b82016-03-29 12:21:58 +010046 for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
47 HInstruction* user = use.GetUser();
48 DCHECK(!user->IsNullCheck()) << "NullCheck should have been eliminated";
49 if (user->IsBoundType()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -070050 // BoundType shouldn't normally be necessary for a NewInstance.
51 // Just be conservative for the uncommon cases.
52 is_singleton_ = false;
53 is_singleton_and_not_returned_ = false;
54 return;
55 }
Vladimir Marko46817b82016-03-29 12:21:58 +010056 if (user->IsPhi() || user->IsSelect() || user->IsInvoke() ||
57 (user->IsInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
58 (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
59 (user->IsStaticFieldSet() && (reference_ == user->InputAt(1))) ||
60 (user->IsUnresolvedStaticFieldSet() && (reference_ == user->InputAt(0))) ||
61 (user->IsArraySet() && (reference_ == user->InputAt(2)))) {
Mingyao Yang40bcb932016-02-03 05:46:57 -080062 // reference_ is merged to HPhi/HSelect, passed to a callee, or stored to heap.
Mingyao Yang8df69d42015-10-22 15:40:58 -070063 // reference_ isn't the only name that can refer to its value anymore.
64 is_singleton_ = false;
65 is_singleton_and_not_returned_ = false;
66 return;
67 }
Nicolas Geoffrayb93a1652016-06-27 10:03:29 +010068 if ((user->IsUnresolvedInstanceFieldGet() && (reference_ == user->InputAt(0))) ||
69 (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(0)))) {
70 // The field is accessed in an unresolved way. We mark the object as a singleton to
71 // disable load/store optimizations on it.
72 // Note that we could optimize this case and still perform some optimizations until
73 // we hit the unresolved access, but disabling is the simplest.
74 is_singleton_ = false;
75 is_singleton_and_not_returned_ = false;
76 return;
77 }
Vladimir Marko46817b82016-03-29 12:21:58 +010078 if (user->IsReturn()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -070079 is_singleton_and_not_returned_ = false;
80 }
81 }
82 }
83
84 HInstruction* GetReference() const {
85 return reference_;
86 }
87
88 size_t GetPosition() const {
89 return position_;
90 }
91
92 // Returns true if reference_ is the only name that can refer to its value during
93 // the lifetime of the method. So it's guaranteed to not have any alias in
94 // the method (including its callees).
95 bool IsSingleton() const {
96 return is_singleton_;
97 }
98
99 // Returns true if reference_ is a singleton and not returned to the caller.
100 // The allocation and stores into reference_ may be eliminated for such cases.
101 bool IsSingletonAndNotReturned() const {
102 return is_singleton_and_not_returned_;
103 }
104
105 private:
106 HInstruction* const reference_;
107 const size_t position_; // position in HeapLocationCollector's ref_info_array_.
108 bool is_singleton_; // can only be referred to by a single name in the method.
109 bool is_singleton_and_not_returned_; // reference_ is singleton and not returned to caller.
110
111 DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
112};
113
114// A heap location is a reference-offset/index pair that a value can be loaded from
115// or stored to.
116class HeapLocation : public ArenaObject<kArenaAllocMisc> {
117 public:
118 static constexpr size_t kInvalidFieldOffset = -1;
119
120 // TODO: more fine-grained array types.
121 static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
122
123 HeapLocation(ReferenceInfo* ref_info,
124 size_t offset,
125 HInstruction* index,
126 int16_t declaring_class_def_index)
127 : ref_info_(ref_info),
128 offset_(offset),
129 index_(index),
Mingyao Yang803cbb92015-12-01 12:24:36 -0800130 declaring_class_def_index_(declaring_class_def_index),
131 value_killed_by_loop_side_effects_(true) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700132 DCHECK(ref_info != nullptr);
133 DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
134 (offset != kInvalidFieldOffset && index == nullptr));
Mingyao Yang803cbb92015-12-01 12:24:36 -0800135 if (ref_info->IsSingleton() && !IsArrayElement()) {
136 // Assume this location's value cannot be killed by loop side effects
137 // until proven otherwise.
138 value_killed_by_loop_side_effects_ = false;
139 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700140 }
141
142 ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
143 size_t GetOffset() const { return offset_; }
144 HInstruction* GetIndex() const { return index_; }
145
146 // Returns the definition of declaring class' dex index.
147 // It's kDeclaringClassDefIndexForArrays for an array element.
148 int16_t GetDeclaringClassDefIndex() const {
149 return declaring_class_def_index_;
150 }
151
152 bool IsArrayElement() const {
153 return index_ != nullptr;
154 }
155
Mingyao Yang803cbb92015-12-01 12:24:36 -0800156 bool IsValueKilledByLoopSideEffects() const {
157 return value_killed_by_loop_side_effects_;
158 }
159
160 void SetValueKilledByLoopSideEffects(bool val) {
161 value_killed_by_loop_side_effects_ = val;
162 }
163
Mingyao Yang8df69d42015-10-22 15:40:58 -0700164 private:
165 ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
166 const size_t offset_; // offset of static/instance field.
167 HInstruction* const index_; // index of an array element.
168 const int16_t declaring_class_def_index_; // declaring class's def's dex index.
Mingyao Yang803cbb92015-12-01 12:24:36 -0800169 bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
170 // side effects because this location is stored
Mingyao Yang0a845202016-10-14 16:26:08 -0700171 // into inside a loop. This gives
172 // better info on whether a singleton's location
173 // value may be killed by loop side effects.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700174
175 DISALLOW_COPY_AND_ASSIGN(HeapLocation);
176};
177
178static HInstruction* HuntForOriginalReference(HInstruction* ref) {
179 DCHECK(ref != nullptr);
180 while (ref->IsNullCheck() || ref->IsBoundType()) {
181 ref = ref->InputAt(0);
182 }
183 return ref;
184}
185
186// A HeapLocationCollector collects all relevant heap locations and keeps
187// an aliasing matrix for all locations.
188class HeapLocationCollector : public HGraphVisitor {
189 public:
190 static constexpr size_t kHeapLocationNotFound = -1;
191 // Start with a single uint32_t word. That's enough bits for pair-wise
192 // aliasing matrix of 8 heap locations.
193 static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
194
195 explicit HeapLocationCollector(HGraph* graph)
196 : HGraphVisitor(graph),
197 ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
198 heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
Vladimir Markof6a35de2016-03-21 12:01:50 +0000199 aliasing_matrix_(graph->GetArena(),
200 kInitialAliasingMatrixBitVectorSize,
201 true,
202 kArenaAllocLSE),
Mingyao Yang8df69d42015-10-22 15:40:58 -0700203 has_heap_stores_(false),
204 has_volatile_(false),
205 has_monitor_operations_(false),
206 may_deoptimize_(false) {}
207
208 size_t GetNumberOfHeapLocations() const {
209 return heap_locations_.size();
210 }
211
212 HeapLocation* GetHeapLocation(size_t index) const {
213 return heap_locations_[index];
214 }
215
216 ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
217 for (size_t i = 0; i < ref_info_array_.size(); i++) {
218 ReferenceInfo* ref_info = ref_info_array_[i];
219 if (ref_info->GetReference() == ref) {
220 DCHECK_EQ(i, ref_info->GetPosition());
221 return ref_info;
222 }
223 }
224 return nullptr;
225 }
226
227 bool HasHeapStores() const {
228 return has_heap_stores_;
229 }
230
231 bool HasVolatile() const {
232 return has_volatile_;
233 }
234
235 bool HasMonitorOps() const {
236 return has_monitor_operations_;
237 }
238
239 // Returns whether this method may be deoptimized.
240 // Currently we don't have meta data support for deoptimizing
241 // a method that eliminates allocations/stores.
242 bool MayDeoptimize() const {
243 return may_deoptimize_;
244 }
245
246 // Find and return the heap location index in heap_locations_.
247 size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
248 size_t offset,
249 HInstruction* index,
250 int16_t declaring_class_def_index) const {
251 for (size_t i = 0; i < heap_locations_.size(); i++) {
252 HeapLocation* loc = heap_locations_[i];
253 if (loc->GetReferenceInfo() == ref_info &&
254 loc->GetOffset() == offset &&
255 loc->GetIndex() == index &&
256 loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
257 return i;
258 }
259 }
260 return kHeapLocationNotFound;
261 }
262
263 // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
264 bool MayAlias(size_t index1, size_t index2) const {
265 if (index1 < index2) {
266 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
267 } else if (index1 > index2) {
268 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
269 } else {
270 DCHECK(false) << "index1 and index2 are expected to be different";
271 return true;
272 }
273 }
274
275 void BuildAliasingMatrix() {
276 const size_t number_of_locations = heap_locations_.size();
277 if (number_of_locations == 0) {
278 return;
279 }
280 size_t pos = 0;
281 // Compute aliasing info between every pair of different heap locations.
282 // Save the result in a matrix represented as a BitVector.
283 for (size_t i = 0; i < number_of_locations - 1; i++) {
284 for (size_t j = i + 1; j < number_of_locations; j++) {
285 if (ComputeMayAlias(i, j)) {
286 aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
287 }
288 pos++;
289 }
290 }
291 }
292
293 private:
294 // An allocation cannot alias with a name which already exists at the point
295 // of the allocation, such as a parameter or a load happening before the allocation.
296 bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
297 if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
298 // Any reference that can alias with the allocation must appear after it in the block/in
299 // the block's successors. In reverse post order, those instructions will be visited after
300 // the allocation.
301 return ref_info2->GetPosition() >= ref_info1->GetPosition();
302 }
303 return true;
304 }
305
306 bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
307 if (ref_info1 == ref_info2) {
308 return true;
309 } else if (ref_info1->IsSingleton()) {
310 return false;
311 } else if (ref_info2->IsSingleton()) {
312 return false;
313 } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
314 !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
315 return false;
316 }
317 return true;
318 }
319
320 // `index1` and `index2` are indices in the array of collected heap locations.
321 // Returns the position in the bit vector that tracks whether the two heap
322 // locations may alias.
323 size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
324 DCHECK(index2 > index1);
325 const size_t number_of_locations = heap_locations_.size();
326 // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
327 return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
328 }
329
330 // An additional position is passed in to make sure the calculated position is correct.
331 size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
332 size_t calculated_position = AliasingMatrixPosition(index1, index2);
333 DCHECK_EQ(calculated_position, position);
334 return calculated_position;
335 }
336
337 // Compute if two locations may alias to each other.
338 bool ComputeMayAlias(size_t index1, size_t index2) const {
339 HeapLocation* loc1 = heap_locations_[index1];
340 HeapLocation* loc2 = heap_locations_[index2];
341 if (loc1->GetOffset() != loc2->GetOffset()) {
342 // Either two different instance fields, or one is an instance
343 // field and the other is an array element.
344 return false;
345 }
346 if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
347 // Different types.
348 return false;
349 }
350 if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
351 return false;
352 }
353 if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
354 HInstruction* array_index1 = loc1->GetIndex();
355 HInstruction* array_index2 = loc2->GetIndex();
356 DCHECK(array_index1 != nullptr);
357 DCHECK(array_index2 != nullptr);
358 if (array_index1->IsIntConstant() &&
359 array_index2->IsIntConstant() &&
360 array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
361 // Different constant indices do not alias.
362 return false;
363 }
364 }
365 return true;
366 }
367
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800368 ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
369 ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700370 if (ref_info == nullptr) {
371 size_t pos = ref_info_array_.size();
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800372 ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700373 ref_info_array_.push_back(ref_info);
374 }
375 return ref_info;
376 }
377
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800378 void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
379 if (instruction->GetType() != Primitive::kPrimNot) {
380 return;
381 }
382 DCHECK(FindReferenceInfoOf(instruction) == nullptr);
383 GetOrCreateReferenceInfo(instruction);
384 }
385
Mingyao Yang8df69d42015-10-22 15:40:58 -0700386 HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
387 size_t offset,
388 HInstruction* index,
389 int16_t declaring_class_def_index) {
390 HInstruction* original_ref = HuntForOriginalReference(ref);
391 ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
392 size_t heap_location_idx = FindHeapLocationIndex(
393 ref_info, offset, index, declaring_class_def_index);
394 if (heap_location_idx == kHeapLocationNotFound) {
395 HeapLocation* heap_loc = new (GetGraph()->GetArena())
396 HeapLocation(ref_info, offset, index, declaring_class_def_index);
397 heap_locations_.push_back(heap_loc);
398 return heap_loc;
399 }
400 return heap_locations_[heap_location_idx];
401 }
402
Mingyao Yang803cbb92015-12-01 12:24:36 -0800403 HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700404 if (field_info.IsVolatile()) {
405 has_volatile_ = true;
406 }
407 const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
408 const size_t offset = field_info.GetFieldOffset().SizeValue();
Mingyao Yang803cbb92015-12-01 12:24:36 -0800409 return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700410 }
411
412 void VisitArrayAccess(HInstruction* array, HInstruction* index) {
413 GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
414 index, HeapLocation::kDeclaringClassDefIndexForArrays);
415 }
416
417 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800418 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800419 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700420 }
421
422 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
Mingyao Yang803cbb92015-12-01 12:24:36 -0800423 HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700424 has_heap_stores_ = true;
Mingyao Yang0a845202016-10-14 16:26:08 -0700425 if (location->GetReferenceInfo()->IsSingleton()) {
426 // A singleton's location value may be killed by loop side effects if it's
427 // defined before that loop, and it's stored into inside that loop.
428 HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
429 if (loop_info != nullptr) {
430 HInstruction* ref = location->GetReferenceInfo()->GetReference();
431 DCHECK(ref->IsNewInstance());
432 if (loop_info->IsDefinedOutOfTheLoop(ref)) {
433 // ref's location value may be killed by this loop's side effects.
434 location->SetValueKilledByLoopSideEffects(true);
435 } else {
436 // ref is defined inside this loop so this loop's side effects cannot
437 // kill its location value at the loop header since ref/its location doesn't
438 // exist yet at the loop header.
439 }
440 }
441 } else {
442 // For non-singletons, value_killed_by_loop_side_effects_ is inited to
443 // true.
444 DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
Mingyao Yang803cbb92015-12-01 12:24:36 -0800445 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700446 }
447
448 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800449 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800450 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700451 }
452
453 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800454 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700455 has_heap_stores_ = true;
456 }
457
458 // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
459 // since we cannot accurately track the fields.
460
461 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
462 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800463 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700464 }
465
466 void VisitArraySet(HArraySet* instruction) OVERRIDE {
467 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
468 has_heap_stores_ = true;
469 }
470
471 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
472 // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
Mingyao Yang8ab1d642015-12-03 14:11:15 -0800473 CreateReferenceInfoForReferenceType(new_instance);
474 }
475
476 void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
477 CreateReferenceInfoForReferenceType(instruction);
478 }
479
480 void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
481 CreateReferenceInfoForReferenceType(instruction);
482 }
483
484 void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
485 CreateReferenceInfoForReferenceType(instruction);
486 }
487
488 void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
489 CreateReferenceInfoForReferenceType(instruction);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700490 }
491
Mingyao Yang40bcb932016-02-03 05:46:57 -0800492 void VisitSelect(HSelect* instruction) OVERRIDE {
493 CreateReferenceInfoForReferenceType(instruction);
494 }
495
Mingyao Yang8df69d42015-10-22 15:40:58 -0700496 void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) OVERRIDE {
497 may_deoptimize_ = true;
498 }
499
500 void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
501 has_monitor_operations_ = true;
502 }
503
504 ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
505 ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
506 ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
507 bool has_heap_stores_; // If there is no heap stores, LSE acts as GVN with better
508 // alias analysis and won't be as effective.
509 bool has_volatile_; // If there are volatile field accesses.
510 bool has_monitor_operations_; // If there are monitor operations.
Mingyao Yang062157f2016-03-02 10:15:36 -0800511 bool may_deoptimize_; // Only true for HDeoptimize with single-frame deoptimization.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700512
513 DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
514};
515
516// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800517// A heap location can be set to kUnknownHeapValue when:
518// - initially set a value.
519// - killed due to aliasing, merging, invocation, or loop side effects.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700520static HInstruction* const kUnknownHeapValue =
521 reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800522
Mingyao Yang8df69d42015-10-22 15:40:58 -0700523// Default heap value after an allocation.
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800524// A heap location can be set to that value right after an allocation.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700525static HInstruction* const kDefaultHeapValue =
526 reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
527
528class LSEVisitor : public HGraphVisitor {
529 public:
530 LSEVisitor(HGraph* graph,
531 const HeapLocationCollector& heap_locations_collector,
532 const SideEffectsAnalysis& side_effects)
533 : HGraphVisitor(graph),
534 heap_location_collector_(heap_locations_collector),
535 side_effects_(side_effects),
536 heap_values_for_(graph->GetBlocks().size(),
537 ArenaVector<HInstruction*>(heap_locations_collector.
538 GetNumberOfHeapLocations(),
539 kUnknownHeapValue,
540 graph->GetArena()->Adapter(kArenaAllocLSE)),
541 graph->GetArena()->Adapter(kArenaAllocLSE)),
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800542 removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
543 substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
544 possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
Mingyao Yang8df69d42015-10-22 15:40:58 -0700545 singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
546 }
547
548 void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800549 // Populate the heap_values array for this block.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700550 // TODO: try to reuse the heap_values array from one predecessor if possible.
551 if (block->IsLoopHeader()) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800552 HandleLoopSideEffects(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700553 } else {
554 MergePredecessorValues(block);
555 }
556 HGraphVisitor::VisitBasicBlock(block);
557 }
558
559 // Remove recorded instructions that should be eliminated.
560 void RemoveInstructions() {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800561 size_t size = removed_loads_.size();
562 DCHECK_EQ(size, substitute_instructions_for_loads_.size());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700563 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800564 HInstruction* load = removed_loads_[i];
565 DCHECK(load != nullptr);
566 DCHECK(load->IsInstanceFieldGet() ||
567 load->IsStaticFieldGet() ||
568 load->IsArrayGet());
569 HInstruction* substitute = substitute_instructions_for_loads_[i];
570 DCHECK(substitute != nullptr);
571 // Keep tracing substitute till one that's not removed.
572 HInstruction* sub_sub = FindSubstitute(substitute);
573 while (sub_sub != substitute) {
574 substitute = sub_sub;
575 sub_sub = FindSubstitute(substitute);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700576 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800577 load->ReplaceWith(substitute);
578 load->GetBlock()->RemoveInstruction(load);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700579 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800580
581 // At this point, stores in possibly_removed_stores_ can be safely removed.
Mingyao Yang062157f2016-03-02 10:15:36 -0800582 for (size_t i = 0, e = possibly_removed_stores_.size(); i < e; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800583 HInstruction* store = possibly_removed_stores_[i];
584 DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
585 store->GetBlock()->RemoveInstruction(store);
586 }
587
Mingyao Yang062157f2016-03-02 10:15:36 -0800588 // Eliminate allocations that are not used.
589 for (size_t i = 0, e = singleton_new_instances_.size(); i < e; i++) {
590 HInstruction* new_instance = singleton_new_instances_[i];
591 if (!new_instance->HasNonEnvironmentUses()) {
592 new_instance->RemoveEnvironmentUsers();
593 new_instance->GetBlock()->RemoveInstruction(new_instance);
594 }
595 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700596 }
597
598 private:
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800599 // If heap_values[index] is an instance field store, need to keep the store.
600 // This is necessary if a heap value is killed due to merging, or loop side
601 // effects (which is essentially merging also), since a load later from the
602 // location won't be eliminated.
603 void KeepIfIsStore(HInstruction* heap_value) {
604 if (heap_value == kDefaultHeapValue ||
605 heap_value == kUnknownHeapValue ||
606 !heap_value->IsInstanceFieldSet()) {
607 return;
608 }
609 auto idx = std::find(possibly_removed_stores_.begin(),
610 possibly_removed_stores_.end(), heap_value);
611 if (idx != possibly_removed_stores_.end()) {
612 // Make sure the store is kept.
613 possibly_removed_stores_.erase(idx);
614 }
615 }
616
617 void HandleLoopSideEffects(HBasicBlock* block) {
618 DCHECK(block->IsLoopHeader());
619 int block_id = block->GetBlockId();
620 ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];
Nicolas Geoffray15bd2282016-01-05 15:55:41 +0000621
622 // Don't eliminate loads in irreducible loops. This is safe for singletons, because
623 // they are always used by the non-eliminated loop-phi.
624 if (block->GetLoopInformation()->IsIrreducible()) {
625 if (kIsDebugBuild) {
626 for (size_t i = 0; i < heap_values.size(); i++) {
627 DCHECK_EQ(heap_values[i], kUnknownHeapValue);
628 }
629 }
630 return;
631 }
632
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800633 HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
634 ArenaVector<HInstruction*>& pre_header_heap_values =
635 heap_values_for_[pre_header->GetBlockId()];
Nicolas Geoffray15bd2282016-01-05 15:55:41 +0000636
Mingyao Yang803cbb92015-12-01 12:24:36 -0800637 // Inherit the values from pre-header.
638 for (size_t i = 0; i < heap_values.size(); i++) {
639 heap_values[i] = pre_header_heap_values[i];
640 }
641
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800642 // We do a single pass in reverse post order. For loops, use the side effects as a hint
643 // to see if the heap values should be killed.
644 if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800645 for (size_t i = 0; i < heap_values.size(); i++) {
Mingyao Yang803cbb92015-12-01 12:24:36 -0800646 HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
647 ReferenceInfo* ref_info = location->GetReferenceInfo();
648 if (!ref_info->IsSingleton() || location->IsValueKilledByLoopSideEffects()) {
649 // heap value is killed by loop side effects (stored into directly, or due to
650 // aliasing).
651 KeepIfIsStore(pre_header_heap_values[i]);
652 heap_values[i] = kUnknownHeapValue;
653 } else {
654 // A singleton's field that's not stored into inside a loop is invariant throughout
655 // the loop.
656 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800657 }
658 }
659 }
660
Mingyao Yang8df69d42015-10-22 15:40:58 -0700661 void MergePredecessorValues(HBasicBlock* block) {
662 const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
663 if (predecessors.size() == 0) {
664 return;
665 }
Mingyao Yang58d9bfc2016-11-01 13:31:58 -0700666
Mingyao Yang8df69d42015-10-22 15:40:58 -0700667 ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
668 for (size_t i = 0; i < heap_values.size(); i++) {
Mingyao Yang58d9bfc2016-11-01 13:31:58 -0700669 HInstruction* merged_value = nullptr;
670 // Whether merged_value is a result that's merged from all predecessors.
671 bool from_all_predecessors = true;
672 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
673 HInstruction* singleton_ref = nullptr;
674 if (ref_info->IsSingletonAndNotReturned()) {
675 // We do more analysis of liveness when merging heap values for such
676 // cases since stores into such references may potentially be eliminated.
677 singleton_ref = ref_info->GetReference();
678 }
679
680 for (HBasicBlock* predecessor : predecessors) {
681 HInstruction* pred_value = heap_values_for_[predecessor->GetBlockId()][i];
682 if ((singleton_ref != nullptr) &&
683 !singleton_ref->GetBlock()->Dominates(predecessor)) {
684 // singleton_ref is not live in this predecessor. Skip this predecessor since
685 // it does not really have the location.
686 DCHECK_EQ(pred_value, kUnknownHeapValue);
687 from_all_predecessors = false;
688 continue;
689 }
690 if (merged_value == nullptr) {
691 // First seen heap value.
692 merged_value = pred_value;
693 } else if (pred_value != merged_value) {
694 // There are conflicting values.
695 merged_value = kUnknownHeapValue;
696 break;
Mingyao Yang8df69d42015-10-22 15:40:58 -0700697 }
698 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800699
Mingyao Yang58d9bfc2016-11-01 13:31:58 -0700700 if (merged_value == kUnknownHeapValue) {
701 // There are conflicting heap values from different predecessors.
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800702 // Keep the last store in each predecessor since future loads cannot be eliminated.
Mingyao Yang58d9bfc2016-11-01 13:31:58 -0700703 for (HBasicBlock* predecessor : predecessors) {
704 ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessor->GetBlockId()];
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800705 KeepIfIsStore(pred_values[i]);
706 }
707 }
Mingyao Yang58d9bfc2016-11-01 13:31:58 -0700708
709 if ((merged_value == nullptr) || !from_all_predecessors) {
710 DCHECK(singleton_ref != nullptr);
711 DCHECK((singleton_ref->GetBlock() == block) ||
712 !singleton_ref->GetBlock()->Dominates(block));
713 // singleton_ref is not defined before block or defined only in some of its
714 // predecessors, so block doesn't really have the location at its entry.
715 heap_values[i] = kUnknownHeapValue;
716 } else {
717 heap_values[i] = merged_value;
718 }
Mingyao Yang8df69d42015-10-22 15:40:58 -0700719 }
720 }
721
722 // `instruction` is being removed. Try to see if the null check on it
723 // can be removed. This can happen if the same value is set in two branches
724 // but not in dominators. Such as:
725 // int[] a = foo();
726 // if () {
727 // a[0] = 2;
728 // } else {
729 // a[0] = 2;
730 // }
731 // // a[0] can now be replaced with constant 2, and the null check on it can be removed.
732 void TryRemovingNullCheck(HInstruction* instruction) {
733 HInstruction* prev = instruction->GetPrevious();
734 if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
735 // Previous instruction is a null check for this instruction. Remove the null check.
736 prev->ReplaceWith(prev->InputAt(0));
737 prev->GetBlock()->RemoveInstruction(prev);
738 }
739 }
740
741 HInstruction* GetDefaultValue(Primitive::Type type) {
742 switch (type) {
743 case Primitive::kPrimNot:
744 return GetGraph()->GetNullConstant();
745 case Primitive::kPrimBoolean:
746 case Primitive::kPrimByte:
747 case Primitive::kPrimChar:
748 case Primitive::kPrimShort:
749 case Primitive::kPrimInt:
750 return GetGraph()->GetIntConstant(0);
751 case Primitive::kPrimLong:
752 return GetGraph()->GetLongConstant(0);
753 case Primitive::kPrimFloat:
754 return GetGraph()->GetFloatConstant(0);
755 case Primitive::kPrimDouble:
756 return GetGraph()->GetDoubleConstant(0);
757 default:
758 UNREACHABLE();
759 }
760 }
761
  // Handles a heap load (`instruction`) from the location identified by
  // (ref, offset, index, declaring_class_def_index).
  // If the location's value is already known, the load is recorded for
  // elimination with that value as substitute; otherwise the load itself is
  // cached as the location's value (acting like GVN with aliasing analysis).
  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      // The location still holds its default value: substitute the load with
      // the matching zero/null constant.
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton since it's from the same field
      // that a "removed" store puts the value. That store must be to a singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if (heap_value == kUnknownHeapValue) {
      // Load isn't eliminated. Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    } else {
      if (Primitive::PrimitiveKind(heap_value->GetType())
              != Primitive::PrimitiveKind(instruction->GetType())) {
        // The only situation where the same heap location has different type is when
        // we do an array get on an instruction that originates from the null constant
        // (the null could be behind a field access, an array access, a null check or
        // a bound type).
        // In order to stay properly typed on primitive types, we do not eliminate
        // the array gets.
        if (kIsDebugBuild) {
          DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
          DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
        }
        return;
      }
      // The known heap value replaces this load.
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
    }
  }
814
815 bool Equal(HInstruction* heap_value, HInstruction* value) {
816 if (heap_value == value) {
817 return true;
818 }
819 if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
820 return true;
821 }
822 return false;
823 }
824
  // Handles a heap store (`instruction`) of `value` into the location
  // identified by (ref, offset, index, declaring_class_def_index).
  // Marks the store as possibly removable when it is provably redundant,
  // updates the tracked value of the location, and kills values of
  // potentially aliasing locations.
  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For array element, don't eliminate stores since it can be easily aliased
      // with non-constant index.
    } else if (!heap_location_collector_.MayDeoptimize() &&
               ref_info->IsSingletonAndNotReturned()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // `instruction` is a store in the loop, so the loop must do writes.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());

          if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. Value stored to it isn't needed at
            // the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }
910
911 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
912 HInstruction* obj = instruction->InputAt(0);
913 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
914 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
915 VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
916 }
917
918 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
919 HInstruction* obj = instruction->InputAt(0);
920 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
921 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
922 HInstruction* value = instruction->InputAt(1);
923 VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
924 }
925
926 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
927 HInstruction* cls = instruction->InputAt(0);
928 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
929 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
930 VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
931 }
932
933 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
934 HInstruction* cls = instruction->InputAt(0);
935 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
936 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
937 HInstruction* value = instruction->InputAt(1);
938 VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
939 }
940
941 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
942 HInstruction* array = instruction->InputAt(0);
943 HInstruction* index = instruction->InputAt(1);
944 VisitGetLocation(instruction,
945 array,
946 HeapLocation::kInvalidFieldOffset,
947 index,
948 HeapLocation::kDeclaringClassDefIndexForArrays);
949 }
950
951 void VisitArraySet(HArraySet* instruction) OVERRIDE {
952 HInstruction* array = instruction->InputAt(0);
953 HInstruction* index = instruction->InputAt(1);
954 HInstruction* value = instruction->InputAt(2);
955 VisitSetLocation(instruction,
956 array,
957 HeapLocation::kInvalidFieldOffset,
958 index,
959 HeapLocation::kDeclaringClassDefIndexForArrays,
960 value);
961 }
962
963 void HandleInvoke(HInstruction* invoke) {
964 ArenaVector<HInstruction*>& heap_values =
965 heap_values_for_[invoke->GetBlock()->GetBlockId()];
966 for (size_t i = 0; i < heap_values.size(); i++) {
967 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
968 if (ref_info->IsSingleton()) {
969 // Singleton references cannot be seen by the callee.
970 } else {
971 heap_values[i] = kUnknownHeapValue;
972 }
973 }
974 }
975
  // Calls may touch any non-singleton heap location; see HandleInvoke.
  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
979
  // Calls may touch any non-singleton heap location; see HandleInvoke.
  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
983
  // Calls may touch any non-singleton heap location; see HandleInvoke.
  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
987
  // Calls may touch any non-singleton heap location; see HandleInvoke.
  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }
991
  // A class initializer can run arbitrary code; treat it like an invocation.
  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    HandleInvoke(clinit);
  }
995
  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation: the resolved access could
    // alias any non-singleton location.
    HandleInvoke(instruction);
  }
1000
  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation: the resolved access could
    // alias any non-singleton location.
    HandleInvoke(instruction);
  }
1005
  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation: the resolved access could
    // alias any non-singleton location.
    HandleInvoke(instruction);
  }
1010
  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation: the resolved access could
    // alias any non-singleton location.
    HandleInvoke(instruction);
  }
1015
1016 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
1017 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
1018 if (ref_info == nullptr) {
1019 // new_instance isn't used for field accesses. No need to process it.
1020 return;
1021 }
1022 if (!heap_location_collector_.MayDeoptimize() &&
Mingyao Yangfb8464a2015-11-02 10:56:59 -08001023 ref_info->IsSingletonAndNotReturned() &&
1024 !new_instance->IsFinalizable() &&
Mingyao Yang062157f2016-03-02 10:15:36 -08001025 !new_instance->NeedsAccessCheck()) {
1026 singleton_new_instances_.push_back(new_instance);
Mingyao Yang8df69d42015-10-22 15:40:58 -07001027 }
1028 ArenaVector<HInstruction*>& heap_values =
1029 heap_values_for_[new_instance->GetBlock()->GetBlockId()];
1030 for (size_t i = 0; i < heap_values.size(); i++) {
1031 HInstruction* ref =
1032 heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
1033 size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
1034 if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
1035 // Instance fields except the header fields are set to default heap values.
1036 heap_values[i] = kDefaultHeapValue;
1037 }
1038 }
1039 }
1040
1041 // Find an instruction's substitute if it should be removed.
1042 // Return the same instruction if it should not be removed.
1043 HInstruction* FindSubstitute(HInstruction* instruction) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -08001044 size_t size = removed_loads_.size();
Mingyao Yang8df69d42015-10-22 15:40:58 -07001045 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -08001046 if (removed_loads_[i] == instruction) {
1047 return substitute_instructions_for_loads_[i];
Mingyao Yang8df69d42015-10-22 15:40:58 -07001048 }
1049 }
1050 return instruction;
1051 }
1052
  // Heap-location and aliasing information computed before this visitor runs.
  const HeapLocationCollector& heap_location_collector_;
  // Per-loop side-effect summary, used to reason about stores inside loops.
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;

  // We record the instructions that should be eliminated but may be
  // used by heap locations. They'll be removed in the end.
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  // Singleton allocations collected as candidates for elimination.
  ArenaVector<HInstruction*> singleton_new_instances_;

  DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
};
1072
1073void LoadStoreElimination::Run() {
David Brazdil8993caf2015-12-07 10:04:40 +00001074 if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
Mingyao Yang8df69d42015-10-22 15:40:58 -07001075 // Debugger may set heap values or trigger deoptimization of callers.
David Brazdil8993caf2015-12-07 10:04:40 +00001076 // Try/catch support not implemented yet.
Mingyao Yang8df69d42015-10-22 15:40:58 -07001077 // Skip this optimization.
1078 return;
1079 }
1080 HeapLocationCollector heap_location_collector(graph_);
Vladimir Marko2c45bc92016-10-25 16:54:12 +01001081 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1082 heap_location_collector.VisitBasicBlock(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -07001083 }
1084 if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
1085 // Bail out if there are too many heap locations to deal with.
1086 return;
1087 }
1088 if (!heap_location_collector.HasHeapStores()) {
1089 // Without heap stores, this pass would act mostly as GVN on heap accesses.
1090 return;
1091 }
1092 if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
1093 // Don't do load/store elimination if the method has volatile field accesses or
1094 // monitor operations, for now.
1095 // TODO: do it right.
1096 return;
1097 }
1098 heap_location_collector.BuildAliasingMatrix();
1099 LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
Vladimir Marko2c45bc92016-10-25 16:54:12 +01001100 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1101 lse_visitor.VisitBasicBlock(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -07001102 }
1103 lse_visitor.RemoveInstructions();
1104}
1105
1106} // namespace art