/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "load_store_elimination.h"

#include "escape.h"
#include "side_effects_analysis.h"

#include <iostream>

namespace art {

class ReferenceInfo;

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;

// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
 public:
  ReferenceInfo(HInstruction* reference, size_t pos)
      : reference_(reference),
        position_(pos),
        is_singleton_(true),
        is_singleton_and_not_returned_(true),
        is_singleton_and_not_deopt_visible_(true) {
    CalculateEscape(reference_,
                    nullptr,
                    &is_singleton_,
                    &is_singleton_and_not_returned_,
                    &is_singleton_and_not_deopt_visible_);
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed to not have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and not returned to the caller or
  // used as an environment local of an HDeoptimize instruction.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndRemovable() const {
    return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
  }
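
  // For illustration only (a sketch in Java-like pseudocode, not code from this file):
  //   void f() { Point p = new Point(); p.x = 1; }   // p is a singleton and removable.
  //   Point g() { return new Point(); }              // the allocation escapes by being returned,
  //                                                  // so it is not removable.
  //   void h(Point p) { p.x = 1; }                   // a parameter may already be aliased,
  //                                                  // so it is not a singleton.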

 private:
  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.

  bool is_singleton_;                        // can only be referred to by a single name in the method,
  bool is_singleton_and_not_returned_;       // and not returned to caller,
  bool is_singleton_and_not_deopt_visible_;  // and not used as an environment local of HDeoptimize.

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
class HeapLocation : public ArenaObject<kArenaAllocMisc> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;

  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               size_t offset,
               HInstruction* index,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        offset_(offset),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        value_killed_by_loop_side_effects_(true) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
    if (ref_info->IsSingleton() && !IsArrayElement()) {
      // Assume this location's value cannot be killed by loop side effects
      // until proven otherwise.
      value_killed_by_loop_side_effects_ = false;
    }
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }

  // Returns the definition of declaring class' dex index.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArrayElement() const {
    return index_ != nullptr;
  }

  bool IsValueKilledByLoopSideEffects() const {
    return value_killed_by_loop_side_effects_;
  }

  void SetValueKilledByLoopSideEffects(bool val) {
    value_killed_by_loop_side_effects_ = val;
  }

 private:
  ReferenceInfo* const ref_info_;            // reference for instance/static field or array access.
  const size_t offset_;                      // offset of static/instance field.
  HInstruction* const index_;                // index of an array element.
  const int16_t declaring_class_def_index_;  // declaring class's def's dex index.
  bool value_killed_by_loop_side_effects_;   // value of this location may be killed by loop
                                             // side effects because this location is stored
                                             // into inside a loop. This gives
                                             // better info on whether a singleton's location
                                             // value may be killed by loop side effects.

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};

static HInstruction* HuntForOriginalReference(HInstruction* ref) {
  DCHECK(ref != nullptr);
  while (ref->IsNullCheck() || ref->IsBoundType()) {
    ref = ref->InputAt(0);
  }
  return ref;
}

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for pair-wise
  // aliasing matrix of 8 heap locations.
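  // (8 locations give 8 * 7 / 2 = 28 distinct pairs, which fits in one 32-bit word.)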
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  explicit HeapLocationCollector(HGraph* graph)
      : HGraphVisitor(graph),
        ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        aliasing_matrix_(graph->GetArena(),
                         kInitialAliasingMatrixBitVectorSize,
                         true,
                         kArenaAllocLSE),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false) {}

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Find and return the heap location index in heap_locations_.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               size_t offset,
                               HInstruction* index,
                               int16_t declaring_class_def_index) const {
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
               !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
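    // For example, with 4 locations the pairs are laid out in the order
    // (0,1) (0,2) (0,3) (1,2) (1,3) (2,3) at positions 0..5, so
    // AliasingMatrixPosition(1, 3) = 4 * 1 - 2 * 1 / 2 + (3 - 1 - 1) = 4.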
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

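  // For intuition (an informal summary of the checks below): p.x and p.y never alias because
  // their offsets differ; a[i] and a[j] may alias unless i and j are different constants;
  // p.f and q.f may alias unless p or q is known to be a singleton.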
  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is an array element.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
      HInstruction* array_index1 = loc1->GetIndex();
      HInstruction* array_index2 = loc2->GetIndex();
      DCHECK(array_index1 != nullptr);
      DCHECK(array_index2 != nullptr);
      if (array_index1->IsIntConstant() &&
          array_index2->IsIntConstant() &&
          array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
        // Different constant indices do not alias.
        return false;
      }
    }
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != Primitive::kPrimNot) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
                                        size_t offset,
                                        HInstruction* index,
                                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (GetGraph()->GetArena())
          HeapLocation(ref_info, offset, index, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
      return heap_loc;
    }
    return heap_locations_[heap_location_idx];
  }

  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array, HInstruction* index) {
    GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
        index, HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
    if (location->GetReferenceInfo()->IsSingleton()) {
      // A singleton's location value may be killed by loop side effects if it's
      // defined before that loop, and it's stored into inside that loop.
      HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
      if (loop_info != nullptr) {
        HInstruction* ref = location->GetReferenceInfo()->GetReference();
        DCHECK(ref->IsNewInstance());
        if (loop_info->IsDefinedOutOfTheLoop(ref)) {
          // ref's location value may be killed by this loop's side effects.
          location->SetValueKilledByLoopSideEffects(true);
        } else {
          // ref is defined inside this loop so this loop's side effects cannot
          // kill its location value at the loop header since ref/its location doesn't
          // exist yet at the loop header.
        }
      }
    } else {
      // For non-singletons, value_killed_by_loop_side_effects_ is initialized to
      // true.
      DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
    }
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    has_heap_stores_ = true;
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
    CreateReferenceInfoForReferenceType(new_instance);
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitSelect(HSelect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
    has_monitor_operations_ = true;
  }

  ArenaVector<ReferenceInfo*> ref_info_array_;  // All references used for heap accesses.
  ArenaVector<HeapLocation*> heap_locations_;   // All heap locations.
  ArenaBitVector aliasing_matrix_;              // aliasing info between each pair of locations.
  bool has_heap_stores_;         // If there are no heap stores, LSE acts as GVN with better
                                 // alias analysis and won't be as effective.
  bool has_volatile_;            // If there are volatile field accesses.
  bool has_monitor_operations_;  // If there are monitor operations.

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};

// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
// A heap location can be set to kUnknownHeapValue when:
// - it is the initial value, before any store to the location has been seen.
// - it is killed due to aliasing, merging, invocation, or loop side effects.
static HInstruction* const kUnknownHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));

// Default heap value after an allocation.
// A heap location can be set to that value right after an allocation.
static HInstruction* const kDefaultHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
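
// For illustration only (a sketch, not code from this file): right after "Point p = new Point();",
// every field location of p holds kDefaultHeapValue, so a following read of p.x can be replaced
// by the constant 0 without touching memory.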

class LSEVisitor : public HGraphVisitor {
 public:
  LSEVisitor(HGraph* graph,
             const HeapLocationCollector& heap_locations_collector,
             const SideEffectsAnalysis& side_effects)
      : HGraphVisitor(graph),
        heap_location_collector_(heap_locations_collector),
        side_effects_(side_effects),
        heap_values_for_(graph->GetBlocks().size(),
                         ArenaVector<HInstruction*>(heap_locations_collector.
                                                        GetNumberOfHeapLocations(),
                                                    kUnknownHeapValue,
                                                    graph->GetArena()->Adapter(kArenaAllocLSE)),
                         graph->GetArena()->Adapter(kArenaAllocLSE)),
        removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
  }

  void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
    // Populate the heap_values array for this block.
    // TODO: try to reuse the heap_values array from one predecessor if possible.
    if (block->IsLoopHeader()) {
      HandleLoopSideEffects(block);
    } else {
      MergePredecessorValues(block);
    }
    HGraphVisitor::VisitBasicBlock(block);
  }

  // Remove recorded instructions that should be eliminated.
  void RemoveInstructions() {
    size_t size = removed_loads_.size();
    DCHECK_EQ(size, substitute_instructions_for_loads_.size());
    for (size_t i = 0; i < size; i++) {
      HInstruction* load = removed_loads_[i];
      DCHECK(load != nullptr);
      DCHECK(load->IsInstanceFieldGet() ||
             load->IsStaticFieldGet() ||
             load->IsArrayGet());
      HInstruction* substitute = substitute_instructions_for_loads_[i];
      DCHECK(substitute != nullptr);
      // Keep tracing the substitute until reaching one that has not been removed.
      HInstruction* sub_sub = FindSubstitute(substitute);
      while (sub_sub != substitute) {
        substitute = sub_sub;
        sub_sub = FindSubstitute(substitute);
      }
      load->ReplaceWith(substitute);
      load->GetBlock()->RemoveInstruction(load);
    }

    // At this point, stores in possibly_removed_stores_ can be safely removed.
    for (size_t i = 0, e = possibly_removed_stores_.size(); i < e; i++) {
      HInstruction* store = possibly_removed_stores_[i];
      DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
      store->GetBlock()->RemoveInstruction(store);
    }

    // Eliminate allocations that are not used.
    for (size_t i = 0, e = singleton_new_instances_.size(); i < e; i++) {
      HInstruction* new_instance = singleton_new_instances_[i];
      if (!new_instance->HasNonEnvironmentUses()) {
        new_instance->RemoveEnvironmentUsers();
        new_instance->GetBlock()->RemoveInstruction(new_instance);
      }
    }
  }

 private:
  // If heap_values[index] is an instance field store, we need to keep the store.
  // This is necessary if a heap value is killed due to merging, or loop side
  // effects (which is essentially merging also), since a load later from the
  // location won't be eliminated.
  void KeepIfIsStore(HInstruction* heap_value) {
    if (heap_value == kDefaultHeapValue ||
        heap_value == kUnknownHeapValue ||
        !heap_value->IsInstanceFieldSet()) {
      return;
    }
    auto idx = std::find(possibly_removed_stores_.begin(),
        possibly_removed_stores_.end(), heap_value);
    if (idx != possibly_removed_stores_.end()) {
      // Make sure the store is kept.
      possibly_removed_stores_.erase(idx);
    }
  }

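  // For illustration only (a sketch, not code from this file): in
  //   p.x = 1;
  //   for (...) { use(p.x); p.x = 2; }
  // the store inside the loop kills p.x's value at the loop header, so the read of p.x in the
  // loop body cannot be replaced by the constant 1 from before the loop.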
  void HandleLoopSideEffects(HBasicBlock* block) {
    DCHECK(block->IsLoopHeader());
    int block_id = block->GetBlockId();
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];

    // Don't eliminate loads in irreducible loops. This is safe for singletons, because
    // they are always used by the non-eliminated loop-phi.
    if (block->GetLoopInformation()->IsIrreducible()) {
      if (kIsDebugBuild) {
        for (size_t i = 0; i < heap_values.size(); i++) {
          DCHECK_EQ(heap_values[i], kUnknownHeapValue);
        }
      }
      return;
    }

    HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
    ArenaVector<HInstruction*>& pre_header_heap_values =
        heap_values_for_[pre_header->GetBlockId()];

    // Inherit the values from pre-header.
    for (size_t i = 0; i < heap_values.size(); i++) {
      heap_values[i] = pre_header_heap_values[i];
    }

    // We do a single pass in reverse post order. For loops, use the side effects as a hint
    // to see if the heap values should be killed.
    if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
      for (size_t i = 0; i < heap_values.size(); i++) {
        HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
        ReferenceInfo* ref_info = location->GetReferenceInfo();
        if (!ref_info->IsSingleton() || location->IsValueKilledByLoopSideEffects()) {
          // heap value is killed by loop side effects (stored into directly, or due to
          // aliasing).
          KeepIfIsStore(pre_header_heap_values[i]);
          heap_values[i] = kUnknownHeapValue;
        } else {
          // A singleton's field that's not stored into inside a loop is invariant throughout
          // the loop.
        }
      }
    }
  }

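  // For illustration only (a sketch, not code from this file): for
  //   if (cond) { p.x = 1; } else { p.x = 2; }
  //   int y = p.x;
  // the two predecessors carry conflicting values for p.x, so the merged value becomes
  // kUnknownHeapValue, the later load is not eliminated, and the stores in both branches are kept.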
  void MergePredecessorValues(HBasicBlock* block) {
    const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
    if (predecessors.size() == 0) {
      return;
    }

    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* merged_value = nullptr;
      // Whether merged_value is a result that's merged from all predecessors.
      bool from_all_predecessors = true;
      ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
      HInstruction* singleton_ref = nullptr;
      if (ref_info->IsSingletonAndRemovable()) {
        // We do more analysis of liveness when merging heap values for such
        // cases since stores into such references may potentially be eliminated.
        singleton_ref = ref_info->GetReference();
      }

      for (HBasicBlock* predecessor : predecessors) {
        HInstruction* pred_value = heap_values_for_[predecessor->GetBlockId()][i];
        if ((singleton_ref != nullptr) &&
            !singleton_ref->GetBlock()->Dominates(predecessor)) {
          // singleton_ref is not live in this predecessor. Skip this predecessor since
          // it does not really have the location.
          DCHECK_EQ(pred_value, kUnknownHeapValue);
          from_all_predecessors = false;
          continue;
        }
        if (merged_value == nullptr) {
          // First seen heap value.
          merged_value = pred_value;
        } else if (pred_value != merged_value) {
          // There are conflicting values.
          merged_value = kUnknownHeapValue;
          break;
        }
      }

      if (merged_value == kUnknownHeapValue) {
        // There are conflicting heap values from different predecessors.
        // Keep the last store in each predecessor since future loads cannot be eliminated.
        for (HBasicBlock* predecessor : predecessors) {
          ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessor->GetBlockId()];
          KeepIfIsStore(pred_values[i]);
        }
      }

      if ((merged_value == nullptr) || !from_all_predecessors) {
        DCHECK(singleton_ref != nullptr);
        DCHECK((singleton_ref->GetBlock() == block) ||
               !singleton_ref->GetBlock()->Dominates(block));
        // singleton_ref is not defined before block or defined only in some of its
        // predecessors, so block doesn't really have the location at its entry.
        heap_values[i] = kUnknownHeapValue;
      } else {
        heap_values[i] = merged_value;
      }
    }
  }

  // `instruction` is being removed. Try to see if the null check on it
  // can be removed. This can happen if the same value is set in two branches
  // but not in dominators. Such as:
  //   int[] a = foo();
  //   if () {
  //     a[0] = 2;
  //   } else {
  //     a[0] = 2;
  //   }
  //   // a[0] can now be replaced with constant 2, and the null check on it can be removed.
  void TryRemovingNullCheck(HInstruction* instruction) {
    HInstruction* prev = instruction->GetPrevious();
    if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
      // Previous instruction is a null check for this instruction. Remove the null check.
      prev->ReplaceWith(prev->InputAt(0));
      prev->GetBlock()->RemoveInstruction(prev);
    }
  }

  HInstruction* GetDefaultValue(Primitive::Type type) {
    switch (type) {
      case Primitive::kPrimNot:
        return GetGraph()->GetNullConstant();
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        return GetGraph()->GetIntConstant(0);
      case Primitive::kPrimLong:
        return GetGraph()->GetLongConstant(0);
      case Primitive::kPrimFloat:
        return GetGraph()->GetFloatConstant(0);
      case Primitive::kPrimDouble:
        return GetGraph()->GetDoubleConstant(0);
      default:
        UNREACHABLE();
    }
  }

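  // For illustration only (a sketch, not code from this file): in "int a = p.x; int b = p.x;"
  // the second load sees the first load recorded as the heap value for p.x and is replaced by
  // it, which is the GVN-like behavior mentioned below.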
  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton since it's from the same field
      // that a "removed" store put the value into, and that store must be to a singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if (heap_value == kUnknownHeapValue) {
      // Load isn't eliminated. Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    } else {
      if (Primitive::PrimitiveKind(heap_value->GetType())
              != Primitive::PrimitiveKind(instruction->GetType())) {
        // The only situation where the same heap location has different type is when
        // we do an array get on an instruction that originates from the null constant
        // (the null could be behind a field access, an array access, a null check or
        // a bound type).
        // In order to stay properly typed on primitive types, we do not eliminate
        // the array gets.
        if (kIsDebugBuild) {
          DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
          DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
        }
        return;
      }
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
    }
  }

  bool Equal(HInstruction* heap_value, HInstruction* value) {
    if (heap_value == value) {
      return true;
    }
    if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
      return true;
    }
    return false;
  }

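  // For illustration only (a sketch, not code from this file): in
  //   Point p = new Point(); p.x = 1; p.x = 2; return p.x;
  // the first store is overwritten before it is ever read, the load of p.x can be replaced by
  // the constant 2, and if p never escapes the remaining store and the allocation itself can
  // also be removed.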
  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For an array element, don't eliminate stores since it can easily be aliased
      // through a non-constant index.
    } else if (ref_info->IsSingletonAndRemovable()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // instruction is a store inside the loop, so the loop must do writes.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());

          if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. Value stored to it isn't needed at
            // the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    HInstruction* obj = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HInstruction* obj = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    HInstruction* value = instruction->InputAt(1);
    VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    HInstruction* cls = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    HInstruction* cls = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    HInstruction* value = instruction->InputAt(1);
    VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
  }

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    VisitGetLocation(instruction,
                     array,
                     HeapLocation::kInvalidFieldOffset,
                     index,
                     HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    HInstruction* value = instruction->InputAt(2);
    VisitSetLocation(instruction,
                     array,
                     HeapLocation::kInvalidFieldOffset,
                     index,
                     HeapLocation::kDeclaringClassDefIndexForArrays,
                     value);
  }

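  // For illustration only (a sketch, not code from this file): in "obj.x = 1; foo(); int y = obj.x;"
  // the call to foo() may read or write obj.x if obj is not a singleton, so its heap value is
  // reset to kUnknownHeapValue and the later load is not eliminated.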
  void HandleInvoke(HInstruction* invoke) {
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[invoke->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
      if (ref_info->IsSingleton()) {
        // Singleton references cannot be seen by the callee.
      } else {
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    HandleInvoke(clinit);
  }

  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
    if (ref_info == nullptr) {
      // new_instance isn't used for field accesses. No need to process it.
      return;
    }
    if (ref_info->IsSingletonAndRemovable() &&
        !new_instance->IsFinalizable() &&
        !new_instance->NeedsAccessCheck()) {
      singleton_new_instances_.push_back(new_instance);
    }
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[new_instance->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* ref =
          heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
      size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
      if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
        // Instance fields except the header fields are set to default heap values.
        heap_values[i] = kDefaultHeapValue;
      }
    }
  }

  // Find an instruction's substitute if it should be removed.
  // Return the same instruction if it should not be removed.
  HInstruction* FindSubstitute(HInstruction* instruction) {
    size_t size = removed_loads_.size();
    for (size_t i = 0; i < size; i++) {
      if (removed_loads_[i] == instruction) {
        return substitute_instructions_for_loads_[i];
      }
    }
    return instruction;
  }

  const HeapLocationCollector& heap_location_collector_;
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;

  // We record the instructions that should be eliminated but may be
  // used by heap locations. They'll be removed in the end.
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  ArenaVector<HInstruction*> singleton_new_instances_;

  DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
};

void LoadStoreElimination::Run() {
  if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
    // Debugger may set heap values or trigger deoptimization of callers.
    // Try/catch support not implemented yet.
    // Skip this optimization.
    return;
  }
  HeapLocationCollector heap_location_collector(graph_);
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    heap_location_collector.VisitBasicBlock(block);
  }
  if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
    // Bail out if there are too many heap locations to deal with.
    return;
  }
  if (!heap_location_collector.HasHeapStores()) {
    // Without heap stores, this pass would act mostly as GVN on heap accesses.
    return;
  }
  if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
    // Don't do load/store elimination if the method has volatile field accesses or
    // monitor operations, for now.
    // TODO: do it right.
    return;
  }
  heap_location_collector.BuildAliasingMatrix();
  LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    lse_visitor.VisitBasicBlock(block);
  }
  lse_visitor.RemoveInstructions();
}

}  // namespace art