blob: 5b89cfef5aedd2315aa3760485d02468536fa53a [file] [log] [blame]
Mingyao Yang8df69d42015-10-22 15:40:58 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "load_store_elimination.h"
18#include "side_effects_analysis.h"
19
20#include <iostream>
21
22namespace art {
23
// Forward declaration so that helpers below can refer to ReferenceInfo
// before its definition.
class ReferenceInfo;

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;
29
// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
 public:
  // Performs a use-based escape analysis of `reference` at construction time.
  // `reference` is the instruction producing the reference; `pos` is its slot
  // in HeapLocationCollector's ref_info_array_ (see GetPosition()).
  ReferenceInfo(HInstruction* reference, size_t pos) : reference_(reference), position_(pos) {
    // Start optimistic; demote below as soon as a disqualifying use is seen.
    is_singleton_ = true;
    is_singleton_and_not_returned_ = true;
    if (!reference_->IsNewInstance() && !reference_->IsNewArray()) {
      // For references not allocated in the method, don't assume anything.
      is_singleton_ = false;
      is_singleton_and_not_returned_ = false;
      return;
    }

    // Visit all uses to determine if this reference can spread into the heap,
    // a method call, etc.
    for (HUseIterator<HInstruction*> use_it(reference_->GetUses());
         !use_it.Done();
         use_it.Advance()) {
      HInstruction* use = use_it.Current()->GetUser();
      DCHECK(!use->IsNullCheck()) << "NullCheck should have been eliminated";
      if (use->IsBoundType()) {
        // BoundType shouldn't normally be necessary for a NewInstance.
        // Just be conservative for the uncommon cases.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      // For the *Set cases, the InputAt() index selects the stored-value
      // operand: escaping requires reference_ to be the value being stored,
      // not merely the base object of the store.
      if (use->IsPhi() || use->IsInvoke() ||
          (use->IsInstanceFieldSet() && (reference_ == use->InputAt(1))) ||
          (use->IsUnresolvedInstanceFieldSet() && (reference_ == use->InputAt(1))) ||
          (use->IsStaticFieldSet() && (reference_ == use->InputAt(1))) ||
          (use->IsUnresolvedStaticFieldSet() && (reference_ == use->InputAt(0))) ||
          (use->IsArraySet() && (reference_ == use->InputAt(2)))) {
        // reference_ is merged to a phi, passed to a callee, or stored to heap.
        // reference_ isn't the only name that can refer to its value anymore.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if (use->IsReturn()) {
        // Still a singleton inside the method, but the caller gets an alias.
        is_singleton_and_not_returned_ = false;
      }
    }
  }

  // The instruction that produces the reference being described.
  HInstruction* GetReference() const {
    return reference_;
  }

  // Slot of this entry in HeapLocationCollector's ref_info_array_; also used
  // as a visitation-order stamp for pre-existence checks.
  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed to not have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and not returned to the caller.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndNotReturned() const {
    return is_singleton_and_not_returned_;
  }

 private:
  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.
  bool is_singleton_;  // can only be referred to by a single name in the method.
  bool is_singleton_and_not_returned_;  // reference_ is singleton and not returned to caller.

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};
105
106// A heap location is a reference-offset/index pair that a value can be loaded from
107// or stored to.
108class HeapLocation : public ArenaObject<kArenaAllocMisc> {
109 public:
110 static constexpr size_t kInvalidFieldOffset = -1;
111
112 // TODO: more fine-grained array types.
113 static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
114
115 HeapLocation(ReferenceInfo* ref_info,
116 size_t offset,
117 HInstruction* index,
118 int16_t declaring_class_def_index)
119 : ref_info_(ref_info),
120 offset_(offset),
121 index_(index),
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800122 declaring_class_def_index_(declaring_class_def_index) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700123 DCHECK(ref_info != nullptr);
124 DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
125 (offset != kInvalidFieldOffset && index == nullptr));
Mingyao Yang8df69d42015-10-22 15:40:58 -0700126 }
127
128 ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
129 size_t GetOffset() const { return offset_; }
130 HInstruction* GetIndex() const { return index_; }
131
132 // Returns the definition of declaring class' dex index.
133 // It's kDeclaringClassDefIndexForArrays for an array element.
134 int16_t GetDeclaringClassDefIndex() const {
135 return declaring_class_def_index_;
136 }
137
138 bool IsArrayElement() const {
139 return index_ != nullptr;
140 }
141
Mingyao Yang8df69d42015-10-22 15:40:58 -0700142 private:
143 ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
144 const size_t offset_; // offset of static/instance field.
145 HInstruction* const index_; // index of an array element.
146 const int16_t declaring_class_def_index_; // declaring class's def's dex index.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700147
148 DISALLOW_COPY_AND_ASSIGN(HeapLocation);
149};
150
151static HInstruction* HuntForOriginalReference(HInstruction* ref) {
152 DCHECK(ref != nullptr);
153 while (ref->IsNullCheck() || ref->IsBoundType()) {
154 ref = ref->InputAt(0);
155 }
156 return ref;
157}
158
159// A HeapLocationCollector collects all relevant heap locations and keeps
160// an aliasing matrix for all locations.
161class HeapLocationCollector : public HGraphVisitor {
162 public:
163 static constexpr size_t kHeapLocationNotFound = -1;
164 // Start with a single uint32_t word. That's enough bits for pair-wise
165 // aliasing matrix of 8 heap locations.
166 static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
167
168 explicit HeapLocationCollector(HGraph* graph)
169 : HGraphVisitor(graph),
170 ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
171 heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
172 aliasing_matrix_(graph->GetArena(), kInitialAliasingMatrixBitVectorSize, true),
173 has_heap_stores_(false),
174 has_volatile_(false),
175 has_monitor_operations_(false),
176 may_deoptimize_(false) {}
177
178 size_t GetNumberOfHeapLocations() const {
179 return heap_locations_.size();
180 }
181
182 HeapLocation* GetHeapLocation(size_t index) const {
183 return heap_locations_[index];
184 }
185
186 ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
187 for (size_t i = 0; i < ref_info_array_.size(); i++) {
188 ReferenceInfo* ref_info = ref_info_array_[i];
189 if (ref_info->GetReference() == ref) {
190 DCHECK_EQ(i, ref_info->GetPosition());
191 return ref_info;
192 }
193 }
194 return nullptr;
195 }
196
197 bool HasHeapStores() const {
198 return has_heap_stores_;
199 }
200
201 bool HasVolatile() const {
202 return has_volatile_;
203 }
204
205 bool HasMonitorOps() const {
206 return has_monitor_operations_;
207 }
208
209 // Returns whether this method may be deoptimized.
210 // Currently we don't have meta data support for deoptimizing
211 // a method that eliminates allocations/stores.
212 bool MayDeoptimize() const {
213 return may_deoptimize_;
214 }
215
216 // Find and return the heap location index in heap_locations_.
217 size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
218 size_t offset,
219 HInstruction* index,
220 int16_t declaring_class_def_index) const {
221 for (size_t i = 0; i < heap_locations_.size(); i++) {
222 HeapLocation* loc = heap_locations_[i];
223 if (loc->GetReferenceInfo() == ref_info &&
224 loc->GetOffset() == offset &&
225 loc->GetIndex() == index &&
226 loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
227 return i;
228 }
229 }
230 return kHeapLocationNotFound;
231 }
232
233 // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
234 bool MayAlias(size_t index1, size_t index2) const {
235 if (index1 < index2) {
236 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
237 } else if (index1 > index2) {
238 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
239 } else {
240 DCHECK(false) << "index1 and index2 are expected to be different";
241 return true;
242 }
243 }
244
245 void BuildAliasingMatrix() {
246 const size_t number_of_locations = heap_locations_.size();
247 if (number_of_locations == 0) {
248 return;
249 }
250 size_t pos = 0;
251 // Compute aliasing info between every pair of different heap locations.
252 // Save the result in a matrix represented as a BitVector.
253 for (size_t i = 0; i < number_of_locations - 1; i++) {
254 for (size_t j = i + 1; j < number_of_locations; j++) {
255 if (ComputeMayAlias(i, j)) {
256 aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
257 }
258 pos++;
259 }
260 }
261 }
262
263 private:
264 // An allocation cannot alias with a name which already exists at the point
265 // of the allocation, such as a parameter or a load happening before the allocation.
266 bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
267 if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
268 // Any reference that can alias with the allocation must appear after it in the block/in
269 // the block's successors. In reverse post order, those instructions will be visited after
270 // the allocation.
271 return ref_info2->GetPosition() >= ref_info1->GetPosition();
272 }
273 return true;
274 }
275
276 bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
277 if (ref_info1 == ref_info2) {
278 return true;
279 } else if (ref_info1->IsSingleton()) {
280 return false;
281 } else if (ref_info2->IsSingleton()) {
282 return false;
283 } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
284 !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
285 return false;
286 }
287 return true;
288 }
289
290 // `index1` and `index2` are indices in the array of collected heap locations.
291 // Returns the position in the bit vector that tracks whether the two heap
292 // locations may alias.
293 size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
294 DCHECK(index2 > index1);
295 const size_t number_of_locations = heap_locations_.size();
296 // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
297 return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
298 }
299
300 // An additional position is passed in to make sure the calculated position is correct.
301 size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
302 size_t calculated_position = AliasingMatrixPosition(index1, index2);
303 DCHECK_EQ(calculated_position, position);
304 return calculated_position;
305 }
306
307 // Compute if two locations may alias to each other.
308 bool ComputeMayAlias(size_t index1, size_t index2) const {
309 HeapLocation* loc1 = heap_locations_[index1];
310 HeapLocation* loc2 = heap_locations_[index2];
311 if (loc1->GetOffset() != loc2->GetOffset()) {
312 // Either two different instance fields, or one is an instance
313 // field and the other is an array element.
314 return false;
315 }
316 if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
317 // Different types.
318 return false;
319 }
320 if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
321 return false;
322 }
323 if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
324 HInstruction* array_index1 = loc1->GetIndex();
325 HInstruction* array_index2 = loc2->GetIndex();
326 DCHECK(array_index1 != nullptr);
327 DCHECK(array_index2 != nullptr);
328 if (array_index1->IsIntConstant() &&
329 array_index2->IsIntConstant() &&
330 array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
331 // Different constant indices do not alias.
332 return false;
333 }
334 }
335 return true;
336 }
337
338 ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* ref) {
339 ReferenceInfo* ref_info = FindReferenceInfoOf(ref);
340 if (ref_info == nullptr) {
341 size_t pos = ref_info_array_.size();
342 ref_info = new (GetGraph()->GetArena()) ReferenceInfo(ref, pos);
343 ref_info_array_.push_back(ref_info);
344 }
345 return ref_info;
346 }
347
348 HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
349 size_t offset,
350 HInstruction* index,
351 int16_t declaring_class_def_index) {
352 HInstruction* original_ref = HuntForOriginalReference(ref);
353 ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
354 size_t heap_location_idx = FindHeapLocationIndex(
355 ref_info, offset, index, declaring_class_def_index);
356 if (heap_location_idx == kHeapLocationNotFound) {
357 HeapLocation* heap_loc = new (GetGraph()->GetArena())
358 HeapLocation(ref_info, offset, index, declaring_class_def_index);
359 heap_locations_.push_back(heap_loc);
360 return heap_loc;
361 }
362 return heap_locations_[heap_location_idx];
363 }
364
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800365 void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
Mingyao Yang8df69d42015-10-22 15:40:58 -0700366 if (field_info.IsVolatile()) {
367 has_volatile_ = true;
368 }
369 const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
370 const size_t offset = field_info.GetFieldOffset().SizeValue();
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800371 GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700372 }
373
374 void VisitArrayAccess(HInstruction* array, HInstruction* index) {
375 GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
376 index, HeapLocation::kDeclaringClassDefIndexForArrays);
377 }
378
379 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800380 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700381 }
382
383 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800384 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700385 has_heap_stores_ = true;
386 }
387
388 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800389 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700390 }
391
392 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800393 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700394 has_heap_stores_ = true;
395 }
396
397 // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
398 // since we cannot accurately track the fields.
399
400 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
401 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
402 }
403
404 void VisitArraySet(HArraySet* instruction) OVERRIDE {
405 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
406 has_heap_stores_ = true;
407 }
408
409 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
410 // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
411 GetOrCreateReferenceInfo(new_instance);
412 }
413
414 void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) OVERRIDE {
415 may_deoptimize_ = true;
416 }
417
418 void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
419 has_monitor_operations_ = true;
420 }
421
422 ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
423 ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
424 ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
425 bool has_heap_stores_; // If there is no heap stores, LSE acts as GVN with better
426 // alias analysis and won't be as effective.
427 bool has_volatile_; // If there are volatile field accesses.
428 bool has_monitor_operations_; // If there are monitor operations.
429 bool may_deoptimize_;
430
431 DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
432};
433
// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
// A heap location can be set to kUnknownHeapValue when:
// - initially set a value.
// - killed due to aliasing, merging, invocation, or loop side effects.
// The pointer is a sentinel built from an address no arena-allocated
// HInstruction can have; it is compared, never dereferenced.
static HInstruction* const kUnknownHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));

// Default heap value after an allocation.
// A heap location can be set to that value right after an allocation.
// Like kUnknownHeapValue, this is a compare-only sentinel.
static HInstruction* const kDefaultHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
445
446class LSEVisitor : public HGraphVisitor {
447 public:
  // Builds the visitor state: one heap-value array per basic block, each
  // sized to the number of collected heap locations and initialized to
  // kUnknownHeapValue. All side vectors are arena-allocated with the
  // LSE allocation tag.
  LSEVisitor(HGraph* graph,
             const HeapLocationCollector& heap_locations_collector,
             const SideEffectsAnalysis& side_effects)
      : HGraphVisitor(graph),
        heap_location_collector_(heap_locations_collector),
        side_effects_(side_effects),
        heap_values_for_(graph->GetBlocks().size(),
                         ArenaVector<HInstruction*>(heap_locations_collector.
                                                        GetNumberOfHeapLocations(),
                                                    kUnknownHeapValue,
                                                    graph->GetArena()->Adapter(kArenaAllocLSE)),
                         graph->GetArena()->Adapter(kArenaAllocLSE)),
        removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
  }
465
466 void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800467 // Populate the heap_values array for this block.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700468 // TODO: try to reuse the heap_values array from one predecessor if possible.
469 if (block->IsLoopHeader()) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800470 HandleLoopSideEffects(block);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700471 } else {
472 MergePredecessorValues(block);
473 }
474 HGraphVisitor::VisitBasicBlock(block);
475 }
476
477 // Remove recorded instructions that should be eliminated.
478 void RemoveInstructions() {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800479 size_t size = removed_loads_.size();
480 DCHECK_EQ(size, substitute_instructions_for_loads_.size());
Mingyao Yang8df69d42015-10-22 15:40:58 -0700481 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800482 HInstruction* load = removed_loads_[i];
483 DCHECK(load != nullptr);
484 DCHECK(load->IsInstanceFieldGet() ||
485 load->IsStaticFieldGet() ||
486 load->IsArrayGet());
487 HInstruction* substitute = substitute_instructions_for_loads_[i];
488 DCHECK(substitute != nullptr);
489 // Keep tracing substitute till one that's not removed.
490 HInstruction* sub_sub = FindSubstitute(substitute);
491 while (sub_sub != substitute) {
492 substitute = sub_sub;
493 sub_sub = FindSubstitute(substitute);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700494 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800495 load->ReplaceWith(substitute);
496 load->GetBlock()->RemoveInstruction(load);
Mingyao Yang8df69d42015-10-22 15:40:58 -0700497 }
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800498
499 // At this point, stores in possibly_removed_stores_ can be safely removed.
500 size = possibly_removed_stores_.size();
501 for (size_t i = 0; i < size; i++) {
502 HInstruction* store = possibly_removed_stores_[i];
503 DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
504 store->GetBlock()->RemoveInstruction(store);
505 }
506
Mingyao Yang8df69d42015-10-22 15:40:58 -0700507 // TODO: remove unnecessary allocations.
508 // Eliminate instructions in singleton_new_instances_ that:
509 // - don't have uses,
510 // - don't have finalizers,
511 // - are instantiable and accessible,
512 // - have no/separate clinit check.
513 }
514
515 private:
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800516 // If heap_values[index] is an instance field store, need to keep the store.
517 // This is necessary if a heap value is killed due to merging, or loop side
518 // effects (which is essentially merging also), since a load later from the
519 // location won't be eliminated.
520 void KeepIfIsStore(HInstruction* heap_value) {
521 if (heap_value == kDefaultHeapValue ||
522 heap_value == kUnknownHeapValue ||
523 !heap_value->IsInstanceFieldSet()) {
524 return;
525 }
526 auto idx = std::find(possibly_removed_stores_.begin(),
527 possibly_removed_stores_.end(), heap_value);
528 if (idx != possibly_removed_stores_.end()) {
529 // Make sure the store is kept.
530 possibly_removed_stores_.erase(idx);
531 }
532 }
533
  // Seeds the heap-value array of a loop header block. If the loop may write
  // to the heap, incoming values are conservatively treated as killed (they
  // stay at their kUnknownHeapValue initialization) and any pending store
  // removals for them are cancelled; otherwise the pre-header values are
  // inherited unchanged.
  void HandleLoopSideEffects(HBasicBlock* block) {
    DCHECK(block->IsLoopHeader());
    int block_id = block->GetBlockId();
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];
    HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
    ArenaVector<HInstruction*>& pre_header_heap_values =
        heap_values_for_[pre_header->GetBlockId()];
    // We do a single pass in reverse post order. For loops, use the side effects as a hint
    // to see if the heap values should be killed.
    if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
      for (size_t i = 0; i < pre_header_heap_values.size(); i++) {
        // heap value is killed by loop side effects, need to keep the last store.
        KeepIfIsStore(pre_header_heap_values[i]);
      }
      if (kIsDebugBuild) {
        // heap_values should all be kUnknownHeapValue that it is inited with.
        for (size_t i = 0; i < heap_values.size(); i++) {
          DCHECK_EQ(heap_values[i], kUnknownHeapValue);
        }
      }
    } else {
      // Inherit the values from pre-header.
      for (size_t i = 0; i < heap_values.size(); i++) {
        heap_values[i] = pre_header_heap_values[i];
      }
    }
  }
561
  // Seeds the heap-value array of a non-loop-header block by merging its
  // predecessors: a location keeps a value only if every predecessor agrees
  // on it; otherwise it becomes kUnknownHeapValue and any pending store
  // removal feeding that location is cancelled.
  void MergePredecessorValues(HBasicBlock* block) {
    const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
    if (predecessors.size() == 0) {
      // Entry (or unreachable) block: nothing to merge; values stay at their
      // kUnknownHeapValue initialization.
      return;
    }
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      // Optimistically take predecessor 0's value, then check the others.
      HInstruction* pred0_value = heap_values_for_[predecessors[0]->GetBlockId()][i];
      heap_values[i] = pred0_value;
      if (pred0_value != kUnknownHeapValue) {
        for (size_t j = 1; j < predecessors.size(); j++) {
          HInstruction* pred_value = heap_values_for_[predecessors[j]->GetBlockId()][i];
          if (pred_value != pred0_value) {
            // Disagreement: the merged value is unknown.
            heap_values[i] = kUnknownHeapValue;
            break;
          }
        }
      }

      if (heap_values[i] == kUnknownHeapValue) {
        // Keep the last store in each predecessor since future loads cannot be eliminated.
        for (size_t j = 0; j < predecessors.size(); j++) {
          ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessors[j]->GetBlockId()];
          KeepIfIsStore(pred_values[i]);
        }
      }
    }
  }
590
591 // `instruction` is being removed. Try to see if the null check on it
592 // can be removed. This can happen if the same value is set in two branches
593 // but not in dominators. Such as:
594 // int[] a = foo();
595 // if () {
596 // a[0] = 2;
597 // } else {
598 // a[0] = 2;
599 // }
600 // // a[0] can now be replaced with constant 2, and the null check on it can be removed.
601 void TryRemovingNullCheck(HInstruction* instruction) {
602 HInstruction* prev = instruction->GetPrevious();
603 if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
604 // Previous instruction is a null check for this instruction. Remove the null check.
605 prev->ReplaceWith(prev->InputAt(0));
606 prev->GetBlock()->RemoveInstruction(prev);
607 }
608 }
609
610 HInstruction* GetDefaultValue(Primitive::Type type) {
611 switch (type) {
612 case Primitive::kPrimNot:
613 return GetGraph()->GetNullConstant();
614 case Primitive::kPrimBoolean:
615 case Primitive::kPrimByte:
616 case Primitive::kPrimChar:
617 case Primitive::kPrimShort:
618 case Primitive::kPrimInt:
619 return GetGraph()->GetIntConstant(0);
620 case Primitive::kPrimLong:
621 return GetGraph()->GetLongConstant(0);
622 case Primitive::kPrimFloat:
623 return GetGraph()->GetFloatConstant(0);
624 case Primitive::kPrimDouble:
625 return GetGraph()->GetDoubleConstant(0);
626 default:
627 UNREACHABLE();
628 }
629 }
630
  // Common handler for field/array loads. Looks up the tracked value of the
  // accessed heap location and, when known, records the load for removal
  // with a substitute; otherwise records the load itself as the location's
  // value (GVN-like behavior).
  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      // Location still holds its post-allocation default: replace the load
      // with the type's zero/null constant.
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton since it's from the same field
      // that a "removed" store puts the value. That store must be to a singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if ((heap_value != kUnknownHeapValue) &&
        // Keep the load due to possible I/F, J/D array aliasing.
        // See b/22538329 for details.
        (heap_value->GetType() == instruction->GetType())) {
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
      return;
    }

    // Load isn't eliminated.
    if (heap_value == kUnknownHeapValue) {
      // Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    }
  }
676
677 bool Equal(HInstruction* heap_value, HInstruction* value) {
678 if (heap_value == value) {
679 return true;
680 }
681 if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
682 return true;
683 }
684 return false;
685 }
686
  // Common handler for field/array stores. Decides whether the store is a
  // candidate for elimination (same value already present, or a store into a
  // non-escaping singleton), updates the tracked value of the location, and
  // kills values in locations that may alias with it.
  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For array element, don't eliminate stores since it can be easily aliased
      // with non-constant index.
    } else if (!heap_location_collector_.MayDeoptimize() &&
               ref_info->IsSingletonAndNotReturned()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // instruction is a store inside the loop, so the loop must do writes.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());

          if (loop_info->IsLoopInvariant(original_ref, false)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. Value stored to it isn't needed at
            // the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      // Queue for removal; may be un-queued later by KeepIfIsStore().
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }
772
773 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
774 HInstruction* obj = instruction->InputAt(0);
775 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
776 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
777 VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
778 }
779
780 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
781 HInstruction* obj = instruction->InputAt(0);
782 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
783 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
784 HInstruction* value = instruction->InputAt(1);
785 VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
786 }
787
788 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
789 HInstruction* cls = instruction->InputAt(0);
790 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
791 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
792 VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
793 }
794
795 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
796 HInstruction* cls = instruction->InputAt(0);
797 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
798 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
799 HInstruction* value = instruction->InputAt(1);
800 VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
801 }
802
803 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
804 HInstruction* array = instruction->InputAt(0);
805 HInstruction* index = instruction->InputAt(1);
806 VisitGetLocation(instruction,
807 array,
808 HeapLocation::kInvalidFieldOffset,
809 index,
810 HeapLocation::kDeclaringClassDefIndexForArrays);
811 }
812
813 void VisitArraySet(HArraySet* instruction) OVERRIDE {
814 HInstruction* array = instruction->InputAt(0);
815 HInstruction* index = instruction->InputAt(1);
816 HInstruction* value = instruction->InputAt(2);
817 VisitSetLocation(instruction,
818 array,
819 HeapLocation::kInvalidFieldOffset,
820 index,
821 HeapLocation::kDeclaringClassDefIndexForArrays,
822 value);
823 }
824
825 void HandleInvoke(HInstruction* invoke) {
826 ArenaVector<HInstruction*>& heap_values =
827 heap_values_for_[invoke->GetBlock()->GetBlockId()];
828 for (size_t i = 0; i < heap_values.size(); i++) {
829 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
830 if (ref_info->IsSingleton()) {
831 // Singleton references cannot be seen by the callee.
832 } else {
833 heap_values[i] = kUnknownHeapValue;
834 }
835 }
836 }
837
  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    // The callee may touch any non-singleton heap location; invalidate them.
    HandleInvoke(invoke);
  }
841
  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    // The callee may touch any non-singleton heap location; invalidate them.
    HandleInvoke(invoke);
  }
845
  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    // The callee may touch any non-singleton heap location; invalidate them.
    HandleInvoke(invoke);
  }
849
  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    // Unresolved target: the callee may touch any non-singleton heap
    // location; invalidate them.
    HandleInvoke(invoke);
  }
853
  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    // A class initializer can run arbitrary code; conservatively treat it
    // like an invocation and invalidate non-singleton heap values.
    HandleInvoke(clinit);
  }
857
  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // The field is unresolved at compile time, so conservatively treat the
    // access as an invocation (kills all non-singleton heap values).
    HandleInvoke(instruction);
  }
862
  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // The field is unresolved at compile time, so conservatively treat the
    // access as an invocation (kills all non-singleton heap values).
    HandleInvoke(instruction);
  }
867
  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // The field is unresolved at compile time, so conservatively treat the
    // access as an invocation (kills all non-singleton heap values).
    HandleInvoke(instruction);
  }
872
  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // The field is unresolved at compile time, so conservatively treat the
    // access as an invocation (kills all non-singleton heap values).
    HandleInvoke(instruction);
  }
877
878 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
879 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
880 if (ref_info == nullptr) {
881 // new_instance isn't used for field accesses. No need to process it.
882 return;
883 }
884 if (!heap_location_collector_.MayDeoptimize() &&
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800885 ref_info->IsSingletonAndNotReturned() &&
886 !new_instance->IsFinalizable() &&
887 !new_instance->CanThrow()) {
888 // TODO: add new_instance to singleton_new_instances_ and enable allocation elimination.
Mingyao Yang8df69d42015-10-22 15:40:58 -0700889 }
890 ArenaVector<HInstruction*>& heap_values =
891 heap_values_for_[new_instance->GetBlock()->GetBlockId()];
892 for (size_t i = 0; i < heap_values.size(); i++) {
893 HInstruction* ref =
894 heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
895 size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
896 if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
897 // Instance fields except the header fields are set to default heap values.
898 heap_values[i] = kDefaultHeapValue;
899 }
900 }
901 }
902
903 // Find an instruction's substitute if it should be removed.
904 // Return the same instruction if it should not be removed.
905 HInstruction* FindSubstitute(HInstruction* instruction) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800906 size_t size = removed_loads_.size();
Mingyao Yang8df69d42015-10-22 15:40:58 -0700907 for (size_t i = 0; i < size; i++) {
Mingyao Yangfb8464a2015-11-02 10:56:59 -0800908 if (removed_loads_[i] == instruction) {
909 return substitute_instructions_for_loads_[i];
Mingyao Yang8df69d42015-10-22 15:40:58 -0700910 }
911 }
912 return instruction;
913 }
914
  // Heap locations and aliasing information for the graph (not owned).
  const HeapLocationCollector& heap_location_collector_;
  // Loop side-effect summaries, consulted when reasoning about stores in loops.
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  // heap_values_for_[block_id][heap_location_index] caches the last known
  // value at that location (or kUnknownHeapValue / kDefaultHeapValue).
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;

  // We record the instructions that should be eliminated but may be
  // used by heap locations. They'll be removed in the end.
  // Parallel arrays: removed_loads_[i] is to be replaced by
  // substitute_instructions_for_loads_[i].
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  // Candidate allocations for elimination; currently never populated
  // (see the TODO in VisitNewInstance).
  ArenaVector<HInstruction*> singleton_new_instances_;
931
932 DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
933};
934
935void LoadStoreElimination::Run() {
936 if (graph_->IsDebuggable()) {
937 // Debugger may set heap values or trigger deoptimization of callers.
938 // Skip this optimization.
939 return;
940 }
941 HeapLocationCollector heap_location_collector(graph_);
942 for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
943 heap_location_collector.VisitBasicBlock(it.Current());
944 }
945 if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
946 // Bail out if there are too many heap locations to deal with.
947 return;
948 }
949 if (!heap_location_collector.HasHeapStores()) {
950 // Without heap stores, this pass would act mostly as GVN on heap accesses.
951 return;
952 }
953 if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
954 // Don't do load/store elimination if the method has volatile field accesses or
955 // monitor operations, for now.
956 // TODO: do it right.
957 return;
958 }
959 heap_location_collector.BuildAliasingMatrix();
960 LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
961 for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
962 lse_visitor.VisitBasicBlock(it.Current());
963 }
964 lse_visitor.RemoveInstructions();
965}
966
967} // namespace art