/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gvn.h"

#include "base/arena_containers.h"
#include "base/bit_vector-inl.h"
#include "side_effects_analysis.h"
#include "utils.h"
#include "utils/arena_bit_vector.h"

namespace art {

/**
 * A ValueSet holds instructions that can replace other instructions. It is updated
 * through the `Add` and `Kill` methods: `Kill` removes all instructions that are
 * affected by the given side effects.
 *
 * The `Lookup` method returns an instruction equivalent to the given one, if any
 * is present in the set. In GVN terms, such instructions have the same "number".
 */
class ValueSet : public ArenaObject<kArenaAllocGvn> {
 public:
  // Constructs an empty ValueSet which owns all its buckets.
  explicit ValueSet(ArenaAllocator* allocator)
      : allocator_(allocator),
        num_buckets_(kMinimumNumberOfBuckets),
        buckets_(allocator->AllocArray<Node*>(num_buckets_, kArenaAllocGvn)),
        buckets_owned_(allocator, num_buckets_, false),
        num_entries_(0) {
    // ArenaAllocator returns zeroed memory, so no need to set buckets to null.
    DCHECK(IsPowerOfTwo(num_buckets_));
    buckets_owned_.SetInitialBits(num_buckets_);
  }

  // Copy constructor. Depending on the load factor, it will either make a deep
  // copy (all buckets owned) or a shallow one (buckets pointing to the parent).
  ValueSet(ArenaAllocator* allocator, const ValueSet& to_copy)
      : allocator_(allocator),
        num_buckets_(to_copy.IdealBucketCount()),
        buckets_(allocator->AllocArray<Node*>(num_buckets_, kArenaAllocGvn)),
        buckets_owned_(allocator, num_buckets_, false),
        num_entries_(to_copy.num_entries_) {
    // ArenaAllocator returns zeroed memory, so entries of buckets_ and
    // buckets_owned_ are initialized to null and false, respectively.
    DCHECK(IsPowerOfTwo(num_buckets_));
    if (num_buckets_ == to_copy.num_buckets_) {
      // Hash table remains the same size. We copy the bucket pointers and leave
      // all buckets_owned_ bits false.
      memcpy(buckets_, to_copy.buckets_, num_buckets_ * sizeof(Node*));
    } else {
      // Hash table size changes. We copy and rehash all entries, and set all
      // buckets_owned_ bits to true.
      for (size_t i = 0; i < to_copy.num_buckets_; ++i) {
        for (Node* node = to_copy.buckets_[i]; node != nullptr; node = node->GetNext()) {
          size_t new_index = BucketIndex(node->GetHashCode());
          buckets_[new_index] = node->Dup(allocator_, buckets_[new_index]);
        }
      }
      buckets_owned_.SetInitialBits(num_buckets_);
    }
  }
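
  // Worked example of the copy decision above (values follow directly from
  // IdealBucketCount() below, not from any particular compilation): copying a
  // parent that has 8 buckets and 5 entries keeps 8 buckets (5 + 5/2 = 7,
  // rounded up to the 8-bucket minimum), so the bucket pointers are shared and
  // cloned lazily on first write; copying a parent that has 8 buckets and
  // 12 entries resizes to 32 buckets (12 + 6 = 18, rounded up to the next
  // power of two), so all entries are rehashed and owned immediately.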

  // Adds an instruction to the set.
  void Add(HInstruction* instruction) {
    DCHECK(Lookup(instruction) == nullptr);
    size_t hash_code = HashCode(instruction);
    size_t index = BucketIndex(hash_code);

    if (!buckets_owned_.IsBitSet(index)) {
      CloneBucket(index);
    }
    buckets_[index] = new (allocator_) Node(instruction, hash_code, buckets_[index]);
    ++num_entries_;
  }
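
  // Note that Add() only ever mutates buckets this set owns: if the target
  // bucket is still shared with a parent set, the CloneBucket() call above
  // first makes a private copy, and the new node is then prepended to that
  // private chain, leaving the parent untouched.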

  // Returns an instruction equivalent to the given instruction if the set
  // contains one; returns null otherwise.
  HInstruction* Lookup(HInstruction* instruction) const {
    size_t hash_code = HashCode(instruction);
    size_t index = BucketIndex(hash_code);

    for (Node* node = buckets_[index]; node != nullptr; node = node->GetNext()) {
      if (node->GetHashCode() == hash_code) {
        HInstruction* existing = node->GetInstruction();
        if (existing->Equals(instruction)) {
          return existing;
        }
      }
    }
    return nullptr;
  }

  // Returns whether the given instruction is in the set.
  bool Contains(HInstruction* instruction) const {
    size_t hash_code = HashCode(instruction);
    size_t index = BucketIndex(hash_code);

    for (Node* node = buckets_[index]; node != nullptr; node = node->GetNext()) {
      if (node->GetInstruction() == instruction) {
        return true;
      }
    }
    return false;
  }

  // Removes all instructions in the set that are affected by the given side effects.
  void Kill(SideEffects side_effects) {
    DeleteAllImpureWhich([side_effects](Node* node) {
      return node->GetInstruction()->GetSideEffects().MayDependOn(side_effects);
    });
  }
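
  // Illustration of how Kill() behaves in the pass below: the pass calls Kill()
  // with each visited instruction's side effects, and when that instruction is
  // a heap write (for example an instance-field store), any previously added
  // instruction whose own side effects MayDependOn() the write is dropped.
  // Pure instructions live in odd buckets and are never even inspected.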

  // Updates this set by intersecting with instructions in a predecessor's set.
  void IntersectWith(ValueSet* predecessor) {
    if (IsEmpty()) {
      return;
    } else if (predecessor->IsEmpty()) {
      Clear();
    } else {
      // Pure instructions do not need to be tested because only impure
      // instructions can be killed.
      DeleteAllImpureWhich([predecessor](Node* node) {
        return !predecessor->Contains(node->GetInstruction());
      });
    }
  }

  bool IsEmpty() const { return num_entries_ == 0; }
  size_t GetNumberOfEntries() const { return num_entries_; }

 private:
  class Node : public ArenaObject<kArenaAllocGvn> {
   public:
    Node(HInstruction* instruction, size_t hash_code, Node* next)
        : instruction_(instruction), hash_code_(hash_code), next_(next) {}

    size_t GetHashCode() const { return hash_code_; }
    HInstruction* GetInstruction() const { return instruction_; }
    Node* GetNext() const { return next_; }
    void SetNext(Node* node) { next_ = node; }

    Node* Dup(ArenaAllocator* allocator, Node* new_next = nullptr) {
      return new (allocator) Node(instruction_, hash_code_, new_next);
    }

   private:
    HInstruction* const instruction_;
    const size_t hash_code_;
    Node* next_;

    DISALLOW_COPY_AND_ASSIGN(Node);
  };

  // Creates our own copy of a bucket that is currently pointing to a parent.
  // This algorithm can be called while iterating over the bucket because it
  // preserves the order of entries in the bucket and will return the clone of
  // the given 'iterator'.
  Node* CloneBucket(size_t index, Node* iterator = nullptr) {
    DCHECK(!buckets_owned_.IsBitSet(index));
    Node* clone_current = nullptr;
    Node* clone_previous = nullptr;
    Node* clone_iterator = nullptr;
    for (Node* node = buckets_[index]; node != nullptr; node = node->GetNext()) {
      clone_current = node->Dup(allocator_, nullptr);
      if (node == iterator) {
        clone_iterator = clone_current;
      }
      if (clone_previous == nullptr) {
        buckets_[index] = clone_current;
      } else {
        clone_previous->SetNext(clone_current);
      }
      clone_previous = clone_current;
    }
    buckets_owned_.SetBit(index);
    return clone_iterator;
  }
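
  // Illustration of the iterator-preserving property: if a shared bucket holds
  // the chain A -> B -> C and the caller has advanced to A (iterator == A) when
  // a deletion becomes necessary, CloneBucket() rebuilds the chain in this set
  // as A' -> B' -> C' and returns A', so the caller resumes iterating over its
  // own private nodes without ever touching the parent's chain.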

  void Clear() {
    num_entries_ = 0;
    for (size_t i = 0; i < num_buckets_; ++i) {
      buckets_[i] = nullptr;
    }
    buckets_owned_.SetInitialBits(num_buckets_);
  }

  // Iterates over buckets with impure instructions (even indices) and deletes
  // the entries on which 'cond' returns true.
  template<typename Functor>
  void DeleteAllImpureWhich(Functor cond) {
    for (size_t i = 0; i < num_buckets_; i += 2) {
      Node* node = buckets_[i];
      Node* previous = nullptr;

      if (node == nullptr) {
        continue;
      }

      if (!buckets_owned_.IsBitSet(i)) {
        // Bucket is not owned but maybe we won't need to change it at all.
        // Iterate as long as the entries don't satisfy 'cond'.
        while (node != nullptr) {
          if (cond(node)) {
            // We do need to delete an entry but we do not own the bucket.
            // Clone the bucket, make sure 'previous' and 'node' point to
            // the cloned entries and break.
            previous = CloneBucket(i, previous);
            node = (previous == nullptr) ? buckets_[i] : previous->GetNext();
            break;
          }
          previous = node;
          node = node->GetNext();
        }
      }

      // By this point we either own the bucket and can start deleting entries,
      // or we do not own it but no entries matched 'cond'.
      DCHECK(buckets_owned_.IsBitSet(i) || node == nullptr);

      // We iterate over the remainder of entries and delete those that match
      // the given condition.
      while (node != nullptr) {
        Node* next = node->GetNext();
        if (cond(node)) {
          if (previous == nullptr) {
            buckets_[i] = next;
          } else {
            previous->SetNext(next);
          }
        } else {
          previous = node;
        }
        node = next;
      }
    }
  }

  // Computes a bucket count such that the load factor is reasonable.
  // This is estimated as (num_entries_ * 1.5) rounded up to the nearest power of two.
  size_t IdealBucketCount() const {
    size_t bucket_count = RoundUpToPowerOfTwo(num_entries_ + (num_entries_ >> 1));
    if (bucket_count > kMinimumNumberOfBuckets) {
      return bucket_count;
    } else {
      return kMinimumNumberOfBuckets;
    }
  }
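
  // For example, 11 entries give 11 + 5 = 16, already a power of two, so 16
  // buckets are used; 12 entries give 12 + 6 = 18, rounded up to 32 buckets;
  // 5 entries or fewer fall back to kMinimumNumberOfBuckets (8).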

  // Generates a hash code for an instruction. Pure instructions are put into
  // odd buckets to speed up deletion.
  size_t HashCode(HInstruction* instruction) const {
    size_t hash_code = instruction->ComputeHashCode();
    if (instruction->GetSideEffects().HasDependencies()) {
      return (hash_code << 1) | 0;
    } else {
      return (hash_code << 1) | 1;
    }
  }
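
  // For instance, a side-effect-free arithmetic instruction whose
  // ComputeHashCode() is 0x2a maps to (0x2a << 1) | 1 == 0x55 and lands in an
  // odd bucket, while a heap read that reports dependencies with the same base
  // hash maps to the even value 0x54 and is therefore scanned by
  // DeleteAllImpureWhich().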

  // Converts a hash code to a bucket index.
  size_t BucketIndex(size_t hash_code) const {
    return hash_code & (num_buckets_ - 1);
  }
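
  // Since num_buckets_ is always a power of two (see the DCHECKs in the
  // constructors), masking with (num_buckets_ - 1) is equivalent to
  // hash_code % num_buckets_ while avoiding a division.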

  ArenaAllocator* const allocator_;

  // The internal bucket implementation of the set.
  size_t const num_buckets_;
  Node** const buckets_;

  // Flags specifying which buckets were copied into the set from its parent.
  // If a flag is not set, the corresponding bucket points to entries in the
  // parent and must be cloned prior to making changes.
  ArenaBitVector buckets_owned_;

  // The number of entries in the set.
  size_t num_entries_;

  static constexpr size_t kMinimumNumberOfBuckets = 8;

  DISALLOW_COPY_AND_ASSIGN(ValueSet);
};
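
// A minimal usage sketch of ValueSet, independent of the pass below. The names
// `allocator`, `add1`, `add2` and `store` are hypothetical placeholders rather
// than instructions from a real graph:
//
//   ValueSet set(allocator);
//   set.Add(add1);                          // first occurrence of (x + y)
//   HInstruction* same = set.Lookup(add2);  // returns add1 if add2 equals it
//   set.Kill(store->GetSideEffects());      // drops reads the store may clobber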

/**
 * Optimization phase that removes redundant instructions.
 */
class GlobalValueNumberer : public ValueObject {
 public:
  GlobalValueNumberer(ArenaAllocator* allocator,
                      HGraph* graph,
                      const SideEffectsAnalysis& side_effects)
      : graph_(graph),
        allocator_(allocator),
        side_effects_(side_effects),
        sets_(graph->GetBlocks().size(), nullptr, allocator->Adapter(kArenaAllocGvn)) {}

  void Run();

 private:
  // Per-block GVN. Will also update the ValueSet of the dominated and
  // successor blocks.
  void VisitBasicBlock(HBasicBlock* block);

  HGraph* graph_;
  ArenaAllocator* const allocator_;
  const SideEffectsAnalysis& side_effects_;

  // ValueSets for the blocks. Initially null; a block's set is allocated and
  // populated when its dominator is visited, and then updated by all blocks
  // on the path from the dominator to the block.
  ArenaVector<ValueSet*> sets_;

  DISALLOW_COPY_AND_ASSIGN(GlobalValueNumberer);
};

void GlobalValueNumberer::Run() {
  DCHECK(side_effects_.HasRun());
  sets_[graph_->GetEntryBlock()->GetBlockId()] = new (allocator_) ValueSet(allocator_);

  // Use the reverse post order to ensure the non back-edge predecessors of a block are
  // visited before the block itself.
  for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
    VisitBasicBlock(it.Current());
  }
}
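
// Note that in a reverse post order traversal a block's dominator is always
// visited before the block itself, so sets_[dominator->GetBlockId()] is
// guaranteed to be populated by the time VisitBasicBlock() reads it below.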

void GlobalValueNumberer::VisitBasicBlock(HBasicBlock* block) {
  ValueSet* set = nullptr;
  const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
  if (predecessors.size() == 0 || predecessors[0]->IsEntryBlock()) {
    // The entry block should only accumulate constant instructions, and
    // the builder puts constants only in the entry block.
    // Therefore, there is no need to propagate the value set to the next block.
    set = new (allocator_) ValueSet(allocator_);
  } else {
    HBasicBlock* dominator = block->GetDominator();
    ValueSet* dominator_set = sets_[dominator->GetBlockId()];
    if (dominator->GetSuccessors().size() == 1) {
      DCHECK_EQ(dominator->GetSuccessor(0), block);
      set = dominator_set;
    } else {
      // We have to copy if the dominator has other successors, or `block` is not a successor
      // of the dominator.
      set = new (allocator_) ValueSet(allocator_, *dominator_set);
    }
    if (!set->IsEmpty()) {
      if (block->IsLoopHeader()) {
        DCHECK_EQ(block->GetDominator(), block->GetLoopInformation()->GetPreHeader());
        set->Kill(side_effects_.GetLoopEffects(block));
      } else if (predecessors.size() > 1) {
        for (HBasicBlock* predecessor : predecessors) {
          set->IntersectWith(sets_[predecessor->GetBlockId()]);
          if (set->IsEmpty()) {
            break;
          }
        }
      }
    }
  }

  sets_[block->GetBlockId()] = set;

  HInstruction* current = block->GetFirstInstruction();
  while (current != nullptr) {
    set->Kill(current->GetSideEffects());
    // Save the next instruction in case `current` is removed from the graph.
    HInstruction* next = current->GetNext();
    if (current->CanBeMoved()) {
      if (current->IsBinaryOperation() && current->AsBinaryOperation()->IsCommutative()) {
        // For commutative ops, (x op y) will be treated the same as (y op x)
        // after fixed ordering.
        current->AsBinaryOperation()->OrderInputs();
      }
      HInstruction* existing = set->Lookup(current);
      if (existing != nullptr) {
        // This replacement does not require another OrderInputs() pass: `current`
        // is either used by an instruction it dominates, which has not been
        // visited yet given the order in which we visit instructions, or by a
        // phi, and we never call OrderInputs() on a phi anyway.
        current->ReplaceWith(existing);
        current->GetBlock()->RemoveInstruction(current);
      } else {
        set->Add(current);
      }
    }
    current = next;
  }
}
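
// Sketch of the transformation performed above, on hypothetical HIR (the
// instruction ids are illustrative only):
//
//   i4 Add [i2, i3]   // first occurrence: added to the ValueSet
//   i5 Add [i3, i2]   // OrderInputs() normalizes the inputs to [i2, i3],
//                     // Lookup() then returns i4, so i5 is replaced by i4
//                     // and removed from the graph.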

void GVNOptimization::Run() {
  GlobalValueNumberer gvn(graph_->GetArena(), graph_, side_effects_);
  gvn.Run();
}

}  // namespace art