Optimizing: Fix register allocator validation memory usage.
Also attribute ArenaBitVector allocations to appropriate
passes. This was used to track down the source of the
excessive memory allocations.
Bug: 27690481
Change-Id: Ib895984cb7c04e24cbc7abbd8322079bab8ab100
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index f871543..7539943 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -85,17 +85,20 @@
"GVN ",
"InductionVar ",
"BCE ",
+ "DCE ",
+ "LSE ",
+ "LICM ",
"SsaLiveness ",
"SsaPhiElim ",
"RefTypeProp ",
"PrimTypeProp ",
"SideEffects ",
"RegAllocator ",
+ "RegAllocVldt ",
"StackMapStm ",
"CodeGen ",
"ParallelMove ",
"GraphChecker ",
- "LSE ",
"Verifier ",
};
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 728f897..f8f7396 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -96,17 +96,20 @@
kArenaAllocGvn,
kArenaAllocInductionVarAnalysis,
kArenaAllocBoundsCheckElimination,
+ kArenaAllocDCE,
+ kArenaAllocLSE,
+ kArenaAllocLICM,
kArenaAllocSsaLiveness,
kArenaAllocSsaPhiElimination,
kArenaAllocReferenceTypePropagation,
kArenaAllocPrimitiveTypePropagation,
kArenaAllocSideEffectsAnalysis,
kArenaAllocRegisterAllocator,
+ kArenaAllocRegisterAllocatorValidate,
kArenaAllocStackMapStream,
kArenaAllocCodeGenerator,
kArenaAllocParallelMoveResolver,
kArenaAllocGraphChecker,
- kArenaAllocLSE,
kArenaAllocVerifier,
kNumArenaAllocKinds
};
@@ -356,6 +359,11 @@
}
template <typename T>
+ T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
+ return AllocArray<T>(1, kind);
+ }
+
+ template <typename T>
T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
return static_cast<T*>(Alloc(length * sizeof(T), kind));
}
diff --git a/runtime/base/arena_bit_vector.cc b/runtime/base/arena_bit_vector.cc
index fbbfd84..5f8f5d2 100644
--- a/runtime/base/arena_bit_vector.cc
+++ b/runtime/base/arena_bit_vector.cc
@@ -21,36 +21,78 @@
namespace art {
-template <typename ArenaAlloc>
-class ArenaBitVectorAllocator FINAL : public Allocator,
- public ArenaObject<kArenaAllocGrowableBitMap> {
+template <bool kCount>
+class ArenaBitVectorAllocatorKindImpl;
+
+template <>
+class ArenaBitVectorAllocatorKindImpl<false> {
public:
- explicit ArenaBitVectorAllocator(ArenaAlloc* arena) : arena_(arena) {}
- ~ArenaBitVectorAllocator() {}
+ // Not tracking allocations, ignore the supplied kind and arbitrarily provide kArenaAllocGrowableBitMap.
+ explicit ArenaBitVectorAllocatorKindImpl(ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
+ ArenaBitVectorAllocatorKindImpl(const ArenaBitVectorAllocatorKindImpl&) = default;
+ ArenaBitVectorAllocatorKindImpl& operator=(const ArenaBitVectorAllocatorKindImpl&) = default;
+ ArenaAllocKind Kind() { return kArenaAllocGrowableBitMap; }
+};
+
+template <bool kCount>
+class ArenaBitVectorAllocatorKindImpl {
+ public:
+ explicit ArenaBitVectorAllocatorKindImpl(ArenaAllocKind kind) : kind_(kind) { }
+ ArenaBitVectorAllocatorKindImpl(const ArenaBitVectorAllocatorKindImpl&) = default;
+ ArenaBitVectorAllocatorKindImpl& operator=(const ArenaBitVectorAllocatorKindImpl&) = default;
+ ArenaAllocKind Kind() { return kind_; }
+
+ private:
+ ArenaAllocKind kind_;
+};
+
+using ArenaBitVectorAllocatorKind =
+ ArenaBitVectorAllocatorKindImpl<kArenaAllocatorCountAllocations>;
+
+template <typename ArenaAlloc>
+class ArenaBitVectorAllocator FINAL : public Allocator, private ArenaBitVectorAllocatorKind {
+ public:
+ static ArenaBitVectorAllocator* Create(ArenaAlloc* arena, ArenaAllocKind kind) {
+ void* storage = arena->template Alloc<ArenaBitVectorAllocator>(kind);
+ return new (storage) ArenaBitVectorAllocator(arena, kind);
+ }
+
+ ~ArenaBitVectorAllocator() {
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
+ }
virtual void* Alloc(size_t size) {
- return arena_->Alloc(size, kArenaAllocGrowableBitMap);
+ return arena_->Alloc(size, this->Kind());
}
virtual void Free(void*) {} // Nop.
private:
+ ArenaBitVectorAllocator(ArenaAlloc* arena, ArenaAllocKind kind)
+ : ArenaBitVectorAllocatorKind(kind), arena_(arena) { }
+
ArenaAlloc* const arena_;
+
DISALLOW_COPY_AND_ASSIGN(ArenaBitVectorAllocator);
};
-ArenaBitVector::ArenaBitVector(ArenaAllocator* arena, unsigned int start_bits,
- bool expandable, OatBitMapKind kind)
- : BitVector(start_bits, expandable,
- new (arena) ArenaBitVectorAllocator<ArenaAllocator>(arena)), kind_(kind) {
- UNUSED(kind_);
+ArenaBitVector::ArenaBitVector(ArenaAllocator* arena,
+ unsigned int start_bits,
+ bool expandable,
+ ArenaAllocKind kind)
+ : BitVector(start_bits,
+ expandable,
+ ArenaBitVectorAllocator<ArenaAllocator>::Create(arena, kind)) {
}
-ArenaBitVector::ArenaBitVector(ScopedArenaAllocator* arena, unsigned int start_bits,
- bool expandable, OatBitMapKind kind)
- : BitVector(start_bits, expandable,
- new (arena) ArenaBitVectorAllocator<ScopedArenaAllocator>(arena)), kind_(kind) {
- UNUSED(kind_);
+ArenaBitVector::ArenaBitVector(ScopedArenaAllocator* arena,
+ unsigned int start_bits,
+ bool expandable,
+ ArenaAllocKind kind)
+ : BitVector(start_bits,
+ expandable,
+ ArenaBitVectorAllocator<ScopedArenaAllocator>::Create(arena, kind)) {
}
} // namespace art
diff --git a/runtime/base/arena_bit_vector.h b/runtime/base/arena_bit_vector.h
index d606166..d86d622 100644
--- a/runtime/base/arena_bit_vector.h
+++ b/runtime/base/arena_bit_vector.h
@@ -25,44 +25,34 @@
class ArenaAllocator;
class ScopedArenaAllocator;
-// Type of growable bitmap for memory tuning.
-enum OatBitMapKind {
- kBitMapMisc = 0,
- kBitMapUse,
- kBitMapDef,
- kBitMapLiveIn,
- kBitMapBMatrix,
- kBitMapDominators,
- kBitMapIDominated,
- kBitMapDomFrontier,
- kBitMapRegisterV,
- kBitMapTempSSARegisterV,
- kBitMapNullCheck,
- kBitMapClInitCheck,
- kBitMapPredecessors,
- kNumBitMapKinds
-};
-
-std::ostream& operator<<(std::ostream& os, const OatBitMapKind& kind);
-
/*
* A BitVector implementation that uses Arena allocation.
*/
class ArenaBitVector : public BitVector, public ArenaObject<kArenaAllocGrowableBitMap> {
public:
- ArenaBitVector(ArenaAllocator* arena, uint32_t start_bits, bool expandable,
- OatBitMapKind kind = kBitMapMisc);
- ArenaBitVector(ScopedArenaAllocator* arena, uint32_t start_bits, bool expandable,
- OatBitMapKind kind = kBitMapMisc);
+ template <typename Allocator>
+ static ArenaBitVector* Create(Allocator* arena,
+ uint32_t start_bits,
+ bool expandable,
+ ArenaAllocKind kind = kArenaAllocGrowableBitMap) {
+ void* storage = arena->template Alloc<ArenaBitVector>(kind);
+ return new (storage) ArenaBitVector(arena, start_bits, expandable, kind);
+ }
+
+ ArenaBitVector(ArenaAllocator* arena,
+ uint32_t start_bits,
+ bool expandable,
+ ArenaAllocKind kind = kArenaAllocGrowableBitMap);
+ ArenaBitVector(ScopedArenaAllocator* arena,
+ uint32_t start_bits,
+ bool expandable,
+ ArenaAllocKind kind = kArenaAllocGrowableBitMap);
~ArenaBitVector() {}
private:
- const OatBitMapKind kind_; // for memory use tuning. TODO: currently unused.
-
DISALLOW_COPY_AND_ASSIGN(ArenaBitVector);
};
-
} // namespace art
#endif // ART_RUNTIME_BASE_ARENA_BIT_VECTOR_H_
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
index a87153b..55044b3 100644
--- a/runtime/base/scoped_arena_allocator.h
+++ b/runtime/base/scoped_arena_allocator.h
@@ -152,6 +152,11 @@
}
template <typename T>
+ T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
+ return AllocArray<T>(1, kind);
+ }
+
+ template <typename T>
T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
return static_cast<T*>(Alloc(length * sizeof(T), kind));
}