Replace ObjectSet with LargeObjectBitmap.

Speeds up large object marking since large objects no longer require
a lock. Changed the GCs to use the heap bitmap for marking objects
that aren't in the fast path. This eliminates the need for a
MarkLargeObject function.

Maps before (10 GC iterations):
Mean partial time: 180ms
Mean sticky time: 151ms

Maps after:
Mean partial time: 161ms
Mean sticky time: 101ms

Note: the GC durations are long due to recent ergonomic changes and
because the fast bulk free hasn't yet been enabled. Over 50% of the
GC time is spent in RosAllocSpace::FreeList.

Bug: 13571028
Change-Id: Id8f94718aeaa13052672ccbae1e8edf77d653f62
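
For context, a minimal sketch of how a collector can mark through the
unified bitmap interface after this change; MarkObject and the
mark-stack push are illustrative names, not part of this patch:

    // Sketch only: mark obj via the heap bitmap. Large objects take the
    // lock-free slow path instead of a dedicated MarkLargeObject call.
    inline void MarkObject(gc::accounting::HeapBitmap* heap_bitmap,
                           mirror::Object* obj) {
      auto large_object_visitor = [](const mirror::Object*) {
        // Optional bookkeeping for large objects, run before the bit is set.
      };
      // AtomicTestAndSet returns true if the object was already marked.
      if (!heap_bitmap->AtomicTestAndSet(obj, large_object_visitor)) {
        // First time this object is marked: queue it for scanning.
        // mark_stack->PushBack(obj);  // hypothetical follow-up work
      }
    }
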
diff --git a/runtime/gc/accounting/heap_bitmap-inl.h b/runtime/gc/accounting/heap_bitmap-inl.h
index ed7b427..c67542f 100644
--- a/runtime/gc/accounting/heap_bitmap-inl.h
+++ b/runtime/gc/accounting/heap_bitmap-inl.h
@@ -30,9 +30,8 @@
for (const auto& bitmap : continuous_space_bitmaps_) {
bitmap->VisitMarkedRange(bitmap->HeapBegin(), bitmap->HeapLimit(), visitor);
}
- DCHECK(!discontinuous_space_sets_.empty());
- for (const auto& space_set : discontinuous_space_sets_) {
- space_set->Visit(visitor);
+ for (const auto& bitmap : large_object_bitmaps_) {
+ bitmap->VisitMarkedRange(bitmap->HeapBegin(), bitmap->HeapLimit(), visitor);
}
}
@@ -40,31 +39,61 @@
ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
if (LIKELY(bitmap != nullptr)) {
return bitmap->Test(obj);
- } else {
- return GetDiscontinuousSpaceObjectSet(obj) != nullptr;
}
+ for (const auto& bitmap : large_object_bitmaps_) {
+ if (LIKELY(bitmap->HasAddress(obj))) {
+ return bitmap->Test(obj);
+ }
+ }
+ LOG(FATAL) << "Invalid object " << obj;
+ return false;
}
inline void HeapBitmap::Clear(const mirror::Object* obj) {
ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
if (LIKELY(bitmap != nullptr)) {
bitmap->Clear(obj);
- } else {
- ObjectSet* set = GetDiscontinuousSpaceObjectSet(obj);
- DCHECK(set != NULL);
- set->Clear(obj);
+ return;
}
+ for (const auto& bitmap : large_object_bitmaps_) {
+ if (LIKELY(bitmap->HasAddress(obj))) {
+ bitmap->Clear(obj);
+ return;
+ }
+ }
+ LOG(FATAL) << "Invalid object " << obj;
}
-inline void HeapBitmap::Set(const mirror::Object* obj) {
+template<typename LargeObjectSetVisitor>
+inline bool HeapBitmap::Set(const mirror::Object* obj, const LargeObjectSetVisitor& visitor) {
ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
- if (LIKELY(bitmap != NULL)) {
- bitmap->Set(obj);
- } else {
- ObjectSet* set = GetDiscontinuousSpaceObjectSet(obj);
- DCHECK(set != NULL);
- set->Set(obj);
+ if (LIKELY(bitmap != nullptr)) {
+ return bitmap->Set(obj);
}
+ visitor(obj);
+ for (const auto& bitmap : large_object_bitmaps_) {
+ if (LIKELY(bitmap->HasAddress(obj))) {
+ return bitmap->Set(obj);
+ }
+ }
+ LOG(FATAL) << "Invalid object " << obj;
+ return false;
+}
+
+template<typename LargeObjectSetVisitor>
+inline bool HeapBitmap::AtomicTestAndSet(const mirror::Object* obj,
+ const LargeObjectSetVisitor& visitor) {
+ ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
+ if (LIKELY(bitmap != nullptr)) {
+ return bitmap->AtomicTestAndSet(obj);
+ }
+ visitor(obj);
+ for (const auto& bitmap : large_object_bitmaps_) {
+ if (LIKELY(bitmap->HasAddress(obj))) {
+ return bitmap->AtomicTestAndSet(obj);
+ }
+ }
+ LOG(FATAL) << "Invalid object " << obj;
+ return false;
}
inline ContinuousSpaceBitmap* HeapBitmap::GetContinuousSpaceBitmap(const mirror::Object* obj) const {
@@ -76,15 +105,6 @@
return nullptr;
}
-inline ObjectSet* HeapBitmap::GetDiscontinuousSpaceObjectSet(const mirror::Object* obj) const {
- for (const auto& space_set : discontinuous_space_sets_) {
- if (space_set->Test(obj)) {
- return space_set;
- }
- }
- return nullptr;
-}
-
} // namespace accounting
} // namespace gc
} // namespace art
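
All of the rewritten helpers above share the same two-level lookup: the
continuous-space bitmap fast path, then a linear scan of the few large
object bitmaps, with an unknown address being fatal. A hedged usage
sketch of the unified visiting path (the heap_bitmap variable and the
enclosing Visit name are assumed from context, not shown in the hunk):

    // Sketch only: count every marked object, large objects included,
    // now that large object spaces are visited as plain bitmaps.
    size_t count = 0;
    heap_bitmap->Visit([&count](mirror::Object* obj) {
      ++count;  // runs for marked objects in continuous and LOS bitmaps
    });
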
diff --git a/runtime/gc/accounting/heap_bitmap.cc b/runtime/gc/accounting/heap_bitmap.cc
index 1db886c..a5d59bf 100644
--- a/runtime/gc/accounting/heap_bitmap.cc
+++ b/runtime/gc/accounting/heap_bitmap.cc
@@ -25,61 +25,58 @@
void HeapBitmap::ReplaceBitmap(ContinuousSpaceBitmap* old_bitmap,
ContinuousSpaceBitmap* new_bitmap) {
- for (auto& bitmap : continuous_space_bitmaps_) {
- if (bitmap == old_bitmap) {
- bitmap = new_bitmap;
- return;
- }
- }
- LOG(FATAL) << "bitmap " << static_cast<const void*>(old_bitmap) << " not found";
+ auto it = std::find(continuous_space_bitmaps_.begin(), continuous_space_bitmaps_.end(),
+ old_bitmap);
+ CHECK(it != continuous_space_bitmaps_.end()) << " continuous space bitmap " << old_bitmap
+ << " not found";
+ *it = new_bitmap;
}
-void HeapBitmap::ReplaceObjectSet(ObjectSet* old_set, ObjectSet* new_set) {
- for (auto& space_set : discontinuous_space_sets_) {
- if (space_set == old_set) {
- space_set = new_set;
- return;
- }
- }
- LOG(FATAL) << "object set " << static_cast<const void*>(old_set) << " not found";
+void HeapBitmap::ReplaceLargeObjectBitmap(LargeObjectBitmap* old_bitmap,
+ LargeObjectBitmap* new_bitmap) {
+ auto it = std::find(large_object_bitmaps_.begin(), large_object_bitmaps_.end(), old_bitmap);
+ CHECK(it != large_object_bitmaps_.end()) << " large object bitmap " << old_bitmap
+ << " not found";
+ *it = new_bitmap;
}
void HeapBitmap::AddContinuousSpaceBitmap(accounting::ContinuousSpaceBitmap* bitmap) {
- DCHECK(bitmap != NULL);
-
- // Check for interval overlap.
+ DCHECK(bitmap != nullptr);
+ // Check that there is no bitmap overlap.
for (const auto& cur_bitmap : continuous_space_bitmaps_) {
- CHECK(!(
- bitmap->HeapBegin() < cur_bitmap->HeapLimit() &&
- bitmap->HeapLimit() > cur_bitmap->HeapBegin()))
- << "Bitmap " << bitmap->Dump() << " overlaps with existing bitmap " << cur_bitmap->Dump();
+ CHECK(bitmap->HeapBegin() >= cur_bitmap->HeapLimit() ||
+ bitmap->HeapLimit() <= cur_bitmap->HeapBegin())
+ << "Bitmap " << bitmap->Dump() << " overlaps with existing bitmap "
+ << cur_bitmap->Dump();
}
continuous_space_bitmaps_.push_back(bitmap);
}
void HeapBitmap::RemoveContinuousSpaceBitmap(accounting::ContinuousSpaceBitmap* bitmap) {
+ DCHECK(bitmap != nullptr);
auto it = std::find(continuous_space_bitmaps_.begin(), continuous_space_bitmaps_.end(), bitmap);
DCHECK(it != continuous_space_bitmaps_.end());
continuous_space_bitmaps_.erase(it);
}
-void HeapBitmap::AddDiscontinuousObjectSet(ObjectSet* set) {
- DCHECK(set != nullptr);
- discontinuous_space_sets_.push_back(set);
+void HeapBitmap::AddLargeObjectBitmap(LargeObjectBitmap* bitmap) {
+ DCHECK(bitmap != nullptr);
+ large_object_bitmaps_.push_back(bitmap);
}
-void HeapBitmap::RemoveDiscontinuousObjectSet(ObjectSet* set) {
- auto it = std::find(discontinuous_space_sets_.begin(), discontinuous_space_sets_.end(), set);
- DCHECK(it != discontinuous_space_sets_.end());
- discontinuous_space_sets_.erase(it);
+void HeapBitmap::RemoveLargeObjectBitmap(LargeObjectBitmap* bitmap) {
+ DCHECK(bitmap != nullptr);
+ auto it = std::find(large_object_bitmaps_.begin(), large_object_bitmaps_.end(), bitmap);
+ DCHECK(it != large_object_bitmaps_.end());
+ large_object_bitmaps_.erase(it);
}
void HeapBitmap::Walk(ObjectCallback* callback, void* arg) {
for (const auto& bitmap : continuous_space_bitmaps_) {
bitmap->Walk(callback, arg);
}
- for (const auto& space_set : discontinuous_space_sets_) {
- space_set->Walk(callback, arg);
+ for (const auto& bitmap : large_object_bitmaps_) {
+ bitmap->Walk(callback, arg);
}
}
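
The rewritten overlap CHECK in AddContinuousSpaceBitmap states the
half-open interval rule directly: [begin1, limit1) and [begin2, limit2)
are disjoint exactly when one begins at or after the other ends. A
standalone sketch of the same predicate:

    // Sketch only: disjointness of half-open address ranges, as in the CHECK.
    bool Disjoint(uintptr_t begin1, uintptr_t limit1,
                  uintptr_t begin2, uintptr_t limit2) {
      return begin1 >= limit2 || limit1 <= begin2;
    }
    // Disjoint(0x1000, 0x2000, 0x2000, 0x3000) -> true  (ranges only touch)
    // Disjoint(0x1000, 0x2000, 0x1800, 0x3000) -> false (they overlap)
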
diff --git a/runtime/gc/accounting/heap_bitmap.h b/runtime/gc/accounting/heap_bitmap.h
index 61a2429..814dc06 100644
--- a/runtime/gc/accounting/heap_bitmap.h
+++ b/runtime/gc/accounting/heap_bitmap.h
@@ -33,9 +33,13 @@
public:
bool Test(const mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
void Clear(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
- void Set(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+ template<typename LargeObjectSetVisitor>
+ bool Set(const mirror::Object* obj, const LargeObjectSetVisitor& visitor)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) ALWAYS_INLINE;
+ template<typename LargeObjectSetVisitor>
+ bool AtomicTestAndSet(const mirror::Object* obj, const LargeObjectSetVisitor& visitor)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) ALWAYS_INLINE;
ContinuousSpaceBitmap* GetContinuousSpaceBitmap(const mirror::Object* obj) const;
- ObjectSet* GetDiscontinuousSpaceObjectSet(const mirror::Object* obj) const;
void Walk(ObjectCallback* callback, void* arg)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
@@ -50,7 +54,7 @@
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
- // Find and replace a object set pointer, this is used by for the bitmap swapping in the GC.
+ // Find and replace a large object bitmap pointer; this is used for the bitmap swapping in the GC.
- void ReplaceObjectSet(ObjectSet* old_set, ObjectSet* new_set)
+ void ReplaceLargeObjectBitmap(LargeObjectBitmap* old_bitmap, LargeObjectBitmap* new_bitmap)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
explicit HeapBitmap(Heap* heap) : heap_(heap) {}
@@ -60,15 +64,15 @@
void AddContinuousSpaceBitmap(ContinuousSpaceBitmap* bitmap);
void RemoveContinuousSpaceBitmap(ContinuousSpaceBitmap* bitmap);
- void AddDiscontinuousObjectSet(ObjectSet* set);
- void RemoveDiscontinuousObjectSet(ObjectSet* set);
+ void AddLargeObjectBitmap(LargeObjectBitmap* bitmap);
+ void RemoveLargeObjectBitmap(LargeObjectBitmap* bitmap);
// Bitmaps covering continuous spaces.
std::vector<ContinuousSpaceBitmap*, GcAllocator<ContinuousSpaceBitmap*>>
continuous_space_bitmaps_;
- // Sets covering discontinuous spaces.
+ // Large object bitmaps covering discontinuous spaces.
- std::vector<ObjectSet*, GcAllocator<ObjectSet*>> discontinuous_space_sets_;
+ std::vector<LargeObjectBitmap*, GcAllocator<LargeObjectBitmap*>> large_object_bitmaps_;
friend class art::gc::Heap;
};
diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc
index 7eed05a..31a1537 100644
--- a/runtime/gc/accounting/space_bitmap.cc
+++ b/runtime/gc/accounting/space_bitmap.cc
@@ -25,22 +25,35 @@
const std::string& name, MemMap* mem_map, byte* heap_begin, size_t heap_capacity) {
CHECK(mem_map != nullptr);
uword* bitmap_begin = reinterpret_cast<uword*>(mem_map->Begin());
- size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
+ const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerWord;
+ size_t bitmap_size = (RoundUp(static_cast<uint64_t>(heap_capacity), kBytesCoveredPerWord) /
+ kBytesCoveredPerWord) * kWordSize;
return new SpaceBitmap(name, mem_map, bitmap_begin, bitmap_size, heap_begin);
}
template<size_t kAlignment>
+SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin,
+ size_t bitmap_size, const void* heap_begin)
+ : mem_map_(mem_map), bitmap_begin_(bitmap_begin), bitmap_size_(bitmap_size),
+ heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
+ name_(name) {
+ CHECK(bitmap_begin_ != nullptr);
+ CHECK_NE(bitmap_size, 0U);
+}
+
+template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
const std::string& name, byte* heap_begin, size_t heap_capacity) {
- CHECK(heap_begin != NULL);
// Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
- size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
+ const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerWord;
+ size_t bitmap_size = (RoundUp(static_cast<uint64_t>(heap_capacity), kBytesCoveredPerWord) /
+ kBytesCoveredPerWord) * kWordSize;
std::string error_msg;
- UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), NULL, bitmap_size,
+ UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), nullptr, bitmap_size,
PROT_READ | PROT_WRITE, false, &error_msg));
if (UNLIKELY(mem_map.get() == nullptr)) {
LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
- return NULL;
+ return nullptr;
}
return CreateFromMemMap(name, mem_map.release(), heap_begin, heap_capacity);
}
@@ -68,13 +81,13 @@
}
template<size_t kAlignment>
-inline void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
+void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
DCHECK_EQ(Size(), source_bitmap->Size());
std::copy(source_bitmap->Begin(), source_bitmap->Begin() + source_bitmap->Size() / kWordSize, Begin());
}
template<size_t kAlignment>
-inline void SpaceBitmap<kAlignment>::Walk(ObjectCallback* callback, void* arg) {
+void SpaceBitmap<kAlignment>::Walk(ObjectCallback* callback, void* arg) {
CHECK(bitmap_begin_ != NULL);
CHECK(callback != NULL);
@@ -96,11 +109,11 @@
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
- const SpaceBitmap<kAlignment>& mark_bitmap,
- uintptr_t sweep_begin, uintptr_t sweep_end,
- SpaceBitmap::SweepCallback* callback, void* arg) {
- CHECK(live_bitmap.bitmap_begin_ != NULL);
- CHECK(mark_bitmap.bitmap_begin_ != NULL);
+ const SpaceBitmap<kAlignment>& mark_bitmap,
+ uintptr_t sweep_begin, uintptr_t sweep_end,
+ SpaceBitmap::SweepCallback* callback, void* arg) {
+ CHECK(live_bitmap.bitmap_begin_ != nullptr);
+ CHECK(mark_bitmap.bitmap_begin_ != nullptr);
CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
CHECK(callback != NULL);
@@ -170,8 +183,8 @@
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::WalkFieldsInOrder(SpaceBitmap<kAlignment>* visited,
- ObjectCallback* callback,
- mirror::Object* obj, void* arg) {
+ ObjectCallback* callback, mirror::Object* obj,
+ void* arg) {
if (visited->Test(obj)) {
return;
}
@@ -232,12 +245,6 @@
}
}
-void ObjectSet::Walk(ObjectCallback* callback, void* arg) {
- for (const mirror::Object* obj : contained_) {
- callback(const_cast<mirror::Object*>(obj), arg);
- }
-}
-
template class SpaceBitmap<kObjectAlignment>;
template class SpaceBitmap<kPageSize>;
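
The size computation above follows from each bitmap word covering
kAlignment * kBitsPerWord bytes of heap; the round-up is now done in
64 bits so a capacity near the top of a 32-bit address space cannot
overflow. A worked sketch, assuming 32-bit words (kWordSize == 4,
kBitsPerWord == 32):

    // Sketch only: bitmap bytes needed to cover heap_capacity bytes.
    uint64_t BitmapSize(uint64_t heap_capacity, uint64_t alignment) {
      const uint64_t bytes_covered_per_word = alignment * 32;  // kBitsPerWord
      const uint64_t words =
          (heap_capacity + bytes_covered_per_word - 1) / bytes_covered_per_word;
      return words * 4;  // kWordSize
    }
    // BitmapSize(16 * MB, 8)    == 256 KB: one bit per 8-byte object slot.
    // BitmapSize(16 * MB, 4096) == 512 B : one bit per page for the LOS.
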
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h
index b90a799..df3fd37 100644
--- a/runtime/gc/accounting/space_bitmap.h
+++ b/runtime/gc/accounting/space_bitmap.h
@@ -198,10 +198,7 @@
// TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
// however, we document that this is expected on heap_end_
SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin, size_t bitmap_size,
- const void* heap_begin)
- : mem_map_(mem_map), bitmap_begin_(bitmap_begin), bitmap_size_(bitmap_size),
- heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
- name_(name) {}
+ const void* heap_begin);
template<bool kSetBit>
bool Modify(const mirror::Object* obj);
@@ -232,71 +229,7 @@
std::string name_;
};
-// Like a bitmap except it keeps track of objects using sets.
-class ObjectSet {
- public:
- typedef std::set<
- const mirror::Object*, std::less<const mirror::Object*>,
- GcAllocator<const mirror::Object*> > Objects;
-
- bool IsEmpty() const {
- return contained_.empty();
- }
-
- inline void Set(const mirror::Object* obj) {
- contained_.insert(obj);
- }
-
- inline void Clear(const mirror::Object* obj) {
- Objects::iterator found = contained_.find(obj);
- if (found != contained_.end()) {
- contained_.erase(found);
- }
- }
-
- void Clear() {
- contained_.clear();
- }
-
- inline bool Test(const mirror::Object* obj) const {
- return contained_.find(obj) != contained_.end();
- }
-
- const std::string& GetName() const {
- return name_;
- }
-
- void SetName(const std::string& name) {
- name_ = name;
- }
-
- void CopyFrom(const ObjectSet& space_set) {
- contained_ = space_set.contained_;
- }
-
- void Walk(ObjectCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-
- template <typename Visitor>
- void Visit(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS {
- for (const mirror::Object* obj : contained_) {
- visitor(const_cast<mirror::Object*>(obj));
- }
- }
-
- explicit ObjectSet(const std::string& name) : name_(name) {}
- ~ObjectSet() {}
-
- Objects& GetObjects() {
- return contained_;
- }
-
- private:
- std::string name_;
- Objects contained_;
-};
-
typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
-// TODO: Replace usage of ObjectSet with LargeObjectBitmap.
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;
template<size_t kAlignment>
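
Dropping ObjectSet is what removes the lock from the large object
path: marking into a std::set required mutual exclusion plus an
O(log n) tree insert, while marking into a SpaceBitmap<kPageSize> is a
single CAS on one word. Roughly, and only as a paraphrased
before/after sketch (variable names are illustrative):

    // Before (paraphrased): every large object mark serialized on a lock.
    //   WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    //   large_object_set->Set(obj);                 // tree insert, O(log n)
    // After: lock-free, constant time.
    //   large_object_bitmap->AtomicTestAndSet(obj); // CAS on one bitmap word
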
diff --git a/runtime/gc/accounting/space_bitmap_test.cc b/runtime/gc/accounting/space_bitmap_test.cc
index 7c18052..972f94d 100644
--- a/runtime/gc/accounting/space_bitmap_test.cc
+++ b/runtime/gc/accounting/space_bitmap_test.cc
@@ -110,7 +110,8 @@
uint32_t val_;
};
-void compat_test() NO_THREAD_SAFETY_ANALYSIS {
+template <size_t kAlignment>
+void RunTest() NO_THREAD_SAFETY_ANALYSIS {
byte* heap_begin = reinterpret_cast<byte*>(0x10000000);
size_t heap_capacity = 16 * MB;
@@ -123,7 +124,7 @@
- ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
+ SpaceBitmap<kAlignment>::Create("test bitmap", heap_begin, heap_capacity));
for (int j = 0; j < 10000; ++j) {
- size_t offset = (r.next() % heap_capacity) & ~(0x7);
+ size_t offset = RoundDown(r.next() % heap_capacity, kAlignment);
bool set = r.next() % 2 == 1;
if (set) {
@@ -137,15 +138,15 @@
size_t count = 0;
SimpleCounter c(&count);
- size_t offset = (r.next() % heap_capacity) & ~(0x7);
+ size_t offset = RoundDown(r.next() % heap_capacity, kAlignment);
size_t remain = heap_capacity - offset;
- size_t end = offset + ((r.next() % (remain + 1)) & ~(0x7));
+ size_t end = offset + RoundDown(r.next() % (remain + 1), kAlignment);
space_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(heap_begin) + offset,
reinterpret_cast<uintptr_t>(heap_begin) + end, c);
size_t manual = 0;
- for (uintptr_t k = offset; k < end; k += kObjectAlignment) {
+ for (uintptr_t k = offset; k < end; k += kAlignment) {
if (space_bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + k))) {
manual++;
}
@@ -156,8 +157,12 @@
}
}
-TEST_F(SpaceBitmapTest, Visitor) {
- compat_test();
+TEST_F(SpaceBitmapTest, VisitorObjectAlignment) {
+ RunTest<kObjectAlignment>();
+}
+
+TEST_F(SpaceBitmapTest, VisitorPageAlignment) {
+ RunTest<kPageSize>();
}
} // namespace accounting
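
The RoundDown changes are what let one test body serve both
instantiations: x & ~(0x7) hard-codes 8-byte rounding, while
RoundDown(x, kAlignment) works for any power-of-two alignment. A
sketch of the equivalence:

    // Sketch only: for power-of-two alignment, RoundDown is a mask.
    size_t RoundDownPow2(size_t x, size_t alignment) {
      return x & ~(alignment - 1);
    }
    // RoundDownPow2(8191, 8)    == 8184  (same as 8191 & ~0x7)
    // RoundDownPow2(8191, 4096) == 4096  (one page)
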