Compacting collector.
The compacting collector is currently similar to semispace. It works by
copying objects back and forth between two bump pointer spaces. There
are types of objects which are "non-movable" due to current runtime
limitations. These are Classes, Methods, and Fields.
Bump pointer spaces are a new type of continuous alloc space which has
no lock in the allocation code path. When you allocate from these it uses
atomic operations to increase an index. Traversing the objects in the bump
pointer space relies on Object::SizeOf matching the allocated size exactly.
Runtime changes:
JNI::GetArrayElements returns a copy of the data if you attempt to get the
backing data of a movable array. For GetArrayElementsCritical, we return
direct backing storage for any type of array, but temporarily disable
the GC until the critical region is completed.
Added a new runtime call, VisitObjects; it is used in place of
the old pattern of flushing the allocation stack and walking
the bitmaps.
Changed image writer to be compaction safe and use object monitor word
for forwarding addresses.
Added a bunch of SIRTs to ClassLinker, MethodLinker, etc..
TODO: Enable switching allocators, compacting on background, etc..
Bug: 8981901
Change-Id: I3c886fd322a6eef2b99388d19a765042ec26ab99
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 3bc014a..cc58412 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -114,7 +114,7 @@
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool IsImmuneSpace(const space::ContinuousSpace* space)
+ bool IsImmuneSpace(const space::ContinuousSpace* space) const;
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Bind the live bits to the mark bits of bitmaps for spaces that are never collected, ie
@@ -140,6 +140,7 @@
void ProcessReferences(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Update and mark references from immune spaces.
virtual void UpdateAndMarkModUnion()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -158,7 +159,7 @@
}
// Blackens an object.
- void ScanObject(const mirror::Object* obj)
+ void ScanObject(mirror::Object* obj)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -167,38 +168,6 @@
void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
NO_THREAD_SAFETY_ANALYSIS;
- size_t GetFreedBytes() const {
- return freed_bytes_;
- }
-
- size_t GetFreedLargeObjectBytes() const {
- return freed_large_object_bytes_;
- }
-
- size_t GetFreedObjects() const {
- return freed_objects_;
- }
-
- size_t GetFreedLargeObjects() const {
- return freed_large_objects_;
- }
-
- uint64_t GetTotalTimeNs() const {
- return total_time_ns_;
- }
-
- uint64_t GetTotalPausedTimeNs() const {
- return total_paused_time_ns_;
- }
-
- uint64_t GetTotalFreedObjects() const {
- return total_freed_objects_;
- }
-
- uint64_t GetTotalFreedBytes() const {
- return total_freed_bytes_;
- }
-
// Everything inside the immune range is assumed to be marked.
void SetImmuneRange(mirror::Object* begin, mirror::Object* end);
@@ -216,8 +185,7 @@
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
template <typename Visitor>
- static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor,
- bool visit_class = false)
+ static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
Locks::mutator_lock_);
@@ -395,9 +363,6 @@
// object.
accounting::SpaceBitmap* current_mark_bitmap_;
- // Cache java.lang.Class for optimization.
- mirror::Class* java_lang_Class_;
-
accounting::ObjectStack* mark_stack_;
// Immune range, every object inside the immune range is assumed to be marked.
@@ -412,14 +377,6 @@
// Parallel finger.
AtomicInteger atomic_finger_;
- // Number of non large object bytes freed in this collection.
- AtomicInteger freed_bytes_;
- // Number of large object bytes freed.
- AtomicInteger freed_large_object_bytes_;
- // Number of objects freed in this collection.
- AtomicInteger freed_objects_;
- // Number of freed large objects.
- AtomicInteger freed_large_objects_;
// Number of classes scanned, if kCountScannedTypes.
AtomicInteger class_count_;
// Number of arrays scanned, if kCountScannedTypes.
@@ -443,8 +400,6 @@
const bool is_concurrent_;
- bool clear_soft_references_;
-
private:
friend class AddIfReachesAllocSpaceVisitor; // Used by mod-union table.
friend class CardScanTask;