Visit class native roots from VisitReferences

Visit class native roots from Class::VisitReferences instead of in the
class linker. This makes it easier to implement class unloading, since
unmarked classes will no longer have their roots visited by the class
linker.

Bug: 22181835
Change-Id: I63f31e5ebef7b2a0b764b3ba3cb038b3f561b379
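
To illustrate the idea, here is a minimal, self-contained C++ sketch. It is
not the ART implementation: Klass, NativeRoot and CountingVisitor are made-up
names standing in for mirror::Class, mirror::CompressedReference and a GC
visitor. It only shows the shape of the change: an object's VisitReferences
reports its native roots alongside its ordinary references, so a class that is
never visited (for example because it is unmarked) never exposes those roots.

#include <cstddef>
#include <iostream>
#include <vector>

struct Object;

// Stand-in for a compressed reference held in native data structures.
struct NativeRoot {
  Object* ref = nullptr;
  bool IsNull() const { return ref == nullptr; }
};

struct Object {
  std::vector<Object*> fields;  // ordinary heap references
};

struct Klass : Object {
  std::vector<NativeRoot> native_roots;  // roots held outside the heap object

  // Report heap references and, in the same pass, the native roots.
  template <typename Visitor>
  void VisitReferences(const Visitor& visitor) {
    for (Object*& field : fields) {
      visitor(this, &field);
    }
    for (NativeRoot& root : native_roots) {
      visitor.VisitRootIfNonNull(&root);
    }
  }
};

// Visitor in the same style as the remembered-set visitor below:
// operator() for fields, VisitRoot/VisitRootIfNonNull for native roots.
struct CountingVisitor {
  mutable size_t refs = 0;
  mutable size_t roots = 0;

  void operator()(Object* /*holder*/, Object** /*field*/) const { ++refs; }

  void VisitRootIfNonNull(NativeRoot* root) const {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(NativeRoot* /*root*/) const { ++roots; }
};

int main() {
  Object referent, native_referent;
  Klass klass;
  klass.fields.push_back(&referent);
  klass.native_roots.push_back(NativeRoot{&native_referent});
  klass.native_roots.push_back(NativeRoot{});  // null root, skipped

  CountingVisitor visitor;
  klass.VisitReferences(visitor);  // roots reached only because klass is visited
  std::cout << visitor.refs << " field refs, " << visitor.roots << " native roots\n";
}

In this sketch, as in the change, the roots are reachable through the class
object itself rather than through a separate class-linker walk, which is what
the class-unloading argument above relies on.
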
diff --git a/runtime/gc/accounting/remembered_set.cc b/runtime/gc/accounting/remembered_set.cc
index 994a0ad..251b50d 100644
--- a/runtime/gc/accounting/remembered_set.cc
+++ b/runtime/gc/accounting/remembered_set.cc
@@ -67,7 +67,7 @@
: collector_(collector), target_space_(target_space),
contains_reference_to_target_space_(contains_reference_to_target_space) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
+ void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
SHARED_REQUIRES(Locks::mutator_lock_) {
DCHECK(obj != nullptr);
mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
@@ -79,14 +79,25 @@
}
void operator()(mirror::Class* klass, mirror::Reference* ref) const
- SHARED_REQUIRES(Locks::mutator_lock_)
- REQUIRES(Locks::heap_bitmap_lock_) {
+ SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
if (target_space_->HasAddress(ref->GetReferent())) {
*contains_reference_to_target_space_ = true;
collector_->DelayReferenceReferent(klass, ref);
}
}
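+ // Native roots are only sanity-checked here: the remembered set does not
+ // expect them to point into the target space, so outside of debug builds
+ // the visits below are no-ops.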
+ void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ if (kIsDebugBuild && !root->IsNull()) {
+ VisitRoot(root);
+ }
+ }
+
+ void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ DCHECK(!target_space_->HasAddress(root->AsMirrorPtr()));
+ }
+
private:
collector::GarbageCollector* const collector_;
space::ContinuousSpace* const target_space_;