Move most mirror:: args to ObjPtr
Fixed possible moving GC bugs in ClinitImageUpdate class.
Bug: 31113334
Test: test-art-host
Change-Id: I0bf6578553d58b944aaa17665f1350bdf5ed15ec
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index dabb6da..3dee974 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -354,14 +354,14 @@
explicit VerifyGrayImmuneObjectsVisitor(ConcurrentCopying* collector)
: collector_(collector) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */)
+ void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, bool /* is_static */)
const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
CheckReference(obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset),
obj, offset);
}
- void operator()(mirror::Class* klass, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
CHECK(klass->IsTypeOfReferenceClass());
CheckReference(ref->GetReferent<kWithoutReadBarrier>(),
@@ -386,13 +386,15 @@
private:
ConcurrentCopying* const collector_;
- void CheckReference(mirror::Object* ref, mirror::Object* holder, MemberOffset offset) const
+ void CheckReference(ObjPtr<mirror::Object> ref,
+ ObjPtr<mirror::Object> holder,
+ MemberOffset offset) const
REQUIRES_SHARED(Locks::mutator_lock_) {
if (ref != nullptr) {
- if (!collector_->immune_spaces_.ContainsObject(ref)) {
+ if (!collector_->immune_spaces_.ContainsObject(ref.Ptr())) {
// Not immune, must be a zygote large object.
CHECK(Runtime::Current()->GetHeap()->GetLargeObjectsSpace()->IsZygoteLargeObject(
- Thread::Current(), ref))
+ Thread::Current(), ref.Ptr()))
<< "Non gray object references non immune, non zygote large object "<< ref << " "
<< PrettyTypeOf(ref) << " in holder " << holder << " " << PrettyTypeOf(holder)
<< " offset=" << offset.Uint32Value();
@@ -969,14 +971,17 @@
explicit VerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector)
: collector_(collector) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<mirror::Object> obj,
+ MemberOffset offset,
+ bool is_static ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
mirror::Object* ref =
obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
VerifyNoFromSpaceRefsVisitor visitor(collector_);
visitor(ref);
}
- void operator()(mirror::Class* klass, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
CHECK(klass->IsTypeOfReferenceClass());
this->operator()(ref, mirror::Reference::ReferentOffset(), false);
@@ -1091,14 +1096,16 @@
explicit AssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector)
: collector_(collector) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<mirror::Object> obj,
+ MemberOffset offset,
+ bool is_static ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
mirror::Object* ref =
obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
AssertToSpaceInvariantRefsVisitor visitor(collector_);
visitor(ref);
}
- void operator()(mirror::Class* klass, mirror::Reference* ref ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
CHECK(klass->IsTypeOfReferenceClass());
}
@@ -1780,13 +1787,13 @@
explicit RefFieldsVisitor(ConcurrentCopying* collector)
: collector_(collector) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */)
+ void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, bool /* is_static */)
const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
- collector_->Process(obj, offset);
+ collector_->Process(obj.Ptr(), offset);
}
- void operator()(mirror::Class* klass, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
CHECK(klass->IsTypeOfReferenceClass());
collector_->DelayReferenceReferent(klass, ref);
@@ -2377,7 +2384,8 @@
return Mark(from_ref);
}
-void ConcurrentCopying::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
+void ConcurrentCopying::DelayReferenceReferent(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> reference) {
heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 81ffbc5..5b8a557 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -169,7 +169,8 @@
void SwitchToSharedMarkStackMode() REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!mark_stack_lock_);
void SwitchToGcExclusiveMarkStackMode() REQUIRES_SHARED(Locks::mutator_lock_);
- virtual void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) OVERRIDE
+ virtual void DelayReferenceReferent(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> reference) OVERRIDE
REQUIRES_SHARED(Locks::mutator_lock_);
void ProcessReferences(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
virtual mirror::Object* MarkObject(mirror::Object* from_ref) OVERRIDE
diff --git a/runtime/gc/collector/garbage_collector.h b/runtime/gc/collector/garbage_collector.h
index 4ffa254..5b51399 100644
--- a/runtime/gc/collector/garbage_collector.h
+++ b/runtime/gc/collector/garbage_collector.h
@@ -196,7 +196,8 @@
REQUIRES_SHARED(Locks::mutator_lock_) = 0;
virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj)
REQUIRES_SHARED(Locks::mutator_lock_) = 0;
- virtual void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+ virtual void DelayReferenceReferent(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> reference)
REQUIRES_SHARED(Locks::mutator_lock_) = 0;
protected:
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 6d2f009..e0bf744 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -418,7 +418,7 @@
collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
}
- void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> /*klass*/, mirror::Reference* ref) const
REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
collector_->UpdateHeapReference(
ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
@@ -543,7 +543,8 @@
// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
-void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
+void MarkCompact::DelayReferenceReferent(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> reference) {
heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}
@@ -551,13 +552,16 @@
public:
explicit MarkObjectVisitor(MarkCompact* collector) : collector_(collector) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
+ void operator()(ObjPtr<mirror::Object> obj,
+ MemberOffset offset,
+ bool /*is_static*/) const ALWAYS_INLINE
REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
// Object was already verified when we scanned it.
collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
}
- void operator()(mirror::Class* klass, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::heap_bitmap_lock_) {
collector_->DelayReferenceReferent(klass, ref);
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index a61646c..564f85b 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -122,7 +122,7 @@
OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
// Schedules an unmarked object for reference processing.
- void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+ void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
protected:
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index b89d99c..c05719d 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -272,7 +272,7 @@
explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE
: mark_sweep_(mark_sweep) {}
- void operator()(mirror::Object* obj) const
+ void operator()(ObjPtr<mirror::Object> obj) const
ALWAYS_INLINE
REQUIRES(Locks::heap_bitmap_lock_)
REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -280,7 +280,7 @@
Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
}
- mark_sweep_->ScanObject(obj);
+ mark_sweep_->ScanObject(obj.Ptr());
}
private:
@@ -616,7 +616,7 @@
public:
explicit DelayReferenceReferentVisitor(MarkSweep* collector) : collector_(collector) {}
- void operator()(mirror::Class* klass, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES(Locks::heap_bitmap_lock_)
REQUIRES_SHARED(Locks::mutator_lock_) {
collector_->DelayReferenceReferent(klass, ref);
@@ -1297,9 +1297,9 @@
}
}
-// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
+// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
-void MarkSweep::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* ref) {
+void MarkSweep::DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) {
heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, ref, this);
}
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index bbac9da..19c2e9a 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -225,7 +225,7 @@
}
// Schedules an unmarked object for reference processing.
- void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+ void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
protected:
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 76a478e..2cb1767 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -679,7 +679,8 @@
// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
-void SemiSpace::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
+void SemiSpace::DelayReferenceReferent(ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> reference) {
heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}
@@ -687,13 +688,13 @@
public:
explicit MarkObjectVisitor(SemiSpace* collector) : collector_(collector) {}
- void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const ALWAYS_INLINE
+ void operator()(ObjPtr<Object> obj, MemberOffset offset, bool /* is_static */) const ALWAYS_INLINE
REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
// Object was already verified when we scanned it.
collector_->MarkObject(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
}
- void operator()(mirror::Class* klass, mirror::Reference* ref) const
+ void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
collector_->DelayReferenceReferent(klass, ref);
}
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index 4b63d9b..4cebcc3 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -156,7 +156,7 @@
REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
// Schedules an unmarked object for reference processing.
- void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+ void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
protected: