Fix a race condition between DelayReferenceReferent and Reference.clear():
without an atomic update, the GC could reinstall a forwarded referent into
a field that a mutator had concurrently cleared.
Rename IsMarkedHeapReference to IsNullOrMarkedHeapReference.
Move the null check from the caller of IsMarkedHeapReference into
IsNullOrMarkedHeapReference.
Make sure that the referent is loaded only once between the null
check and the IsMarked call.
Use a CAS in ConcurrentCopying::IsNullOrMarkedHeapReference when
called from DelayReferenceReferent.
Bug: 33389022
Test: test-art-host without and with CC.
Change-Id: I20edab4dac2a4bb02dbb72af0f09de77b55ac08e
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index b889913..741d1da 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -2385,16 +2385,29 @@
}
}
-bool ConcurrentCopying::IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* field) {
+bool ConcurrentCopying::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* field,
+ bool do_atomic_update) {
mirror::Object* from_ref = field->AsMirrorPtr();
+ if (from_ref == nullptr) {
+ return true;
+ }
mirror::Object* to_ref = IsMarked(from_ref);
if (to_ref == nullptr) {
return false;
}
if (from_ref != to_ref) {
- QuasiAtomic::ThreadFenceRelease();
- field->Assign(to_ref);
- QuasiAtomic::ThreadFenceSequentiallyConsistent();
+ if (do_atomic_update) {
+ do {
+ if (field->AsMirrorPtr() != from_ref) {
+ // Concurrently overwritten by a mutator.
+ break;
+ }
+ } while (!field->CasWeakRelaxed(from_ref, to_ref));
+ } else {
+ QuasiAtomic::ThreadFenceRelease();
+ field->Assign(to_ref);
+ QuasiAtomic::ThreadFenceSequentiallyConsistent();
+ }
}
return true;
}
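
For illustration, here is a minimal standalone analogue of the
do_atomic_update path above, using std::atomic<Object*> in place of ART's
mirror::HeapReference and its CasWeakRelaxed. The Object type and the
InstallForwardedRef name are hypothetical stand-ins for this sketch, not
ART APIs.

#include <atomic>

struct Object {};  // hypothetical stand-in for mirror::Object

// Install to_ref only while the field still holds from_ref. If a mutator
// (e.g. via Reference.clear()) has concurrently overwritten the field,
// back off and leave the mutator's value in place.
bool InstallForwardedRef(std::atomic<Object*>& field,
                         Object* from_ref,
                         Object* to_ref) {
  Object* expected = from_ref;
  // compare_exchange_weak may fail spuriously, so loop; keep retrying
  // only while the observed value is still from_ref.
  while (!field.compare_exchange_weak(expected, to_ref,
                                      std::memory_order_relaxed)) {
    if (expected != from_ref) {
      return false;  // concurrently overwritten by a mutator
    }
    // Spurious failure: expected still equals from_ref; retry the CAS.
  }
  return true;
}

As in the diff, a weak CAS suffices because the surrounding loop already
tolerates failure, and the relaxed ordering mirrors CasWeakRelaxed.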