Print more diagnostic info on a to-space invariant violation.

Pass the method/field (via a GcRootSource) down to the read barrier so
that it can print more information when a to-space invariant violation
is detected on a method/field GC root access.
Refactor ConcurrentCopying::AssertToSpaceInvariant().

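The diff below covers only runtime/read_barrier-inl.h. As a rough
sketch (a hypothetical reconstruction, not part of this diff), the
GcRootSource threaded through here can be thought of as a small
immutable holder identifying the originating ArtField or ArtMethod:

  // Hypothetical sketch; the exact names and layout are assumptions
  // inferred from the signatures in the diff below, not code from this
  // change.
  class ArtField;
  class ArtMethod;

  class GcRootSource {
   public:
    GcRootSource() : field_(nullptr), method_(nullptr) {}
    explicit GcRootSource(ArtField* field) : field_(field), method_(nullptr) {}
    explicit GcRootSource(ArtMethod* method) : field_(nullptr), method_(method) {}
    bool HasArtField() const { return field_ != nullptr; }
    bool HasArtMethod() const { return method_ != nullptr; }
    ArtField* GetArtField() const { return field_; }
    ArtMethod* GetArtMethod() const { return method_; }

   private:
    ArtField* const field_;    // Non-null when the root is a field GC root.
    ArtMethod* const method_;  // Non-null when the root is a method GC root.
  };
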
Bug: 12687968
Bug: 21564728
Change-Id: I3a5fde1f41969349b0fee6cd9217b948d5241a7c

diff --git a/runtime/read_barrier-inl.h b/runtime/read_barrier-inl.h
index 8d84c35..7014813 100644
--- a/runtime/read_barrier-inl.h
+++ b/runtime/read_barrier-inl.h
@@ -74,7 +74,8 @@
}

template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kMaybeDuringStartup>
-inline MirrorType* ReadBarrier::BarrierForRoot(MirrorType** root) {
+inline MirrorType* ReadBarrier::BarrierForRoot(MirrorType** root,
+ GcRootSource* gc_root_source) {
MirrorType* ref = *root;
const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
if (with_read_barrier && kUseBakerReadBarrier) {
@@ -87,7 +88,7 @@
if (Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking()) {
ref = reinterpret_cast<MirrorType*>(Mark(ref));
}
- AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
+ AssertToSpaceInvariant(gc_root_source, ref);
return ref;
} else if (with_read_barrier && kUseBrooksReadBarrier) {
// To be implemented.
@@ -105,7 +106,7 @@
Atomic<mirror::Object*>* atomic_root = reinterpret_cast<Atomic<mirror::Object*>*>(root);
atomic_root->CompareExchangeStrongSequentiallyConsistent(old_ref, ref);
}
- AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
+ AssertToSpaceInvariant(gc_root_source, ref);
return ref;
} else {
return ref;
@@ -114,7 +115,8 @@

// TODO: Reduce copy paste
template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kMaybeDuringStartup>
-inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<MirrorType>* root) {
+inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
+ GcRootSource* gc_root_source) {
MirrorType* ref = root->AsMirrorPtr();
const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
if (with_read_barrier && kUseBakerReadBarrier) {
@@ -127,7 +129,7 @@
if (Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking()) {
ref = reinterpret_cast<MirrorType*>(Mark(ref));
}
- AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
+ AssertToSpaceInvariant(gc_root_source, ref);
return ref;
} else if (with_read_barrier && kUseBrooksReadBarrier) {
// To be implemented.
@@ -147,7 +149,7 @@
reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);
atomic_root->CompareExchangeStrongSequentiallyConsistent(old_ref, new_ref);
}
- AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
+ AssertToSpaceInvariant(gc_root_source, ref);
return ref;
} else {
return ref;
@@ -183,6 +185,17 @@
}
}

+inline void ReadBarrier::AssertToSpaceInvariant(GcRootSource* gc_root_source,
+ mirror::Object* ref) {
+ if (kEnableToSpaceInvariantChecks || kIsDebugBuild) {
+ if (ref == nullptr || IsDuringStartup()) {
+ return;
+ }
+ Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
+ AssertToSpaceInvariant(gc_root_source, ref);
+ }
+}
+
inline mirror::Object* ReadBarrier::Mark(mirror::Object* obj) {
return Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->Mark(obj);
}
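
Not shown in this truncated diff: callers have to thread a
GcRootSource into BarrierForRoot(). Assuming GcRoot<T>::Read() gains
an optional GcRootSource* parameter that it forwards to
ReadBarrier::BarrierForRoot() (an assumption; that part of the change
is not included here), a field accessor could record itself as the
source roughly like this:

  // Hypothetical caller-side sketch: the GcRoot<T>::Read(GcRootSource*)
  // overload is assumed, not shown in the diff above, and declaring_class_
  // is assumed to be a GcRoot<mirror::Class> member of ArtField.
  inline mirror::Class* ArtField::GetDeclaringClass() {
    GcRootSource gc_root_source(this);  // Tag the root with its owning field.
    mirror::Class* result = declaring_class_.Read(&gc_root_source);
    return result;
  }

On a violation, the refactored
ConcurrentCopying::AssertToSpaceInvariant(GcRootSource*, mirror::Object*)
overload (also outside this diff) can then report which field or method
the bad root came from instead of only printing the reference itself.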