Fix remaining read barrier issues in image relocation
Added a way to disallow read barriers; this makes it easy to find
the issues.
Bug: 26786304
Change-Id: I7ebb50832686d03e096a979aae9741239371683f
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 08b1a00..d185e63 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -867,20 +867,20 @@
if (obj->IsClass<kVerifyNone, kWithoutReadBarrier>()) {
mirror::Class* klass = obj->AsClass<kVerifyNone, kWithoutReadBarrier>();
FixupObjectAdapter visitor(boot_image_, boot_oat_, app_image_, app_oat_);
- klass->FixupNativePointers(klass, sizeof(void*), visitor);
+ klass->FixupNativePointers<kVerifyNone, kWithoutReadBarrier>(klass, sizeof(void*), visitor);
// Deal with the arrays.
mirror::PointerArray* vtable = klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
if (vtable != nullptr) {
- vtable->Fixup(vtable, sizeof(void*), visitor);
+ vtable->Fixup<kVerifyNone, kWithoutReadBarrier>(vtable, sizeof(void*), visitor);
}
mirror::IfTable* iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
if (iftable != nullptr) {
- for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
- if (iftable->GetMethodArrayCount(i) > 0) {
+ for (int32_t i = 0, count = iftable->Count(); i < count; ++i) {
+ if (iftable->GetMethodArrayCount<kVerifyNone, kWithoutReadBarrier>(i) > 0) {
mirror::PointerArray* methods =
iftable->GetMethodArray<kVerifyNone, kWithoutReadBarrier>(i);
DCHECK(methods != nullptr);
- methods->Fixup(methods, sizeof(void*), visitor);
+ methods->Fixup<kVerifyNone, kWithoutReadBarrier>(methods, sizeof(void*), visitor);
}
}
}
@@ -1014,6 +1014,7 @@
// Nothing to fix up.
return true;
}
+ ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
// Need to update the image to be at the target base.
const ImageSection& objects_section = image_header.GetImageSection(ImageHeader::kSectionObjects);
uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
@@ -1039,7 +1040,7 @@
CHECK_EQ(image_header.GetImageBegin(), target_base);
// Fix up dex cache DexFile pointers.
auto* dex_caches = image_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kDexCaches)->
- AsObjectArray<mirror::DexCache>();
+ AsObjectArray<mirror::DexCache, kVerifyNone, kWithoutReadBarrier>();
for (int32_t i = 0, count = dex_caches->GetLength(); i < count; ++i) {
mirror::DexCache* dex_cache = dex_caches->Get<kVerifyNone, kWithoutReadBarrier>(i);
// Fix up dex cache pointers.
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index b3439f7..c6fa15d 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -370,15 +370,17 @@
}
}
-template<typename T>
+template<typename T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline T PointerArray::GetElementPtrSize(uint32_t idx, size_t ptr_size) {
// C style casts here since we sometimes have T be a pointer, or sometimes an integer
// (for stack traces).
if (ptr_size == 8) {
- return (T)static_cast<uintptr_t>(AsLongArray()->GetWithoutChecks(idx));
+ return (T)static_cast<uintptr_t>(
+ AsLongArray<kVerifyFlags, kReadBarrierOption>()->GetWithoutChecks(idx));
}
DCHECK_EQ(ptr_size, 4u);
- return (T)static_cast<uintptr_t>(AsIntArray()->GetWithoutChecks(idx));
+ return (T)static_cast<uintptr_t>(
+ AsIntArray<kVerifyFlags, kReadBarrierOption>()->GetWithoutChecks(idx));
}
template<bool kTransactionActive, bool kUnchecked>
@@ -401,12 +403,12 @@
ptr_size);
}
-template <typename Visitor>
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void PointerArray::Fixup(mirror::PointerArray* dest,
size_t pointer_size,
const Visitor& visitor) {
for (size_t i = 0, count = GetLength(); i < count; ++i) {
- void* ptr = GetElementPtrSize<void*>(i, pointer_size);
+ void* ptr = GetElementPtrSize<void*, kVerifyFlags, kReadBarrierOption>(i, pointer_size);
void* new_ptr = visitor(ptr);
if (ptr != new_ptr) {
dest->SetElementPtrSize<false, true>(i, new_ptr, pointer_size);
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 2bd6c5b..9a21ec2 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -183,7 +183,9 @@
// Either an IntArray or a LongArray.
class PointerArray : public Array {
public:
- template<typename T>
+ template<typename T,
+ VerifyObjectFlags kVerifyFlags = kVerifyNone,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
T GetElementPtrSize(uint32_t idx, size_t ptr_size)
SHARED_REQUIRES(Locks::mutator_lock_);
@@ -196,7 +198,9 @@
// Fixup the pointers in the dest arrays by passing our pointers through the visitor. Only copies
// to dest if visitor(source_ptr) != source_ptr.
- template <typename Visitor>
+ template <VerifyObjectFlags kVerifyFlags = kVerifyNone,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void Fixup(mirror::PointerArray* dest, size_t pointer_size, const Visitor& visitor)
SHARED_REQUIRES(Locks::mutator_lock_);
};
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index d5783c0..422832e 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -253,14 +253,16 @@
EmbeddedImTableOffset(pointer_size).Uint32Value() + i * ImTableEntrySize(pointer_size));
}
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i, size_t pointer_size) {
- DCHECK(ShouldHaveEmbeddedImtAndVTable());
+ DCHECK((ShouldHaveEmbeddedImtAndVTable<kVerifyFlags, kReadBarrierOption>()));
return GetFieldPtrWithSize<ArtMethod*>(
EmbeddedImTableEntryOffset(i, pointer_size), pointer_size);
}
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size) {
- DCHECK(ShouldHaveEmbeddedImtAndVTable());
+ DCHECK((ShouldHaveEmbeddedImtAndVTable<kVerifyFlags, kReadBarrierOption>()));
SetFieldPtrWithSize<false>(EmbeddedImTableEntryOffset(i, pointer_size), method, pointer_size);
}
@@ -538,10 +540,11 @@
: ClassOffset();
}
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline MemberOffset Class::GetFirstReferenceStaticFieldOffset(size_t pointer_size) {
DCHECK(IsResolved());
uint32_t base = sizeof(mirror::Class); // Static fields come after the class.
- if (ShouldHaveEmbeddedImtAndVTable()) {
+ if (ShouldHaveEmbeddedImtAndVTable<kVerifyFlags, kReadBarrierOption>()) {
// Static fields come after the embedded tables.
base = mirror::Class::ComputeClassSize(
true, GetEmbeddedVTableLength(), 0, 0, 0, 0, 0, pointer_size);
@@ -1057,7 +1060,7 @@
return arr != nullptr ? arr->size() : 0u;
}
-template <typename Visitor>
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void Class::FixupNativePointers(mirror::Class* dest,
size_t pointer_size,
const Visitor& visitor) {
@@ -1085,7 +1088,7 @@
dest->SetDexCacheStrings(new_strings);
}
// Fix up embedded tables.
- if (!IsTemp() && ShouldHaveEmbeddedImtAndVTable()) {
+ if (!IsTemp() && ShouldHaveEmbeddedImtAndVTable<kVerifyNone, kReadBarrierOption>()) {
for (int32_t i = 0, count = GetEmbeddedVTableLength(); i < count; ++i) {
ArtMethod* method = GetEmbeddedVTableEntry(i, pointer_size);
ArtMethod* new_method = visitor(method);
@@ -1094,10 +1097,13 @@
}
}
for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
- ArtMethod* method = GetEmbeddedImTableEntry(i, pointer_size);
+ ArtMethod* method = GetEmbeddedImTableEntry<kVerifyFlags, kReadBarrierOption>(i,
+ pointer_size);
ArtMethod* new_method = visitor(method);
if (method != new_method) {
- dest->SetEmbeddedImTableEntry(i, new_method, pointer_size);
+ dest->SetEmbeddedImTableEntry<kVerifyFlags, kReadBarrierOption>(i,
+ new_method,
+ pointer_size);
}
}
}
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index be5c668..8fa4975 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -444,7 +444,6 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
-
bool IsArrayClass() SHARED_REQUIRES(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -489,9 +488,11 @@
return !IsPrimitive() && !IsInterface() && !IsAbstract() && !IsArrayClass();
}
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsInstantiable() SHARED_REQUIRES(Locks::mutator_lock_) {
return (!IsPrimitive() && !IsInterface() && !IsAbstract()) ||
- (IsAbstract() && IsArrayClass());
+ (IsAbstract() && IsArrayClass<kVerifyFlags, kReadBarrierOption>());
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -810,8 +811,10 @@
return MemberOffset(sizeof(Class));
}
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool ShouldHaveEmbeddedImtAndVTable() SHARED_REQUIRES(Locks::mutator_lock_) {
- return IsInstantiable();
+ return IsInstantiable<kVerifyFlags, kReadBarrierOption>();
}
bool HasVTable() SHARED_REQUIRES(Locks::mutator_lock_);
@@ -820,9 +823,13 @@
static MemberOffset EmbeddedVTableEntryOffset(uint32_t i, size_t pointer_size);
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ArtMethod* GetEmbeddedImTableEntry(uint32_t i, size_t pointer_size)
SHARED_REQUIRES(Locks::mutator_lock_);
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
void SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size)
SHARED_REQUIRES(Locks::mutator_lock_);
@@ -1015,6 +1022,8 @@
}
// Get the offset of the first reference static field. Other reference static fields follow.
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
MemberOffset GetFirstReferenceStaticFieldOffset(size_t pointer_size)
SHARED_REQUIRES(Locks::mutator_lock_);
@@ -1238,7 +1247,9 @@
// the corresponding entry in dest if visitor(obj) != obj to prevent dirty memory. Dest should be
// initialized to a copy of *this to prevent issues. Does not visit the ArtMethod and ArtField
// roots.
- template <typename Visitor>
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void FixupNativePointers(mirror::Class* dest, size_t pointer_size, const Visitor& visitor)
SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index 605deac..d6571f2 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -43,8 +43,11 @@
return method_array;
}
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
size_t GetMethodArrayCount(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_) {
- auto* method_array = down_cast<PointerArray*>(Get((i * kMax) + kMethodArray));
+ auto* method_array = down_cast<PointerArray*>(
+ Get<kVerifyFlags, kReadBarrierOption>((i * kMax) + kMethodArray));
return method_array == nullptr ? 0u : method_array->GetLength();
}
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 760de9a..eb391be 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -255,16 +255,17 @@
return down_cast<Class*>(this);
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsObjectArray() {
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
- return IsArrayInstance<kVerifyFlags>() &&
- !GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitive();
+ return IsArrayInstance<kVerifyFlags, kReadBarrierOption>() &&
+ !GetClass<kNewFlags, kReadBarrierOption>()->
+ template GetComponentType<kNewFlags, kReadBarrierOption>()->IsPrimitive();
}
-template<class T, VerifyObjectFlags kVerifyFlags>
+template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline ObjectArray<T>* Object::AsObjectArray() {
- DCHECK(IsObjectArray<kVerifyFlags>());
+ DCHECK((IsObjectArray<kVerifyFlags, kReadBarrierOption>()));
return down_cast<ObjectArray<T>*>(this);
}
@@ -274,14 +275,14 @@
template IsArrayClass<kVerifyFlags, kReadBarrierOption>();
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsReferenceInstance() {
- return GetClass<kVerifyFlags>()->IsTypeOfReferenceClass();
+ return GetClass<kVerifyFlags, kReadBarrierOption>()->IsTypeOfReferenceClass();
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Reference* Object::AsReference() {
- DCHECK(IsReferenceInstance<kVerifyFlags>());
+ DCHECK((IsReferenceInstance<kVerifyFlags, kReadBarrierOption>()));
return down_cast<Reference*>(this);
}
@@ -341,29 +342,31 @@
return down_cast<ShortArray*>(this);
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsIntArray() {
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
- auto* component_type = GetClass<kVerifyFlags>()->GetComponentType();
+ mirror::Class* klass = GetClass<kVerifyFlags, kReadBarrierOption>();
+ mirror::Class* component_type = klass->GetComponentType<kVerifyFlags, kReadBarrierOption>();
return component_type != nullptr && component_type->template IsPrimitiveInt<kNewFlags>();
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline IntArray* Object::AsIntArray() {
- DCHECK(IsIntArray<kVerifyFlags>());
+ DCHECK((IsIntArray<kVerifyFlags, kReadBarrierOption>()));
return down_cast<IntArray*>(this);
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsLongArray() {
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
- auto* component_type = GetClass<kVerifyFlags>()->GetComponentType();
+ mirror::Class* klass = GetClass<kVerifyFlags, kReadBarrierOption>();
+ mirror::Class* component_type = klass->GetComponentType<kVerifyFlags, kReadBarrierOption>();
return component_type != nullptr && component_type->template IsPrimitiveLong<kNewFlags>();
}
-template<VerifyObjectFlags kVerifyFlags>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline LongArray* Object::AsLongArray() {
- DCHECK(IsLongArray<kVerifyFlags>());
+ DCHECK((IsLongArray<kVerifyFlags, kReadBarrierOption>()));
return down_cast<LongArray*>(this);
}
@@ -1063,7 +1066,7 @@
// Presumably GC can happen when we are cross compiling, it should not cause performance
// problems to do pointer size logic.
MemberOffset field_offset = kIsStatic
- ? klass->GetFirstReferenceStaticFieldOffset(
+ ? klass->GetFirstReferenceStaticFieldOffset<kVerifyFlags, kReadBarrierOption>(
Runtime::Current()->GetClassLinker()->GetImagePointerSize())
: klass->GetFirstReferenceInstanceFieldOffset();
for (size_t i = 0u; i < num_reference_fields; ++i) {
@@ -1123,26 +1126,26 @@
visitor(this, ClassOffset(), false);
const uint32_t class_flags = klass->GetClassFlags<kVerifyNone>();
if (LIKELY(class_flags == kClassFlagNormal)) {
- DCHECK(!klass->IsVariableSize());
- VisitInstanceFieldsReferences(klass, visitor);
+ DCHECK((!klass->IsVariableSize<kVerifyFlags, kReadBarrierOption>()));
+ VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
DCHECK((!klass->IsClassClass<kVerifyFlags, kReadBarrierOption>()));
DCHECK(!klass->IsStringClass());
DCHECK(!klass->IsClassLoaderClass());
- DCHECK(!klass->IsArrayClass());
+ DCHECK((!klass->IsArrayClass<kVerifyFlags, kReadBarrierOption>()));
} else {
if ((class_flags & kClassFlagNoReferenceFields) == 0) {
DCHECK(!klass->IsStringClass());
if (class_flags == kClassFlagClass) {
- DCHECK(klass->IsClassClass());
- AsClass<kVerifyNone>()->VisitReferences<kVisitNativeRoots,
- kVerifyFlags,
- kReadBarrierOption>(klass, visitor);
+ DCHECK((klass->IsClassClass<kVerifyFlags, kReadBarrierOption>()));
+ mirror::Class* as_klass = AsClass<kVerifyNone, kReadBarrierOption>();
+ as_klass->VisitReferences<kVisitNativeRoots, kVerifyFlags, kReadBarrierOption>(klass,
+ visitor);
} else if (class_flags == kClassFlagObjectArray) {
DCHECK((klass->IsObjectArrayClass<kVerifyFlags, kReadBarrierOption>()));
- AsObjectArray<mirror::Object, kVerifyNone>()->VisitReferences(visitor);
+ AsObjectArray<mirror::Object, kVerifyNone, kReadBarrierOption>()->VisitReferences(visitor);
} else if ((class_flags & kClassFlagReference) != 0) {
- VisitInstanceFieldsReferences(klass, visitor);
- ref_visitor(klass, AsReference());
+ VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
+ ref_visitor(klass, AsReference<kVerifyFlags, kReadBarrierOption>());
} else if (class_flags == kClassFlagDexCache) {
mirror::DexCache* const dex_cache = AsDexCache<kVerifyFlags, kReadBarrierOption>();
dex_cache->VisitReferences<kVisitNativeRoots,
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index d635002..3f739df 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -159,9 +159,12 @@
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
Class* AsClass() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsObjectArray() SHARED_REQUIRES(Locks::mutator_lock_);
- template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<class T,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ObjectArray<T>* AsObjectArray() SHARED_REQUIRES(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -199,14 +202,18 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
ShortArray* AsShortSizedArray() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsIntArray() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
IntArray* AsIntArray() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsLongArray() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
LongArray* AsLongArray() SHARED_REQUIRES(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -230,9 +237,11 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
Throwable* AsThrowable() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
Reference* AsReference() SHARED_REQUIRES(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
bool IsWeakReferenceInstance() SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/read_barrier-inl.h b/runtime/read_barrier-inl.h
index 19cf759..0c3eb3b 100644
--- a/runtime/read_barrier-inl.h
+++ b/runtime/read_barrier-inl.h
@@ -32,50 +32,61 @@
inline MirrorType* ReadBarrier::Barrier(
mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr) {
constexpr bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
- if (with_read_barrier && kUseBakerReadBarrier) {
- // The higher bits of the rb_ptr, rb_ptr_high_bits (must be zero)
- // is used to create artificial data dependency from the is_gray
- // load to the ref field (ptr) load to avoid needing a load-load
- // barrier between the two.
- uintptr_t rb_ptr_high_bits;
- bool is_gray = HasGrayReadBarrierPointer(obj, &rb_ptr_high_bits);
- ref_addr = reinterpret_cast<mirror::HeapReference<MirrorType>*>(
- rb_ptr_high_bits | reinterpret_cast<uintptr_t>(ref_addr));
- MirrorType* ref = ref_addr->AsMirrorPtr();
- MirrorType* old_ref = ref;
- if (is_gray) {
- // Slow-path.
- ref = reinterpret_cast<MirrorType*>(Mark(ref));
- // If kAlwaysUpdateField is true, update the field atomically. This may fail if mutator
- // updates before us, but it's ok.
- if (kAlwaysUpdateField && ref != old_ref) {
- obj->CasFieldStrongRelaxedObjectWithoutWriteBarrier<false, false>(
- offset, old_ref, ref);
+ if (kUseReadBarrier && with_read_barrier) {
+ if (kIsDebugBuild) {
+ Thread* const self = Thread::Current();
+ if (self != nullptr) {
+ CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
}
}
- if (kEnableReadBarrierInvariantChecks) {
- CHECK_EQ(rb_ptr_high_bits, 0U) << obj << " rb_ptr=" << obj->GetReadBarrierPointer();
- }
- AssertToSpaceInvariant(obj, offset, ref);
- return ref;
- } else if (with_read_barrier && kUseBrooksReadBarrier) {
- // To be implemented.
- return ref_addr->AsMirrorPtr();
- } else if (with_read_barrier && kUseTableLookupReadBarrier) {
- MirrorType* ref = ref_addr->AsMirrorPtr();
- MirrorType* old_ref = ref;
- // The heap or the collector can be null at startup. TODO: avoid the need for this null check.
- gc::Heap* heap = Runtime::Current()->GetHeap();
- if (heap != nullptr && heap->GetReadBarrierTable()->IsSet(old_ref)) {
- ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
- // Update the field atomically. This may fail if mutator updates before us, but it's ok.
- if (ref != old_ref) {
- obj->CasFieldStrongRelaxedObjectWithoutWriteBarrier<false, false>(
- offset, old_ref, ref);
+ if (kUseBakerReadBarrier) {
+ // The higher bits of the rb_ptr, rb_ptr_high_bits (must be zero)
+ // is used to create artificial data dependency from the is_gray
+ // load to the ref field (ptr) load to avoid needing a load-load
+ // barrier between the two.
+ uintptr_t rb_ptr_high_bits;
+ bool is_gray = HasGrayReadBarrierPointer(obj, &rb_ptr_high_bits);
+ ref_addr = reinterpret_cast<mirror::HeapReference<MirrorType>*>(
+ rb_ptr_high_bits | reinterpret_cast<uintptr_t>(ref_addr));
+ MirrorType* ref = ref_addr->AsMirrorPtr();
+ MirrorType* old_ref = ref;
+ if (is_gray) {
+ // Slow-path.
+ ref = reinterpret_cast<MirrorType*>(Mark(ref));
+ // If kAlwaysUpdateField is true, update the field atomically. This may fail if mutator
+ // updates before us, but it's ok.
+ if (kAlwaysUpdateField && ref != old_ref) {
+ obj->CasFieldStrongRelaxedObjectWithoutWriteBarrier<false, false>(
+ offset, old_ref, ref);
+ }
}
+ if (kEnableReadBarrierInvariantChecks) {
+ CHECK_EQ(rb_ptr_high_bits, 0U) << obj << " rb_ptr=" << obj->GetReadBarrierPointer();
+ }
+ AssertToSpaceInvariant(obj, offset, ref);
+ return ref;
+ } else if (kUseBrooksReadBarrier) {
+ // To be implemented.
+ return ref_addr->AsMirrorPtr();
+ } else if (kUseTableLookupReadBarrier) {
+ MirrorType* ref = ref_addr->AsMirrorPtr();
+ MirrorType* old_ref = ref;
+ // The heap or the collector can be null at startup. TODO: avoid the need for this null check.
+ gc::Heap* heap = Runtime::Current()->GetHeap();
+ if (heap != nullptr && heap->GetReadBarrierTable()->IsSet(old_ref)) {
+ ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
+ // Update the field atomically. This may fail if mutator updates before us, but it's ok.
+ if (ref != old_ref) {
+ obj->CasFieldStrongRelaxedObjectWithoutWriteBarrier<false, false>(
+ offset, old_ref, ref);
+ }
+ }
+ AssertToSpaceInvariant(obj, offset, ref);
+ return ref;
+ } else {
+ LOG(FATAL) << "Unexpected read barrier type";
+ UNREACHABLE();
}
- AssertToSpaceInvariant(obj, offset, ref);
- return ref;
} else {
// No read barrier.
return ref_addr->AsMirrorPtr();
@@ -87,32 +98,43 @@
GcRootSource* gc_root_source) {
MirrorType* ref = *root;
const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
- if (with_read_barrier && kUseBakerReadBarrier) {
- // TODO: separate the read barrier code from the collector code more.
- Thread* self = Thread::Current();
- if (self != nullptr && self->GetIsGcMarking()) {
- ref = reinterpret_cast<MirrorType*>(Mark(ref));
- }
- AssertToSpaceInvariant(gc_root_source, ref);
- return ref;
- } else if (with_read_barrier && kUseBrooksReadBarrier) {
- // To be implemented.
- return ref;
- } else if (with_read_barrier && kUseTableLookupReadBarrier) {
- Thread* self = Thread::Current();
- if (self != nullptr &&
- self->GetIsGcMarking() &&
- Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
- MirrorType* old_ref = ref;
- ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
- // Update the field atomically. This may fail if mutator updates before us, but it's ok.
- if (ref != old_ref) {
- Atomic<mirror::Object*>* atomic_root = reinterpret_cast<Atomic<mirror::Object*>*>(root);
- atomic_root->CompareExchangeStrongRelaxed(old_ref, ref);
+ if (kUseReadBarrier && with_read_barrier) {
+ if (kIsDebugBuild) {
+ Thread* const self = Thread::Current();
+ if (self != nullptr) {
+ CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
}
}
- AssertToSpaceInvariant(gc_root_source, ref);
- return ref;
+ if (kUseBakerReadBarrier) {
+ // TODO: separate the read barrier code from the collector code more.
+ Thread* self = Thread::Current();
+ if (self != nullptr && self->GetIsGcMarking()) {
+ ref = reinterpret_cast<MirrorType*>(Mark(ref));
+ }
+ AssertToSpaceInvariant(gc_root_source, ref);
+ return ref;
+ } else if (kUseBrooksReadBarrier) {
+ // To be implemented.
+ return ref;
+ } else if (kUseTableLookupReadBarrier) {
+ Thread* self = Thread::Current();
+ if (self != nullptr &&
+ self->GetIsGcMarking() &&
+ Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
+ MirrorType* old_ref = ref;
+ ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
+ // Update the field atomically. This may fail if mutator updates before us, but it's ok.
+ if (ref != old_ref) {
+ Atomic<mirror::Object*>* atomic_root = reinterpret_cast<Atomic<mirror::Object*>*>(root);
+ atomic_root->CompareExchangeStrongRelaxed(old_ref, ref);
+ }
+ }
+ AssertToSpaceInvariant(gc_root_source, ref);
+ return ref;
+ } else {
+ LOG(FATAL) << "Unexpected read barrier type";
+ UNREACHABLE();
+ }
} else {
return ref;
}
diff --git a/runtime/thread.h b/runtime/thread.h
index b25bcb2..d7887ca 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -1067,6 +1067,14 @@
void InitStringEntryPoints();
+ void ModifyDebugDisallowReadBarrier(int8_t delta) {
+ debug_disallow_read_barrier_ += delta;
+ }
+
+ uint8_t GetDebugDisallowReadBarrierCount() const {
+ return debug_disallow_read_barrier_;
+ }
+
private:
explicit Thread(bool daemon);
~Thread() REQUIRES(!Locks::mutator_lock_, !Locks::thread_suspend_count_lock_);
@@ -1446,6 +1454,9 @@
// Thread "interrupted" status; stays raised until queried or thrown.
bool interrupted_ GUARDED_BY(wait_mutex_);
+  // Count of scopes that disallow read barriers; only checked in debug builds and only in the runtime.
+ uint8_t debug_disallow_read_barrier_ = 0;
+
friend class Dbg; // For SetStateUnsafe.
friend class gc::collector::SemiSpace; // For getting stack traces.
friend class Runtime; // For CreatePeer.
@@ -1493,6 +1504,20 @@
DISALLOW_COPY_AND_ASSIGN(ScopedStackedShadowFramePusher);
};
+// Only works for debug builds.
+class ScopedDebugDisallowReadBarriers {
+ public:
+ explicit ScopedDebugDisallowReadBarriers(Thread* self) : self_(self) {
+ self_->ModifyDebugDisallowReadBarrier(1);
+ }
+ ~ScopedDebugDisallowReadBarriers() {
+ self_->ModifyDebugDisallowReadBarrier(-1);
+ }
+
+ private:
+ Thread* const self_;
+};
+
std::ostream& operator<<(std::ostream& os, const Thread& thread);
std::ostream& operator<<(std::ostream& os, const StackedShadowFrameType& thread);