Move mirror::Class to use ObjPtr
Arguments are converted to ObjPtr; return types are left as
non-ObjPtr for now. Also fixed moving GC bugs in tests.
Test: test-art-host
Bug: 31113334
Change-Id: I5da1b5ac55dfbc5cc97a64be2c870ba9f512d9b0
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 98d383d..02c987c 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -306,14 +306,14 @@
SetEmbeddedVTableEntryUnchecked(i, method, pointer_size);
}
-inline bool Class::Implements(Class* klass) {
+inline bool Class::Implements(ObjPtr<Class> klass) {
DCHECK(klass != nullptr);
DCHECK(klass->IsInterface()) << PrettyClass(this);
// All interfaces implemented directly and by our superclass, and
// recursively all super-interfaces of those interfaces, are listed
// in iftable_, so we can just do a linear scan through that.
int32_t iftable_count = GetIfTableCount();
- IfTable* iftable = GetIfTable();
+ ObjPtr<IfTable> iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; i++) {
if (iftable->GetInterface(i) == klass) {
return true;
@@ -353,7 +353,7 @@
if (!IsArrayClass()) {
// If "this" is not also an array, it must be Object.
// src's super should be java_lang_Object, since it is an array.
- Class* java_lang_Object = src->GetSuperClass();
+ ObjPtr<Class> java_lang_Object = src->GetSuperClass();
DCHECK(java_lang_Object != nullptr) << PrettyClass(src);
DCHECK(java_lang_Object->GetSuperClass() == nullptr) << PrettyClass(src);
return this == java_lang_Object;
@@ -451,15 +451,20 @@
return ResolvedFieldAccessTest<true, true>(access_to, field, field_idx, nullptr);
}
-inline bool Class::CanAccessResolvedMethod(Class* access_to, ArtMethod* method,
- DexCache* dex_cache, uint32_t method_idx) {
+inline bool Class::CanAccessResolvedMethod(ObjPtr<Class> access_to,
+ ArtMethod* method,
+ ObjPtr<DexCache> dex_cache,
+ uint32_t method_idx) {
return ResolvedMethodAccessTest<false, false, kStatic>(access_to, method, method_idx, dex_cache);
}
template <InvokeType throw_invoke_type>
-inline bool Class::CheckResolvedMethodAccess(Class* access_to, ArtMethod* method,
+inline bool Class::CheckResolvedMethodAccess(ObjPtr<Class> access_to,
+ ArtMethod* method,
uint32_t method_idx) {
- return ResolvedMethodAccessTest<true, true, throw_invoke_type>(access_to, method, method_idx,
+ return ResolvedMethodAccessTest<true, true, throw_invoke_type>(access_to,
+ method,
+ method_idx,
nullptr);
}
@@ -478,13 +483,13 @@
inline ArtMethod* Class::FindVirtualMethodForInterface(ArtMethod* method,
PointerSize pointer_size) {
- Class* declaring_class = method->GetDeclaringClass();
+ ObjPtr<Class> declaring_class = method->GetDeclaringClass();
DCHECK(declaring_class != nullptr) << PrettyClass(this);
DCHECK(declaring_class->IsInterface()) << PrettyMethod(method);
DCHECK(!method->IsCopied());
// TODO cache to improve lookup speed
const int32_t iftable_count = GetIfTableCount();
- IfTable* iftable = GetIfTable();
+ ObjPtr<IfTable> iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; i++) {
if (iftable->GetInterface(i) == declaring_class) {
return iftable->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
@@ -526,14 +531,14 @@
}
inline int32_t Class::GetIfTableCount() {
- IfTable* iftable = GetIfTable();
+ ObjPtr<IfTable> iftable = GetIfTable();
if (iftable == nullptr) {
return 0;
}
return iftable->Count();
}
-inline void Class::SetIfTable(IfTable* new_iftable) {
+inline void Class::SetIfTable(ObjPtr<IfTable> new_iftable) {
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, iftable_), new_iftable);
}
@@ -544,20 +549,20 @@
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline MemberOffset Class::GetFirstReferenceInstanceFieldOffset() {
- Class* super_class = GetSuperClass<kVerifyFlags, kReadBarrierOption>();
+ ObjPtr<Class> super_class = GetSuperClass<kVerifyFlags, kReadBarrierOption>();
return (super_class != nullptr)
? MemberOffset(RoundUp(super_class->GetObjectSize<kVerifyFlags, kReadBarrierOption>(),
- sizeof(mirror::HeapReference<mirror::Object>)))
+ kHeapReferenceSize))
: ClassOffset();
}
template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline MemberOffset Class::GetFirstReferenceStaticFieldOffset(PointerSize pointer_size) {
DCHECK(IsResolved());
- uint32_t base = sizeof(mirror::Class); // Static fields come after the class.
+ uint32_t base = sizeof(Class); // Static fields come after the class.
if (ShouldHaveEmbeddedVTable<kVerifyFlags, kReadBarrierOption>()) {
// Static fields come after the embedded tables.
- base = mirror::Class::ComputeClassSize(
+ base = Class::ComputeClassSize(
true, GetEmbeddedVTableLength(), 0, 0, 0, 0, 0, pointer_size);
}
return MemberOffset(base);
@@ -566,10 +571,10 @@
inline MemberOffset Class::GetFirstReferenceStaticFieldOffsetDuringLinking(
PointerSize pointer_size) {
DCHECK(IsLoaded());
- uint32_t base = sizeof(mirror::Class); // Static fields come after the class.
+ uint32_t base = sizeof(Class); // Static fields come after the class.
if (ShouldHaveEmbeddedVTable()) {
// Static fields come after the embedded tables.
- base = mirror::Class::ComputeClassSize(true, GetVTableDuringLinking()->GetLength(),
+ base = Class::ComputeClassSize(true, GetVTableDuringLinking()->GetLength(),
0, 0, 0, 0, 0, pointer_size);
}
return MemberOffset(base);
@@ -700,7 +705,7 @@
}
template<bool kIsInstrumented, bool kCheckAddFinalizer>
-inline Object* Class::Alloc(Thread* self, gc::AllocatorType allocator_type) {
+inline ObjPtr<Object> Class::Alloc(Thread* self, gc::AllocatorType allocator_type) {
CheckObjectAlloc();
gc::Heap* heap = Runtime::Current()->GetHeap();
const bool add_finalizer = kCheckAddFinalizer && IsFinalizable();
@@ -708,7 +713,7 @@
DCHECK(!IsFinalizable());
}
// Note that the this pointer may be invalidated after the allocation.
- ObjPtr<mirror::Object> obj =
+ ObjPtr<Object> obj =
heap->AllocObjectWithAllocator<kIsInstrumented, false>(self,
this,
this->object_size_,
@@ -724,11 +729,11 @@
return obj.Ptr();
}
-inline Object* Class::AllocObject(Thread* self) {
+inline ObjPtr<Object> Class::AllocObject(Thread* self) {
return Alloc<true>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
}
-inline Object* Class::AllocNonMovableObject(Thread* self) {
+inline ObjPtr<Object> Class::AllocNonMovableObject(Thread* self) {
return Alloc<true>(self, Runtime::Current()->GetHeap()->GetCurrentNonMovingAllocator());
}
@@ -750,7 +755,7 @@
}
// Space used by reference statics.
- size += num_ref_static_fields * sizeof(HeapReference<Object>);
+ size += num_ref_static_fields * kHeapReferenceSize;
if (!IsAligned<8>(size) && num_64bit_static_fields > 0) {
uint32_t gap = 8 - (size & 0x7);
size += gap; // will be padded
@@ -781,8 +786,8 @@
VerifyObjectFlags kVerifyFlags,
ReadBarrierOption kReadBarrierOption,
typename Visitor>
-inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) {
- VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
+inline void Class::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
+ VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass.Ptr(), visitor);
// Right after a class is allocated, but not yet loaded
// (kStatusNotReady, see ClassLinker::LoadClass()), GC may find it
// and scan it. IsTemp() may call Class::GetAccessFlags() but may
@@ -810,7 +815,7 @@
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Class::IsClassClass() {
- Class* java_lang_Class = GetClass<kVerifyFlags, kReadBarrierOption>()->
+ ObjPtr<Class> java_lang_Class = GetClass<kVerifyFlags, kReadBarrierOption>()->
template GetClass<kVerifyFlags, kReadBarrierOption>();
return this == java_lang_Class;
}
@@ -883,12 +888,11 @@
SetFieldBoolean<false, false>(GetSlowPathFlagOffset(), enabled);
}
-inline void Class::InitializeClassVisitor::operator()(ObjPtr<mirror::Object> obj,
+inline void Class::InitializeClassVisitor::operator()(ObjPtr<Object> obj,
size_t usable_size) const {
DCHECK_LE(class_size_, usable_size);
// Avoid AsClass as object is not yet in live bitmap or allocation stack.
- ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
- // DCHECK(klass->IsClass());
+ ObjPtr<Class> klass = ObjPtr<Class>::DownCast(obj);
klass->SetClassSize(class_size_);
klass->SetPrimitiveType(Primitive::kPrimNot); // Default to not being primitive.
klass->SetDexClassDefIndex(DexFile::kDexNoIndex16); // Default to no valid class def index.
@@ -920,7 +924,7 @@
} else if (IsArrayClass()) {
return 2;
} else if (IsProxyClass()) {
- mirror::ObjectArray<mirror::Class>* interfaces = GetInterfaces();
+ ObjectArray<Class>* interfaces = GetInterfaces();
return interfaces != nullptr ? interfaces->GetLength() : 0;
} else {
const DexFile::TypeList* interfaces = GetInterfaceTypeList();
@@ -941,7 +945,7 @@
}
template<ReadBarrierOption kReadBarrierOption, class Visitor>
-void mirror::Class::VisitNativeRoots(Visitor& visitor, PointerSize pointer_size) {
+void Class::VisitNativeRoots(Visitor& visitor, PointerSize pointer_size) {
for (ArtField& field : GetSFieldsUnchecked()) {
// Visit roots first in case the declaring class gets moved.
field.VisitRoots(visitor);
@@ -1070,7 +1074,7 @@
}
template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
-inline void Class::FixupNativePointers(mirror::Class* dest,
+inline void Class::FixupNativePointers(Class* dest,
PointerSize pointer_size,
const Visitor& visitor) {
// Update the field arrays.
@@ -1139,6 +1143,14 @@
return this->IsInSamePackage(access_to);
}
+inline bool Class::CannotBeAssignedFromOtherTypes() {
+ if (!IsArrayClass()) {
+ return IsFinal();
+ }
+ ObjPtr<Class> component = GetComponentType();
+ return component->IsPrimitive() || component->CannotBeAssignedFromOtherTypes();
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 689dd22..f93f72f 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -40,12 +40,12 @@
GcRoot<Class> Class::java_lang_Class_;
-void Class::SetClassClass(Class* java_lang_Class) {
+void Class::SetClassClass(ObjPtr<Class> java_lang_Class) {
CHECK(java_lang_Class_.IsNull())
<< java_lang_Class_.Read()
<< " " << java_lang_Class;
CHECK(java_lang_Class != nullptr);
- java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
+ java_lang_Class->SetClassFlags(kClassFlagClass);
java_lang_Class_ = GcRoot<Class>(java_lang_Class);
}
@@ -58,7 +58,7 @@
java_lang_Class_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
}
-inline void Class::SetVerifyError(mirror::Object* error) {
+inline void Class::SetVerifyError(ObjPtr<Object> error) {
CHECK(error != nullptr) << PrettyClass(this);
if (Runtime::Current()->IsActiveTransaction()) {
SetFieldObject<true>(OFFSET_OF_OBJECT_MEMBER(Class, verify_error_), error);
@@ -140,7 +140,7 @@
}
}
-void Class::SetDexCache(DexCache* new_dex_cache) {
+void Class::SetDexCache(ObjPtr<DexCache> new_dex_cache) {
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, dex_cache_), new_dex_cache);
SetDexCacheStrings(new_dex_cache != nullptr ? new_dex_cache->GetStrings() : nullptr);
}
@@ -209,8 +209,8 @@
Thread* const self = Thread::Current();
StackHandleScope<2> hs(self);
- Handle<mirror::Class> h_this(hs.NewHandle(this));
- Handle<mirror::Class> h_super(hs.NewHandle(GetSuperClass()));
+ Handle<Class> h_this(hs.NewHandle(this));
+ Handle<Class> h_super(hs.NewHandle(GetSuperClass()));
auto image_pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
std::string temp;
@@ -231,12 +231,12 @@
if (num_direct_interfaces > 0) {
os << " interfaces (" << num_direct_interfaces << "):\n";
for (size_t i = 0; i < num_direct_interfaces; ++i) {
- Class* interface = GetDirectInterface(self, h_this, i);
+ ObjPtr<Class> interface = GetDirectInterface(self, h_this, i);
if (interface == nullptr) {
os << StringPrintf(" %2zd: nullptr!\n", i);
} else {
- const ClassLoader* cl = interface->GetClassLoader();
- os << StringPrintf(" %2zd: %s (cl=%p)\n", i, PrettyClass(interface).c_str(), cl);
+ ObjPtr<ClassLoader> cl = interface->GetClassLoader();
+ os << StringPrintf(" %2zd: %s (cl=%p)\n", i, PrettyClass(interface).c_str(), cl.Ptr());
}
}
}
@@ -283,7 +283,7 @@
// Sanity check that the number of bits set in the reference offset bitmap
// agrees with the number of references
uint32_t count = 0;
- for (Class* c = this; c != nullptr; c = c->GetSuperClass()) {
+ for (ObjPtr<Class> c = this; c != nullptr; c = c->GetSuperClass()) {
count += c->NumReferenceInstanceFieldsDuringLinking();
}
// +1 for the Class in Object.
@@ -338,7 +338,7 @@
return WellKnownClasses::ToClass(WellKnownClasses::java_lang_Throwable)->IsAssignableFrom(this);
}
-void Class::SetClassLoader(ClassLoader* new_class_loader) {
+void Class::SetClassLoader(ObjPtr<ClassLoader> new_class_loader) {
if (Runtime::Current()->IsActiveTransaction()) {
SetFieldObject<true>(OFFSET_OF_OBJECT_MEMBER(Class, class_loader_), new_class_loader);
} else {
@@ -356,7 +356,7 @@
}
int32_t iftable_count = GetIfTableCount();
- IfTable* iftable = GetIfTable();
+ ObjPtr<IfTable> iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; ++i) {
method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
@@ -376,7 +376,7 @@
}
int32_t iftable_count = GetIfTableCount();
- IfTable* iftable = GetIfTable();
+ ObjPtr<IfTable> iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; ++i) {
method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
@@ -386,7 +386,7 @@
return nullptr;
}
-ArtMethod* Class::FindInterfaceMethod(const DexCache* dex_cache,
+ArtMethod* Class::FindInterfaceMethod(ObjPtr<DexCache> dex_cache,
uint32_t dex_method_idx,
PointerSize pointer_size) {
// Check the current class before checking the interfaces.
@@ -396,7 +396,7 @@
}
int32_t iftable_count = GetIfTableCount();
- IfTable* iftable = GetIfTable();
+ ObjPtr<IfTable> iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; ++i) {
method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(
dex_cache, dex_method_idx, pointer_size);
@@ -429,7 +429,7 @@
return nullptr;
}
-ArtMethod* Class::FindDeclaredDirectMethod(const DexCache* dex_cache,
+ArtMethod* Class::FindDeclaredDirectMethod(ObjPtr<DexCache> dex_cache,
uint32_t dex_method_idx,
PointerSize pointer_size) {
if (GetDexCache() == dex_cache) {
@@ -445,7 +445,7 @@
ArtMethod* Class::FindDirectMethod(const StringPiece& name,
const StringPiece& signature,
PointerSize pointer_size) {
- for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
+ for (ObjPtr<Class> klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
ArtMethod* method = klass->FindDeclaredDirectMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
@@ -457,7 +457,7 @@
ArtMethod* Class::FindDirectMethod(const StringPiece& name,
const Signature& signature,
PointerSize pointer_size) {
- for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
+ for (ObjPtr<Class> klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
ArtMethod* method = klass->FindDeclaredDirectMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
@@ -466,9 +466,10 @@
return nullptr;
}
-ArtMethod* Class::FindDirectMethod(
- const DexCache* dex_cache, uint32_t dex_method_idx, PointerSize pointer_size) {
- for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
+ArtMethod* Class::FindDirectMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
+ PointerSize pointer_size) {
+ for (ObjPtr<Class> klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
ArtMethod* method = klass->FindDeclaredDirectMethod(dex_cache, dex_method_idx, pointer_size);
if (method != nullptr) {
return method;
@@ -516,7 +517,7 @@
return nullptr;
}
-ArtMethod* Class::FindDeclaredVirtualMethod(const DexCache* dex_cache,
+ArtMethod* Class::FindDeclaredVirtualMethod(ObjPtr<DexCache> dex_cache,
uint32_t dex_method_idx,
PointerSize pointer_size) {
if (GetDexCache() == dex_cache) {
@@ -540,9 +541,10 @@
return nullptr;
}
-ArtMethod* Class::FindVirtualMethod(
- const StringPiece& name, const StringPiece& signature, PointerSize pointer_size) {
- for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
+ArtMethod* Class::FindVirtualMethod(const StringPiece& name,
+ const StringPiece& signature,
+ PointerSize pointer_size) {
+ for (ObjPtr<Class> klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
ArtMethod* method = klass->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
@@ -551,9 +553,10 @@
return nullptr;
}
-ArtMethod* Class::FindVirtualMethod(
- const StringPiece& name, const Signature& signature, PointerSize pointer_size) {
- for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
+ArtMethod* Class::FindVirtualMethod(const StringPiece& name,
+ const Signature& signature,
+ PointerSize pointer_size) {
+ for (ObjPtr<Class> klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
ArtMethod* method = klass->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
@@ -562,9 +565,10 @@
return nullptr;
}
-ArtMethod* Class::FindVirtualMethod(
- const DexCache* dex_cache, uint32_t dex_method_idx, PointerSize pointer_size) {
- for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
+ArtMethod* Class::FindVirtualMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
+ PointerSize pointer_size) {
+ for (ObjPtr<Class> klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
ArtMethod* method = klass->FindDeclaredVirtualMethod(dex_cache, dex_method_idx, pointer_size);
if (method != nullptr) {
return method;
@@ -591,8 +595,8 @@
Thread* self = Thread::Current();
StackHandleScope<2> hs(self);
- MutableHandle<mirror::IfTable> iftable(hs.NewHandle(GetIfTable()));
- MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
+ MutableHandle<IfTable> iftable(hs.NewHandle(GetIfTable()));
+ MutableHandle<Class> iface(hs.NewHandle<Class>(nullptr));
size_t iftable_count = GetIfTableCount();
// Find the method. We don't need to check for conflicts because they would have been in the
// copied virtuals of this interface. Order matters, traverse in reverse topological order; most
@@ -696,7 +700,7 @@
return FindFieldByNameAndType(GetIFieldsPtr(), name, type);
}
-ArtField* Class::FindDeclaredInstanceField(const DexCache* dex_cache, uint32_t dex_field_idx) {
+ArtField* Class::FindDeclaredInstanceField(ObjPtr<DexCache> dex_cache, uint32_t dex_field_idx) {
if (GetDexCache() == dex_cache) {
for (ArtField& field : GetIFields()) {
if (field.GetDexFieldIndex() == dex_field_idx) {
@@ -710,7 +714,7 @@
ArtField* Class::FindInstanceField(const StringPiece& name, const StringPiece& type) {
// Is the field in this class, or any of its superclasses?
// Interfaces are not relevant because they can't contain instance fields.
- for (Class* c = this; c != nullptr; c = c->GetSuperClass()) {
+ for (ObjPtr<Class> c = this; c != nullptr; c = c->GetSuperClass()) {
ArtField* f = c->FindDeclaredInstanceField(name, type);
if (f != nullptr) {
return f;
@@ -719,10 +723,10 @@
return nullptr;
}
-ArtField* Class::FindInstanceField(const DexCache* dex_cache, uint32_t dex_field_idx) {
+ArtField* Class::FindInstanceField(ObjPtr<DexCache> dex_cache, uint32_t dex_field_idx) {
// Is the field in this class, or any of its superclasses?
// Interfaces are not relevant because they can't contain instance fields.
- for (Class* c = this; c != nullptr; c = c->GetSuperClass()) {
+ for (ObjPtr<Class> c = this; c != nullptr; c = c->GetSuperClass()) {
ArtField* f = c->FindDeclaredInstanceField(dex_cache, dex_field_idx);
if (f != nullptr) {
return f;
@@ -736,7 +740,7 @@
return FindFieldByNameAndType(GetSFieldsPtr(), name, type);
}
-ArtField* Class::FindDeclaredStaticField(const DexCache* dex_cache, uint32_t dex_field_idx) {
+ArtField* Class::FindDeclaredStaticField(ObjPtr<DexCache> dex_cache, uint32_t dex_field_idx) {
if (dex_cache == GetDexCache()) {
for (ArtField& field : GetSFields()) {
if (field.GetDexFieldIndex() == dex_field_idx) {
@@ -747,11 +751,13 @@
return nullptr;
}
-ArtField* Class::FindStaticField(Thread* self, Handle<Class> klass, const StringPiece& name,
+ArtField* Class::FindStaticField(Thread* self,
+ Handle<Class> klass,
+ const StringPiece& name,
const StringPiece& type) {
// Is the field in this class (or its interfaces), or any of its
// superclasses (or their interfaces)?
- for (Class* k = klass.Get(); k != nullptr; k = k->GetSuperClass()) {
+ for (ObjPtr<Class> k = klass.Get(); k != nullptr; k = k->GetSuperClass()) {
// Is the field in this class?
ArtField* f = k->FindDeclaredStaticField(name, type);
if (f != nullptr) {
@@ -759,11 +765,11 @@
}
    // Wrap k in case it moves during GetDirectInterface.
StackHandleScope<1> hs(self);
- HandleWrapper<mirror::Class> h_k(hs.NewHandleWrapper(&k));
+ HandleWrapperObjPtr<Class> h_k(hs.NewHandleWrapper(&k));
// Is this field in any of this class' interfaces?
for (uint32_t i = 0; i < h_k->NumDirectInterfaces(); ++i) {
StackHandleScope<1> hs2(self);
- Handle<mirror::Class> interface(hs2.NewHandle(GetDirectInterface(self, h_k, i)));
+ Handle<Class> interface(hs2.NewHandle(GetDirectInterface(self, h_k, i)));
f = FindStaticField(self, interface, name, type);
if (f != nullptr) {
return f;
@@ -774,10 +780,10 @@
}
ArtField* Class::FindStaticField(Thread* self,
- Class* klass,
- const DexCache* dex_cache,
+ ObjPtr<Class> klass,
+ ObjPtr<DexCache> dex_cache,
uint32_t dex_field_idx) {
- for (Class* k = klass; k != nullptr; k = k->GetSuperClass()) {
+ for (ObjPtr<Class> k = klass; k != nullptr; k = k->GetSuperClass()) {
// Is the field in this class?
ArtField* f = k->FindDeclaredStaticField(dex_cache, dex_field_idx);
if (f != nullptr) {
@@ -787,10 +793,10 @@
// from here, it takes a Handle as an argument, so we need to wrap `k`.
ScopedAssertNoThreadSuspension ants(__FUNCTION__);
StackHandleScope<1> hs(self);
- Handle<mirror::Class> h_k(hs.NewHandle(k));
+ Handle<Class> h_k(hs.NewHandle(k));
// Is this field in any of this class' interfaces?
for (uint32_t i = 0; i < h_k->NumDirectInterfaces(); ++i) {
- mirror::Class* interface = GetDirectInterface(self, h_k, i);
+ ObjPtr<Class> interface = GetDirectInterface(self, h_k, i);
f = FindStaticField(self, interface, dex_cache, dex_field_idx);
if (f != nullptr) {
return f;
@@ -800,10 +806,12 @@
return nullptr;
}
-ArtField* Class::FindField(Thread* self, Handle<Class> klass, const StringPiece& name,
+ArtField* Class::FindField(Thread* self,
+ Handle<Class> klass,
+ const StringPiece& name,
const StringPiece& type) {
// Find a field using the JLS field resolution order
- for (Class* k = klass.Get(); k != nullptr; k = k->GetSuperClass()) {
+ for (ObjPtr<Class> k = klass.Get(); k != nullptr; k = k->GetSuperClass()) {
// Is the field in this class?
ArtField* f = k->FindDeclaredInstanceField(name, type);
if (f != nullptr) {
@@ -815,10 +823,10 @@
}
// Is this field in any of this class' interfaces?
StackHandleScope<1> hs(self);
- HandleWrapper<mirror::Class> h_k(hs.NewHandleWrapper(&k));
+ HandleWrapperObjPtr<Class> h_k(hs.NewHandleWrapper(&k));
for (uint32_t i = 0; i < h_k->NumDirectInterfaces(); ++i) {
StackHandleScope<1> hs2(self);
- Handle<mirror::Class> interface(hs2.NewHandle(GetDirectInterface(self, h_k, i)));
+ Handle<Class> interface(hs2.NewHandle(GetDirectInterface(self, h_k, i)));
f = interface->FindStaticField(self, interface, name, type);
if (f != nullptr) {
return f;
@@ -874,8 +882,9 @@
return GetInterfaceTypeList()->GetTypeItem(idx).type_idx_;
}
-mirror::Class* Class::GetDirectInterface(Thread* self, Handle<mirror::Class> klass,
- uint32_t idx) {
+ObjPtr<Class> Class::GetDirectInterface(Thread* self,
+ Handle<Class> klass,
+ uint32_t idx) {
DCHECK(klass.Get() != nullptr);
DCHECK(!klass->IsPrimitive());
if (klass->IsArrayClass()) {
@@ -887,12 +896,12 @@
return class_linker->FindSystemClass(self, "Ljava/io/Serializable;");
}
} else if (klass->IsProxyClass()) {
- mirror::ObjectArray<mirror::Class>* interfaces = klass.Get()->GetInterfaces();
+ ObjPtr<ObjectArray<Class>> interfaces = klass.Get()->GetInterfaces();
DCHECK(interfaces != nullptr);
return interfaces->Get(idx);
} else {
uint16_t type_idx = klass->GetDirectInterfaceTypeIdx(idx);
- mirror::Class* interface = klass->GetDexCache()->GetResolvedType(type_idx);
+ ObjPtr<Class> interface = klass->GetDexCache()->GetResolvedType(type_idx);
if (interface == nullptr) {
interface = Runtime::Current()->GetClassLinker()->ResolveType(klass->GetDexFile(), type_idx,
klass.Get());
@@ -902,13 +911,13 @@
}
}
-mirror::Class* Class::GetCommonSuperClass(Handle<Class> klass) {
+ObjPtr<Class> Class::GetCommonSuperClass(Handle<Class> klass) {
DCHECK(klass.Get() != nullptr);
DCHECK(!klass->IsInterface());
DCHECK(!IsInterface());
- mirror::Class* common_super_class = this;
+ ObjPtr<Class> common_super_class = this;
while (!common_super_class->IsAssignableFrom(klass.Get())) {
- mirror::Class* old_common = common_super_class;
+ ObjPtr<Class> old_common = common_super_class;
common_super_class = old_common->GetSuperClass();
DCHECK(common_super_class != nullptr) << PrettyClass(old_common);
}
@@ -926,7 +935,7 @@
}
std::string Class::GetLocation() {
- mirror::DexCache* dex_cache = GetDexCache();
+ ObjPtr<DexCache> dex_cache = GetDexCache();
if (dex_cache != nullptr && !IsProxyClass()) {
return dex_cache->GetLocation()->ToModifiedUtf8();
}
@@ -959,28 +968,28 @@
class ReadBarrierOnNativeRootsVisitor {
public:
- void operator()(mirror::Object* obj ATTRIBUTE_UNUSED,
+ void operator()(ObjPtr<Object> obj ATTRIBUTE_UNUSED,
MemberOffset offset ATTRIBUTE_UNUSED,
bool is_static ATTRIBUTE_UNUSED) const {}
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+ void VisitRootIfNonNull(CompressedReference<Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
if (!root->IsNull()) {
VisitRoot(root);
}
}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ void VisitRoot(CompressedReference<Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
- mirror::Object* old_ref = root->AsMirrorPtr();
- mirror::Object* new_ref = ReadBarrier::BarrierForRoot(root);
+ ObjPtr<Object> old_ref = root->AsMirrorPtr();
+ ObjPtr<Object> new_ref = ReadBarrier::BarrierForRoot(root);
if (old_ref != new_ref) {
// Update the field atomically. This may fail if mutator updates before us, but it's ok.
auto* atomic_root =
- reinterpret_cast<Atomic<mirror::CompressedReference<mirror::Object>>*>(root);
+ reinterpret_cast<Atomic<CompressedReference<Object>>*>(root);
atomic_root->CompareExchangeStrongSequentiallyConsistent(
- mirror::CompressedReference<mirror::Object>::FromMirrorPtr(old_ref),
- mirror::CompressedReference<mirror::Object>::FromMirrorPtr(new_ref));
+ CompressedReference<Object>::FromMirrorPtr(old_ref.Ptr()),
+ CompressedReference<Object>::FromMirrorPtr(new_ref.Ptr()));
}
}
};
@@ -989,7 +998,7 @@
class CopyClassVisitor {
public:
CopyClassVisitor(Thread* self,
- Handle<mirror::Class>* orig,
+ Handle<Class>* orig,
size_t new_length,
size_t copy_bytes,
ImTable* imt,
@@ -998,24 +1007,24 @@
copy_bytes_(copy_bytes), imt_(imt), pointer_size_(pointer_size) {
}
- void operator()(ObjPtr<mirror::Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) {
StackHandleScope<1> hs(self_);
Handle<mirror::Class> h_new_class_obj(hs.NewHandle(obj->AsClass()));
- mirror::Object::CopyObject(h_new_class_obj.Get(), orig_->Get(), copy_bytes_);
- mirror::Class::SetStatus(h_new_class_obj, Class::kStatusResolving, self_);
+ Object::CopyObject(h_new_class_obj.Get(), orig_->Get(), copy_bytes_);
+ Class::SetStatus(h_new_class_obj, Class::kStatusResolving, self_);
h_new_class_obj->PopulateEmbeddedVTable(pointer_size_);
h_new_class_obj->SetImt(imt_, pointer_size_);
h_new_class_obj->SetClassSize(new_length_);
// Visit all of the references to make sure there is no from space references in the native
// roots.
- static_cast<mirror::Object*>(h_new_class_obj.Get())->VisitReferences(
+ ObjPtr<Object>(h_new_class_obj.Get())->VisitReferences(
ReadBarrierOnNativeRootsVisitor(), VoidFunctor());
}
private:
Thread* const self_;
- Handle<mirror::Class>* const orig_;
+ Handle<Class>* const orig_;
const size_t new_length_;
const size_t copy_bytes_;
ImTable* imt_;
@@ -1027,12 +1036,12 @@
DCHECK_GE(new_length, static_cast<int32_t>(sizeof(Class)));
// We may get copied by a compacting GC.
StackHandleScope<1> hs(self);
- Handle<mirror::Class> h_this(hs.NewHandle(this));
+ Handle<Class> h_this(hs.NewHandle(this));
gc::Heap* heap = Runtime::Current()->GetHeap();
// The num_bytes (3rd param) is sizeof(Class) as opposed to SizeOf()
// to skip copying the tail part that we will overwrite here.
CopyClassVisitor visitor(self, &h_this, new_length, sizeof(Class), imt, pointer_size);
- mirror::Object* new_class = kMovingClasses ?
+ ObjPtr<Object> new_class = kMovingClasses ?
heap->AllocObject<true>(self, java_lang_Class_.Read(), new_length, visitor) :
heap->AllocNonMovableObject<true>(self, java_lang_Class_.Read(), new_length, visitor);
if (UNLIKELY(new_class == nullptr)) {
@@ -1049,7 +1058,7 @@
// TODO: Move this to java_lang_Class.cc?
ArtMethod* Class::GetDeclaredConstructor(
- Thread* self, Handle<mirror::ObjectArray<mirror::Class>> args, PointerSize pointer_size) {
+ Thread* self, Handle<ObjectArray<Class>> args, PointerSize pointer_size) {
for (auto& m : GetDirectMethods(pointer_size)) {
// Skip <clinit> which is a static constructor, as well as non constructors.
if (m.IsStatic() || !m.IsConstructor()) {
@@ -1068,7 +1077,7 @@
uint32_t Class::Depth() {
uint32_t depth = 0;
- for (Class* klass = this; klass->GetSuperClass() != nullptr; klass = klass->GetSuperClass()) {
+ for (ObjPtr<Class> klass = this; klass->GetSuperClass() != nullptr; klass = klass->GetSuperClass()) {
depth++;
}
return depth;
@@ -1081,10 +1090,11 @@
}
template <PointerSize kPointerSize, bool kTransactionActive>
-mirror::Method* Class::GetDeclaredMethodInternal(Thread* self,
- mirror::Class* klass,
- mirror::String* name,
- mirror::ObjectArray<mirror::Class>* args) {
+ObjPtr<Method> Class::GetDeclaredMethodInternal(
+ Thread* self,
+ ObjPtr<Class> klass,
+ ObjPtr<String> name,
+ ObjPtr<ObjectArray<Class>> args) {
// Covariant return types permit the class to define multiple
// methods with the same name and parameter types. Prefer to
// return a non-synthetic method in such situations. We may
@@ -1099,12 +1109,12 @@
return nullptr;
}
auto h_args = hs.NewHandle(args);
- Handle<mirror::Class> h_klass = hs.NewHandle(klass);
+ Handle<Class> h_klass = hs.NewHandle(klass);
ArtMethod* result = nullptr;
for (auto& m : h_klass->GetDeclaredVirtualMethods(kPointerSize)) {
auto* np_method = m.GetInterfaceMethodIfProxy(kPointerSize);
// May cause thread suspension.
- mirror::String* np_name = np_method->GetNameAsString(self);
+ ObjPtr<String> np_name = np_method->GetNameAsString(self);
if (!np_name->Equals(h_method_name.Get()) || !np_method->EqualParameters(h_args)) {
if (UNLIKELY(self->IsExceptionPending())) {
return nullptr;
@@ -1113,7 +1123,7 @@
}
auto modifiers = m.GetAccessFlags();
if ((modifiers & kSkipModifiers) == 0) {
- return mirror::Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
+ return Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
}
if ((modifiers & kAccMiranda) == 0) {
result = &m; // Remember as potential result if it's not a miranda method.
@@ -1127,7 +1137,7 @@
}
auto* np_method = m.GetInterfaceMethodIfProxy(kPointerSize);
// May cause thread suspension.
- mirror::String* np_name = np_method->GetNameAsString(self);
+ ObjPtr<String> np_name = np_method->GetNameAsString(self);
if (np_name == nullptr) {
self->AssertPendingException();
return nullptr;
@@ -1139,76 +1149,76 @@
continue;
}
if ((modifiers & kSkipModifiers) == 0) {
- return mirror::Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
+ return Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, &m);
}
// Direct methods cannot be miranda methods, so this potential result must be synthetic.
result = &m;
}
}
return result != nullptr
- ? mirror::Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, result)
+ ? Method::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, result)
: nullptr;
}
template
-mirror::Method* Class::GetDeclaredMethodInternal<PointerSize::k32, false>(
+ObjPtr<Method> Class::GetDeclaredMethodInternal<PointerSize::k32, false>(
Thread* self,
- mirror::Class* klass,
- mirror::String* name,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<String> name,
+ ObjPtr<ObjectArray<Class>> args);
template
-mirror::Method* Class::GetDeclaredMethodInternal<PointerSize::k32, true>(
+ObjPtr<Method> Class::GetDeclaredMethodInternal<PointerSize::k32, true>(
Thread* self,
- mirror::Class* klass,
- mirror::String* name,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<String> name,
+ ObjPtr<ObjectArray<Class>> args);
template
-mirror::Method* Class::GetDeclaredMethodInternal<PointerSize::k64, false>(
+ObjPtr<Method> Class::GetDeclaredMethodInternal<PointerSize::k64, false>(
Thread* self,
- mirror::Class* klass,
- mirror::String* name,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<String> name,
+ ObjPtr<ObjectArray<Class>> args);
template
-mirror::Method* Class::GetDeclaredMethodInternal<PointerSize::k64, true>(
+ObjPtr<Method> Class::GetDeclaredMethodInternal<PointerSize::k64, true>(
Thread* self,
- mirror::Class* klass,
- mirror::String* name,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<String> name,
+ ObjPtr<ObjectArray<Class>> args);
template <PointerSize kPointerSize, bool kTransactionActive>
-mirror::Constructor* Class::GetDeclaredConstructorInternal(
+ObjPtr<Constructor> Class::GetDeclaredConstructorInternal(
Thread* self,
- mirror::Class* klass,
- mirror::ObjectArray<mirror::Class>* args) {
+ ObjPtr<Class> klass,
+ ObjPtr<ObjectArray<Class>> args) {
StackHandleScope<1> hs(self);
ArtMethod* result = klass->GetDeclaredConstructor(self, hs.NewHandle(args), kPointerSize);
return result != nullptr
- ? mirror::Constructor::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, result)
+ ? Constructor::CreateFromArtMethod<kPointerSize, kTransactionActive>(self, result)
: nullptr;
}
-// mirror::Constructor::CreateFromArtMethod<kTransactionActive>(self, result)
+// Constructor::CreateFromArtMethod<kTransactionActive>(self, result)
template
-mirror::Constructor* Class::GetDeclaredConstructorInternal<PointerSize::k32, false>(
+ObjPtr<Constructor> Class::GetDeclaredConstructorInternal<PointerSize::k32, false>(
Thread* self,
- mirror::Class* klass,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<ObjectArray<Class>> args);
template
-mirror::Constructor* Class::GetDeclaredConstructorInternal<PointerSize::k32, true>(
+ObjPtr<Constructor> Class::GetDeclaredConstructorInternal<PointerSize::k32, true>(
Thread* self,
- mirror::Class* klass,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<ObjectArray<Class>> args);
template
-mirror::Constructor* Class::GetDeclaredConstructorInternal<PointerSize::k64, false>(
+ObjPtr<Constructor> Class::GetDeclaredConstructorInternal<PointerSize::k64, false>(
Thread* self,
- mirror::Class* klass,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<ObjectArray<Class>> args);
template
-mirror::Constructor* Class::GetDeclaredConstructorInternal<PointerSize::k64, true>(
+ObjPtr<Constructor> Class::GetDeclaredConstructorInternal<PointerSize::k64, true>(
Thread* self,
- mirror::Class* klass,
- mirror::ObjectArray<mirror::Class>* args);
+ ObjPtr<Class> klass,
+ ObjPtr<ObjectArray<Class>> args);
int32_t Class::GetInnerClassFlags(Handle<Class> h_this, int32_t default_value) {
if (h_this->IsProxyClass() || h_this->GetDexCache() == nullptr) {
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 725939a..12ce014 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -56,7 +56,7 @@
class Method;
template <typename T> struct PACKED(8) DexCachePair;
-using StringDexCachePair = DexCachePair<mirror::String>;
+using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;
// C++ mirror of java.lang.Class
@@ -337,18 +337,7 @@
// For array classes, where all the classes are final due to there being no sub-classes, an
// Object[] may be assigned to by a String[] but a String[] may not be assigned to by other
// types as the component is final.
- bool CannotBeAssignedFromOtherTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
- if (!IsArrayClass()) {
- return IsFinal();
- } else {
- Class* component = GetComponentType();
- if (component->IsPrimitive()) {
- return true;
- } else {
- return component->CannotBeAssignedFromOtherTypes();
- }
- }
- }
+ bool CannotBeAssignedFromOtherTypes() REQUIRES_SHARED(Locks::mutator_lock_);
// Returns true if this class is the placeholder and should retire and
// be replaced with a class with the right size for embedded imt/vtable.
@@ -473,7 +462,7 @@
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
Class* GetComponentType() REQUIRES_SHARED(Locks::mutator_lock_);
- void SetComponentType(Class* new_component_type) REQUIRES_SHARED(Locks::mutator_lock_) {
+ void SetComponentType(ObjPtr<Class> new_component_type) REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(GetComponentType() == nullptr);
DCHECK(new_component_type != nullptr);
// Component type is invariant: use non-transactional mode without check.
@@ -508,7 +497,7 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsObjectArrayClass() REQUIRES_SHARED(Locks::mutator_lock_) {
- mirror::Class* const component_type = GetComponentType<kVerifyFlags, kReadBarrierOption>();
+ ObjPtr<Class> const component_type = GetComponentType<kVerifyFlags, kReadBarrierOption>();
return component_type != nullptr && !component_type->IsPrimitive();
}
@@ -528,12 +517,12 @@
// Creates a raw object instance but does not invoke the default constructor.
template<bool kIsInstrumented, bool kCheckAddFinalizer = true>
- ALWAYS_INLINE Object* Alloc(Thread* self, gc::AllocatorType allocator_type)
+ ALWAYS_INLINE ObjPtr<Object> Alloc(Thread* self, gc::AllocatorType allocator_type)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
- Object* AllocObject(Thread* self)
+ ObjPtr<Object> AllocObject(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
- Object* AllocNonMovableObject(Thread* self)
+ ObjPtr<Object> AllocNonMovableObject(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -638,11 +627,14 @@
// Can this class access a resolved method?
// Note that access to methods's class is checked and this may require looking up the class
// referenced by the MethodId in the DexFile in case the declaring class is inaccessible.
- bool CanAccessResolvedMethod(Class* access_to, ArtMethod* resolved_method,
- DexCache* dex_cache, uint32_t method_idx)
+ bool CanAccessResolvedMethod(ObjPtr<Class> access_to,
+ ArtMethod* resolved_method,
+ ObjPtr<DexCache> dex_cache,
+ uint32_t method_idx)
REQUIRES_SHARED(Locks::mutator_lock_);
template <InvokeType throw_invoke_type>
- bool CheckResolvedMethodAccess(Class* access_to, ArtMethod* resolved_method,
+ bool CheckResolvedMethodAccess(ObjPtr<Class> access_to,
+ ArtMethod* resolved_method,
uint32_t method_idx)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -661,11 +653,12 @@
// Get first common super class. It will never return null.
// `This` and `klass` must be classes.
- Class* GetCommonSuperClass(Handle<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<Class> GetCommonSuperClass(Handle<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
- void SetSuperClass(Class* new_super_class) REQUIRES_SHARED(Locks::mutator_lock_) {
+ void SetSuperClass(ObjPtr<Class> new_super_class) REQUIRES_SHARED(Locks::mutator_lock_) {
// Super class is assigned once, except during class linker initialization.
- Class* old_super_class = GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(Class, super_class_));
+ ObjPtr<Class> old_super_class =
+ GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(Class, super_class_));
DCHECK(old_super_class == nullptr || old_super_class == new_super_class);
DCHECK(new_super_class != nullptr);
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, super_class_), new_super_class);
@@ -681,7 +674,7 @@
ClassLoader* GetClassLoader() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
- void SetClassLoader(ClassLoader* new_cl) REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetClassLoader(ObjPtr<ClassLoader> new_cl) REQUIRES_SHARED(Locks::mutator_lock_);
static MemberOffset DexCacheOffset() {
return MemberOffset(OFFSETOF_MEMBER(Class, dex_cache_));
@@ -699,7 +692,7 @@
DexCache* GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
// Also updates the dex_cache_strings_ variable from new_dex_cache.
- void SetDexCache(DexCache* new_dex_cache) REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetDexCache(ObjPtr<DexCache> new_dex_cache) REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE IterationRange<StrideIterator<ArtMethod>> GetDirectMethods(PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -753,15 +746,16 @@
REQUIRES_SHARED(Locks::mutator_lock_);
template <PointerSize kPointerSize, bool kTransactionActive>
- static Method* GetDeclaredMethodInternal(Thread* self,
- mirror::Class* klass,
- mirror::String* name,
- mirror::ObjectArray<mirror::Class>* args)
+ static ObjPtr<Method> GetDeclaredMethodInternal(Thread* self,
+ ObjPtr<Class> klass,
+ ObjPtr<String> name,
+ ObjPtr<ObjectArray<Class>> args)
REQUIRES_SHARED(Locks::mutator_lock_);
+
template <PointerSize kPointerSize, bool kTransactionActive>
- static Constructor* GetDeclaredConstructorInternal(Thread* self,
- mirror::Class* klass,
- mirror::ObjectArray<mirror::Class>* args)
+ static ObjPtr<Constructor> GetDeclaredConstructorInternal(Thread* self,
+ ObjPtr<Class> klass,
+ ObjPtr<ObjectArray<Class>> args)
REQUIRES_SHARED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -894,69 +888,86 @@
ArtMethod* FindVirtualMethodForVirtualOrInterface(ArtMethod* method, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindInterfaceMethod(const StringPiece& name, const StringPiece& signature,
+ ArtMethod* FindInterfaceMethod(const StringPiece& name,
+ const StringPiece& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindInterfaceMethod(const StringPiece& name, const Signature& signature,
+ ArtMethod* FindInterfaceMethod(const StringPiece& name,
+ const Signature& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ ArtMethod* FindInterfaceMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethod(const StringPiece& name, const StringPiece& signature,
+ ArtMethod* FindDeclaredDirectMethod(const StringPiece& name,
+ const StringPiece& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethod(const StringPiece& name, const Signature& signature,
+ ArtMethod* FindDeclaredDirectMethod(const StringPiece& name,
+ const Signature& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ ArtMethod* FindDeclaredDirectMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDirectMethod(const StringPiece& name, const StringPiece& signature,
+ ArtMethod* FindDirectMethod(const StringPiece& name,
+ const StringPiece& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDirectMethod(const StringPiece& name, const Signature& signature,
+ ArtMethod* FindDirectMethod(const StringPiece& name,
+ const Signature& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ ArtMethod* FindDirectMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name, const StringPiece& signature,
+ ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name,
+ const StringPiece& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name, const Signature& signature,
+ ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name,
+ const Signature& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ ArtMethod* FindDeclaredVirtualMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethodByName(const StringPiece& name, PointerSize pointer_size)
+ ArtMethod* FindDeclaredVirtualMethodByName(const StringPiece& name,
+ PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethodByName(const StringPiece& name, PointerSize pointer_size)
+ ArtMethod* FindDeclaredDirectMethodByName(const StringPiece& name,
+ PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindVirtualMethod(const StringPiece& name, const StringPiece& signature,
+ ArtMethod* FindVirtualMethod(const StringPiece& name,
+ const StringPiece& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindVirtualMethod(const StringPiece& name, const Signature& signature,
+ ArtMethod* FindVirtualMethod(const StringPiece& name,
+ const Signature& signature,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtMethod* FindVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ ArtMethod* FindVirtualMethod(ObjPtr<DexCache> dex_cache,
+ uint32_t dex_method_idx,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -976,7 +987,8 @@
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE IfTable* GetIfTable() REQUIRES_SHARED(Locks::mutator_lock_);
- ALWAYS_INLINE void SetIfTable(IfTable* new_iftable) REQUIRES_SHARED(Locks::mutator_lock_);
+ ALWAYS_INLINE void SetIfTable(ObjPtr<IfTable> new_iftable)
+ REQUIRES_SHARED(Locks::mutator_lock_);
// Get instance fields of the class (See also GetSFields).
LengthPrefixedArray<ArtField>* GetIFieldsPtr() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1077,32 +1089,34 @@
// Finds the given instance field in this class or a superclass, only searches classes that
// have the same dex cache.
- ArtField* FindInstanceField(const DexCache* dex_cache, uint32_t dex_field_idx)
+ ArtField* FindInstanceField(ObjPtr<DexCache> dex_cache, uint32_t dex_field_idx)
REQUIRES_SHARED(Locks::mutator_lock_);
ArtField* FindDeclaredInstanceField(const StringPiece& name, const StringPiece& type)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtField* FindDeclaredInstanceField(const DexCache* dex_cache, uint32_t dex_field_idx)
+ ArtField* FindDeclaredInstanceField(ObjPtr<DexCache> dex_cache, uint32_t dex_field_idx)
REQUIRES_SHARED(Locks::mutator_lock_);
// Finds the given static field in this class or a superclass.
- static ArtField* FindStaticField(Thread* self, Handle<Class> klass, const StringPiece& name,
+ static ArtField* FindStaticField(Thread* self,
+ Handle<Class> klass,
+ const StringPiece& name,
const StringPiece& type)
REQUIRES_SHARED(Locks::mutator_lock_);
// Finds the given static field in this class or superclass, only searches classes that
// have the same dex cache.
static ArtField* FindStaticField(Thread* self,
- Class* klass,
- const DexCache* dex_cache,
+ ObjPtr<Class> klass,
+ ObjPtr<DexCache> dex_cache,
uint32_t dex_field_idx)
REQUIRES_SHARED(Locks::mutator_lock_);
ArtField* FindDeclaredStaticField(const StringPiece& name, const StringPiece& type)
REQUIRES_SHARED(Locks::mutator_lock_);
- ArtField* FindDeclaredStaticField(const DexCache* dex_cache, uint32_t dex_field_idx)
+ ArtField* FindDeclaredStaticField(ObjPtr<DexCache> dex_cache, uint32_t dex_field_idx)
REQUIRES_SHARED(Locks::mutator_lock_);
pid_t GetClinitThreadId() REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1148,7 +1162,7 @@
}
// Can't call this SetClass or else gets called instead of Object::SetClass in places.
- static void SetClassClass(Class* java_lang_Class) REQUIRES_SHARED(Locks::mutator_lock_);
+ static void SetClassClass(ObjPtr<Class> java_lang_Class) REQUIRES_SHARED(Locks::mutator_lock_);
static void ResetClass();
static void VisitRoots(RootVisitor* visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1178,8 +1192,9 @@
uint16_t GetDirectInterfaceTypeIdx(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_);
- static mirror::Class* GetDirectInterface(Thread* self, Handle<mirror::Class> klass,
- uint32_t idx)
+ static ObjPtr<Class> GetDirectInterface(Thread* self,
+ Handle<Class> klass,
+ uint32_t idx)
REQUIRES_SHARED(Locks::mutator_lock_);
const char* GetSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1194,7 +1209,9 @@
void AssertInitializedOrInitializingInThread(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_);
- Class* CopyOf(Thread* self, int32_t new_length, ImTable* imt,
+ Class* CopyOf(Thread* self,
+ int32_t new_length,
+ ImTable* imt,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
@@ -1218,8 +1235,9 @@
}
// May cause thread suspension due to EqualParameters.
- ArtMethod* GetDeclaredConstructor(
- Thread* self, Handle<mirror::ObjectArray<mirror::Class>> args, PointerSize pointer_size)
+ ArtMethod* GetDeclaredConstructor(Thread* self,
+ Handle<ObjectArray<Class>> args,
+ PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
static int32_t GetInnerClassFlags(Handle<Class> h_this, int32_t default_value)
@@ -1232,7 +1250,7 @@
explicit InitializeClassVisitor(uint32_t class_size) : class_size_(class_size) {
}
- void operator()(ObjPtr<mirror::Object> obj, size_t usable_size) const
+ void operator()(ObjPtr<Object> obj, size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_);
private:
@@ -1277,14 +1295,14 @@
template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
typename Visitor>
- void FixupNativePointers(mirror::Class* dest, PointerSize pointer_size, const Visitor& visitor)
+ void FixupNativePointers(Class* dest, PointerSize pointer_size, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
private:
ALWAYS_INLINE void SetMethodsPtrInternal(LengthPrefixedArray<ArtMethod>* new_methods)
REQUIRES_SHARED(Locks::mutator_lock_);
- void SetVerifyError(Object* klass) REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetVerifyError(ObjPtr<Object> klass) REQUIRES_SHARED(Locks::mutator_lock_);
template <bool throw_on_failure, bool use_referrers_cache>
bool ResolvedFieldAccessTest(ObjPtr<Class> access_to,
@@ -1300,7 +1318,7 @@
ObjPtr<DexCache> dex_cache)
REQUIRES_SHARED(Locks::mutator_lock_);
- bool Implements(Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);
+ bool Implements(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
bool IsArrayAssignableFromArray(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
bool IsAssignableFromArray(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1333,7 +1351,7 @@
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
typename Visitor>
- void VisitReferences(mirror::Class* klass, const Visitor& visitor)
+ void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
// 'Class' Object Fields
diff --git a/runtime/mirror/field-inl.h b/runtime/mirror/field-inl.h
index 445f23f..940e824 100644
--- a/runtime/mirror/field-inl.h
+++ b/runtime/mirror/field-inl.h
@@ -48,7 +48,7 @@
self->ClearException();
}
}
- auto ret = hs.NewHandle(static_cast<Field*>(StaticClass()->AllocObject(self)));
+ auto ret = hs.NewHandle(static_cast<Field*>(StaticClass()->AllocObject(self).Ptr()));
if (UNLIKELY(ret.Get() == nullptr)) {
self->AssertPendingOOMException();
return nullptr;
diff --git a/runtime/mirror/method.cc b/runtime/mirror/method.cc
index 71bac7e..7ddadda 100644
--- a/runtime/mirror/method.cc
+++ b/runtime/mirror/method.cc
@@ -54,12 +54,12 @@
template <PointerSize kPointerSize, bool kTransactionActive>
Method* Method::CreateFromArtMethod(Thread* self, ArtMethod* method) {
DCHECK(!method->IsConstructor()) << PrettyMethod(method);
- auto* ret = down_cast<Method*>(StaticClass()->AllocObject(self));
+ ObjPtr<Method> ret = ObjPtr<Method>::DownCast(StaticClass()->AllocObject(self));
if (LIKELY(ret != nullptr)) {
- static_cast<Executable*>(ret)->
+ ObjPtr<Executable>(ret)->
CreateFromArtMethod<kPointerSize, kTransactionActive>(method);
}
- return ret;
+ return ret.Ptr();
}
template Method* Method::CreateFromArtMethod<PointerSize::k32, false>(Thread* self,
@@ -106,12 +106,12 @@
template <PointerSize kPointerSize, bool kTransactionActive>
Constructor* Constructor::CreateFromArtMethod(Thread* self, ArtMethod* method) {
DCHECK(method->IsConstructor()) << PrettyMethod(method);
- auto* ret = down_cast<Constructor*>(StaticClass()->AllocObject(self));
+ ObjPtr<Constructor> ret = ObjPtr<Constructor>::DownCast(StaticClass()->AllocObject(self));
if (LIKELY(ret != nullptr)) {
- static_cast<Executable*>(ret)->
+ ObjPtr<Executable>(ret)->
CreateFromArtMethod<kPointerSize, kTransactionActive>(method);
}
- return ret;
+ return ret.Ptr();
}
template Constructor* Constructor::CreateFromArtMethod<PointerSize::k32, false>(
diff --git a/runtime/mirror/method_type.cc b/runtime/mirror/method_type.cc
index ba6ea5e..0b52931 100644
--- a/runtime/mirror/method_type.cc
+++ b/runtime/mirror/method_type.cc
@@ -29,7 +29,7 @@
Handle<ObjectArray<Class>> param_types) {
StackHandleScope<1> hs(self);
Handle<mirror::MethodType> mt(
- hs.NewHandle(static_cast<MethodType*>(StaticClass()->AllocObject(self))));
+ hs.NewHandle(ObjPtr<MethodType>::DownCast(StaticClass()->AllocObject(self))));
// TODO: Do we ever create a MethodType during a transaction ? There doesn't
// seem like a good reason to do a polymorphic invoke that results in the
diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc
index 062afd3..60e2bf8 100644
--- a/runtime/mirror/object_test.cc
+++ b/runtime/mirror/object_test.cc
@@ -139,10 +139,10 @@
ASSERT_TRUE(oa->GetClass() != nullptr);
Handle<mirror::Class> klass(hs.NewHandle(oa->GetClass()));
ASSERT_EQ(2U, klass->NumDirectInterfaces());
- EXPECT_EQ(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;"),
- mirror::Class::GetDirectInterface(soa.Self(), klass, 0));
- EXPECT_EQ(class_linker_->FindSystemClass(soa.Self(), "Ljava/io/Serializable;"),
- mirror::Class::GetDirectInterface(soa.Self(), klass, 1));
+ EXPECT_OBJ_PTR_EQ(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;"),
+ mirror::Class::GetDirectInterface(soa.Self(), klass, 0));
+ EXPECT_OBJ_PTR_EQ(class_linker_->FindSystemClass(soa.Self(), "Ljava/io/Serializable;"),
+ mirror::Class::GetDirectInterface(soa.Self(), klass, 1));
}
TEST_F(ObjectTest, AllocArray) {
diff --git a/runtime/mirror/stack_trace_element.cc b/runtime/mirror/stack_trace_element.cc
index 96f6a53..e2050cc 100644
--- a/runtime/mirror/stack_trace_element.cc
+++ b/runtime/mirror/stack_trace_element.cc
@@ -42,8 +42,8 @@
StackTraceElement* StackTraceElement::Alloc(Thread* self, Handle<String> declaring_class,
Handle<String> method_name, Handle<String> file_name,
int32_t line_number) {
- StackTraceElement* trace =
- down_cast<StackTraceElement*>(GetStackTraceElement()->AllocObject(self));
+ ObjPtr<StackTraceElement> trace =
+ ObjPtr<StackTraceElement>::DownCast(GetStackTraceElement()->AllocObject(self));
if (LIKELY(trace != nullptr)) {
if (Runtime::Current()->IsActiveTransaction()) {
trace->Init<true>(declaring_class, method_name, file_name, line_number);
@@ -51,7 +51,7 @@
trace->Init<false>(declaring_class, method_name, file_name, line_number);
}
}
- return trace;
+ return trace.Ptr();
}
template<bool kTransactionActive>