Hash-based dex cache type array.

Replace the direct-mapped GcRoot<Class>* resolved types array in
mirror::DexCache with a fixed-size, hash-based array of
std::atomic<TypeDexCachePair> (kDexCacheTypeCacheSize = 1024 slots),
mirroring the existing string and MethodType caches. A type index maps
to slot type_idx % kDexCacheTypeCacheSize, and a lookup succeeds only
if the stored pair's index matches the requested index; callers that
relied on the old one-to-one mapping, such as the direct interface
lookup in mirror::Class, now go through ClassLinker::LookupResolvedType
instead. SetResolvedType keeps its release store so that a published
class is visible together with its initialized members (b/32075261).
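
For illustration only, below is a minimal standalone sketch (not part of
the patch) of the <object, index> pair scheme the string/type/MethodType
arrays share after this change. The names CachePair, gCache, Lookup and
Store are invented for the sketch; the real pairs hold a GcRoot<T> read
through ObjPtr<> with read barriers, whereas a 32-bit "object id" stands
in here so the pair stays an 8-byte, lock-free atomic.

  // Sketch of the hash-based dex cache pair lookup/store (assumptions above).
  #include <atomic>
  #include <cassert>
  #include <cstdint>
  #include <cstdio>

  struct CachePair {
    uint32_t object;  // 0 plays the role of a null GcRoot.
    uint32_t index;   // Dex index the cached object belongs to.

    // The cache size is a power of two, so dex index 0 always hashes to
    // slot 0; seeding slot 0 with index 1 makes the initial lookup miss.
    static uint32_t InvalidIndexForSlot(uint32_t slot) {
      return (slot == 0) ? 1u : 0u;
    }

    // Return the cached object only if this slot still belongs to `idx`.
    uint32_t GetObjectForIndex(uint32_t idx) const {
      return (idx == index) ? object : 0u;
    }
  };

  constexpr uint32_t kCacheSize = 1024;  // kDexCacheTypeCacheSize in the change.
  static std::atomic<CachePair> gCache[kCacheSize];

  static uint32_t SlotIndex(uint32_t dex_idx) { return dex_idx % kCacheSize; }

  static uint32_t Lookup(uint32_t dex_idx) {
    return gCache[SlotIndex(dex_idx)].load(std::memory_order_relaxed)
        .GetObjectForIndex(dex_idx);
  }

  static void Store(uint32_t dex_idx, uint32_t obj) {
    // SetResolvedType uses a release store so a thread that sees the class
    // also sees its initialized members (b/32075261); relaxed suffices for
    // strings and MethodTypes.
    gCache[SlotIndex(dex_idx)].store(CachePair{obj, dex_idx},
                                     std::memory_order_release);
  }

  int main() {
    for (uint32_t slot = 0; slot != kCacheSize; ++slot) {
      gCache[slot].store(CachePair{0u, CachePair::InvalidIndexForSlot(slot)},
                         std::memory_order_relaxed);
    }
    Store(5u, 42u);
    assert(Lookup(5u) == 42u);              // Hit: index matches the slot tag.
    assert(Lookup(5u + kCacheSize) == 0u);  // Miss: same slot, different index.
    std::printf("lookup(5) = %u\n", static_cast<unsigned>(Lookup(5u)));
    return 0;
  }
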
Test: m test-art-host (Interpreter, Optimizing, JIT)
Test: m test-art-target on Nexus 6P (Interpreter, Optimizing, JIT)
Test: Nexus 6P boots
Test: m valgrind-test-art-host
Bug: 30627598
Bug: 34659969
Bug: 30419309
Change-Id: Ic00eda89e58088a3573fc9ec0ad04c0e69e161d1
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index f08d4da..85636fb 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -951,7 +951,8 @@
return interfaces->Get(idx);
} else {
dex::TypeIndex type_idx = klass->GetDirectInterfaceTypeIdx(idx);
- ObjPtr<Class> interface = klass->GetDexCache()->GetResolvedType(type_idx);
+ ObjPtr<Class> interface = ClassLinker::LookupResolvedType(
+ type_idx, klass->GetDexCache(), klass->GetClassLoader());
return interface;
}
}
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index a59bb7b..bef3ad2 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -40,14 +40,22 @@
return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}
-inline mirror::String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
+inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
- return StringDexCachePair::Lookup(GetStrings(), string_idx.index_, NumStrings()).Read();
+ const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
+ DCHECK_LT(slot_idx, NumStrings());
+ return slot_idx;
}
-inline void DexCache::SetResolvedString(dex::StringIndex string_idx,
- ObjPtr<mirror::String> resolved) {
- StringDexCachePair::Assign(GetStrings(), string_idx.index_, resolved.Ptr(), NumStrings());
+inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
+ return GetStrings()[StringSlotIndex(string_idx)].load(
+ std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
+}
+
+inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
+ DCHECK(resolved != nullptr);
+ GetStrings()[StringSlotIndex(string_idx)].store(
+ StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
Runtime* const runtime = Runtime::Current();
if (UNLIKELY(runtime->IsActiveTransaction())) {
DCHECK(runtime->IsAotCompiler());
@@ -58,50 +66,70 @@
}
inline void DexCache::ClearString(dex::StringIndex string_idx) {
- const uint32_t slot_idx = string_idx.index_ % NumStrings();
DCHECK(Runtime::Current()->IsAotCompiler());
+ uint32_t slot_idx = StringSlotIndex(string_idx);
StringDexCacheType* slot = &GetStrings()[slot_idx];
// This is racy but should only be called from the transactional interpreter.
if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
- StringDexCachePair cleared(
- nullptr,
- StringDexCachePair::InvalidIndexForSlot(slot_idx));
+ StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
slot->store(cleared, std::memory_order_relaxed);
}
}
+inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
+ DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
+ const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
+ DCHECK_LT(slot_idx, NumResolvedTypes());
+ return slot_idx;
+}
+
inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
// It is theorized that a load acquire is not required since obtaining the resolved class will
// always have an address dependency or a lock.
- DCHECK_LT(type_idx.index_, NumResolvedTypes());
- return GetResolvedTypes()[type_idx.index_].Read();
+ return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
+ std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}
inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
- DCHECK_LT(type_idx.index_, NumResolvedTypes()); // NOTE: Unchecked, i.e. not throwing AIOOB.
+ DCHECK(resolved != nullptr);
// TODO default transaction support.
// Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
// class but not necessarily seeing the loaded members like the static fields array.
// See b/32075261.
- reinterpret_cast<Atomic<GcRoot<mirror::Class>>&>(GetResolvedTypes()[type_idx.index_]).
- StoreRelease(GcRoot<Class>(resolved));
+ GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
+ TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
// TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}
-inline MethodType* DexCache::GetResolvedMethodType(uint32_t proto_idx) {
+inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
+ DCHECK(Runtime::Current()->IsAotCompiler());
+ uint32_t slot_idx = TypeSlotIndex(type_idx);
+ TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
+ // This is racy but should only be called from the single-threaded ImageWriter and tests.
+ if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
+ TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
+ slot->store(cleared, std::memory_order_relaxed);
+ }
+}
+
+inline uint32_t DexCache::MethodTypeSlotIndex(uint32_t proto_idx) {
DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
DCHECK_LT(proto_idx, GetDexFile()->NumProtoIds());
- return MethodTypeDexCachePair::Lookup(
- GetResolvedMethodTypes(), proto_idx, NumResolvedMethodTypes()).Read();
+ const uint32_t slot_idx = proto_idx % kDexCacheMethodTypeCacheSize;
+ DCHECK_LT(slot_idx, NumResolvedMethodTypes());
+ return slot_idx;
+}
+
+inline MethodType* DexCache::GetResolvedMethodType(uint32_t proto_idx) {
+ return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
+ std::memory_order_relaxed).GetObjectForIndex(proto_idx);
}
inline void DexCache::SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved) {
- DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
- DCHECK_LT(proto_idx, GetDexFile()->NumProtoIds());
-
- MethodTypeDexCachePair::Assign(GetResolvedMethodTypes(), proto_idx, resolved,
- NumResolvedMethodTypes());
+ DCHECK(resolved != nullptr);
+ GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
+ MethodTypeDexCachePair(resolved, proto_idx), std::memory_order_relaxed);
// TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}
@@ -198,49 +226,49 @@
VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
// Visit arrays after.
if (kVisitNativeRoots) {
- VisitDexCachePairs<mirror::String, kReadBarrierOption, Visitor>(
+ VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
GetStrings(), NumStrings(), visitor);
- GcRoot<mirror::Class>* resolved_types = GetResolvedTypes();
- for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) {
- visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier());
- }
+ VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
+ GetResolvedTypes(), NumResolvedTypes(), visitor);
- VisitDexCachePairs<mirror::MethodType, kReadBarrierOption, Visitor>(
+ VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
GetResolvedMethodTypes(), NumResolvedMethodTypes(), visitor);
}
}
template <ReadBarrierOption kReadBarrierOption, typename Visitor>
-inline void DexCache::FixupStrings(mirror::StringDexCacheType* dest, const Visitor& visitor) {
- mirror::StringDexCacheType* src = GetStrings();
+inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
+ StringDexCacheType* src = GetStrings();
for (size_t i = 0, count = NumStrings(); i < count; ++i) {
StringDexCachePair source = src[i].load(std::memory_order_relaxed);
- mirror::String* ptr = source.object.Read<kReadBarrierOption>();
- mirror::String* new_source = visitor(ptr);
+ String* ptr = source.object.Read<kReadBarrierOption>();
+ String* new_source = visitor(ptr);
source.object = GcRoot<String>(new_source);
dest[i].store(source, std::memory_order_relaxed);
}
}
template <ReadBarrierOption kReadBarrierOption, typename Visitor>
-inline void DexCache::FixupResolvedTypes(GcRoot<mirror::Class>* dest, const Visitor& visitor) {
- GcRoot<mirror::Class>* src = GetResolvedTypes();
+inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
+ TypeDexCacheType* src = GetResolvedTypes();
for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) {
- mirror::Class* source = src[i].Read<kReadBarrierOption>();
- mirror::Class* new_source = visitor(source);
- dest[i] = GcRoot<mirror::Class>(new_source);
+ TypeDexCachePair source = src[i].load(std::memory_order_relaxed);
+ Class* ptr = source.object.Read<kReadBarrierOption>();
+ Class* new_source = visitor(ptr);
+ source.object = GcRoot<Class>(new_source);
+ dest[i].store(source, std::memory_order_relaxed);
}
}
template <ReadBarrierOption kReadBarrierOption, typename Visitor>
-inline void DexCache::FixupResolvedMethodTypes(mirror::MethodTypeDexCacheType* dest,
+inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
const Visitor& visitor) {
- mirror::MethodTypeDexCacheType* src = GetResolvedMethodTypes();
+ MethodTypeDexCacheType* src = GetResolvedMethodTypes();
for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) {
MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed);
- mirror::MethodType* ptr = source.object.Read<kReadBarrierOption>();
- mirror::MethodType* new_source = visitor(ptr);
+ MethodType* ptr = source.object.Read<kReadBarrierOption>();
+ MethodType* new_source = visitor(ptr);
source.object = GcRoot<MethodType>(new_source);
dest[i].store(source, std::memory_order_relaxed);
}
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index 741cf3b..3103a92 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -58,8 +58,8 @@
mirror::StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
reinterpret_cast<mirror::StringDexCacheType*>(raw_arrays + layout.StringsOffset());
- GcRoot<mirror::Class>* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
- reinterpret_cast<GcRoot<mirror::Class>*>(raw_arrays + layout.TypesOffset());
+ mirror::TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
+ reinterpret_cast<mirror::TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
ArtMethod** methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
reinterpret_cast<ArtMethod**>(raw_arrays + layout.MethodsOffset());
ArtField** fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
@@ -69,6 +69,10 @@
if (dex_file->NumStringIds() < num_strings) {
num_strings = dex_file->NumStringIds();
}
+ size_t num_types = mirror::DexCache::kDexCacheTypeCacheSize;
+ if (dex_file->NumTypeIds() < num_types) {
+ num_types = dex_file->NumTypeIds();
+ }
// Note that we allocate the method type dex caches regardless of this flag,
// and we make sure here that they're not used by the runtime. This is in the
@@ -104,8 +108,9 @@
CHECK_EQ(strings[i].load(std::memory_order_relaxed).index, 0u);
CHECK(strings[i].load(std::memory_order_relaxed).object.IsNull());
}
- for (size_t i = 0; i < dex_file->NumTypeIds(); ++i) {
- CHECK(types[i].IsNull());
+ for (size_t i = 0; i < num_types; ++i) {
+ CHECK_EQ(types[i].load(std::memory_order_relaxed).index, 0u);
+ CHECK(types[i].load(std::memory_order_relaxed).object.IsNull());
}
for (size_t i = 0; i < dex_file->NumMethodIds(); ++i) {
CHECK(mirror::DexCache::GetElementPtrSize(methods, i, image_pointer_size) == nullptr);
@@ -121,6 +126,9 @@
if (strings != nullptr) {
mirror::StringDexCachePair::Initialize(strings);
}
+ if (types != nullptr) {
+ mirror::TypeDexCachePair::Initialize(types);
+ }
if (method_types != nullptr) {
mirror::MethodTypeDexCachePair::Initialize(method_types);
}
@@ -129,7 +137,7 @@
strings,
num_strings,
types,
- dex_file->NumTypeIds(),
+ num_types,
methods,
dex_file->NumMethodIds(),
fields,
@@ -143,7 +151,7 @@
ObjPtr<String> location,
StringDexCacheType* strings,
uint32_t num_strings,
- GcRoot<Class>* resolved_types,
+ TypeDexCacheType* resolved_types,
uint32_t num_resolved_types,
ArtMethod** resolved_methods,
uint32_t num_resolved_methods,
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 6f88cc5..e68b0c7 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,14 +18,14 @@
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_
#include "array.h"
-#include "art_field.h"
-#include "class.h"
+#include "base/bit_utils.h"
#include "dex_file_types.h"
#include "object.h"
#include "object_array.h"
namespace art {
+class ArtField;
class ArtMethod;
struct DexCacheOffsets;
class DexFile;
@@ -36,6 +36,7 @@
namespace mirror {
+class Class;
class MethodType;
class String;
@@ -60,7 +61,7 @@
// it's always non-null if the id branch succeeds (except for the 0th id).
// Set the initial state for the 0th entry to be {0,1} which is guaranteed to fail
// the lookup id == stored id branch.
- DexCachePair(T* object, uint32_t index)
+ DexCachePair(ObjPtr<T> object, uint32_t index)
: object(object),
index(index) {}
DexCachePair() = default;
@@ -74,39 +75,28 @@
dex_cache[0].store(first_elem, std::memory_order_relaxed);
}
- static GcRoot<T> Lookup(std::atomic<DexCachePair<T>>* dex_cache,
- uint32_t idx,
- uint32_t cache_size) {
- DCHECK_NE(cache_size, 0u);
- DexCachePair<T> element = dex_cache[idx % cache_size].load(std::memory_order_relaxed);
- if (idx != element.index) {
- return GcRoot<T>(nullptr);
- }
-
- DCHECK(!element.object.IsNull());
- return element.object;
- }
-
- static void Assign(std::atomic<DexCachePair<T>>* dex_cache,
- uint32_t idx,
- T* object,
- uint32_t cache_size) {
- DCHECK_LT(idx % cache_size, cache_size);
- dex_cache[idx % cache_size].store(
- DexCachePair<T>(object, idx), std::memory_order_relaxed);
- }
-
static uint32_t InvalidIndexForSlot(uint32_t slot) {
// Since the cache size is a power of two, 0 will always map to slot 0.
// Use 1 for slot 0 and 0 for all other slots.
return (slot == 0) ? 1u : 0u;
}
+
+ T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (idx != index) {
+ return nullptr;
+ }
+ DCHECK(!object.IsNull());
+ return object.Read();
+ }
};
-using StringDexCachePair = DexCachePair<mirror::String>;
+using TypeDexCachePair = DexCachePair<Class>;
+using TypeDexCacheType = std::atomic<TypeDexCachePair>;
+
+using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;
-using MethodTypeDexCachePair = DexCachePair<mirror::MethodType>;
+using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
// C++ mirror of java.lang.DexCache.
@@ -115,6 +105,11 @@
// Size of java.lang.DexCache.class.
static uint32_t ClassSize(PointerSize pointer_size);
+ // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
+ static constexpr size_t kDexCacheTypeCacheSize = 1024;
+ static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
+ "Type dex cache size is not a power of 2.");
+
// Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
static constexpr size_t kDexCacheStringCacheSize = 1024;
static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
@@ -126,6 +121,10 @@
static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
"MethodType dex cache size is not a power of 2.");
+ static constexpr size_t StaticTypeSize() {
+ return kDexCacheTypeCacheSize;
+ }
+
static constexpr size_t StaticStringSize() {
return kDexCacheStringCacheSize;
}
@@ -156,7 +155,7 @@
REQUIRES_SHARED(Locks::mutator_lock_);
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
- void FixupResolvedTypes(GcRoot<mirror::Class>* dest, const Visitor& visitor)
+ void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
@@ -211,7 +210,7 @@
return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
}
- mirror::String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
+ String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
REQUIRES_SHARED(Locks::mutator_lock_);
void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
@@ -226,6 +225,8 @@
void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
REQUIRES_SHARED(Locks::mutator_lock_);
+ void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+
ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -254,11 +255,11 @@
SetFieldPtr<false>(StringsOffset(), strings);
}
- GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
- return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset());
+ TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetFieldPtr<TypeDexCacheType*>(ResolvedTypesOffset());
}
- void SetResolvedTypes(GcRoot<Class>* resolved_types)
+ void SetResolvedTypes(TypeDexCacheType* resolved_types)
ALWAYS_INLINE
REQUIRES_SHARED(Locks::mutator_lock_) {
SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
@@ -323,7 +324,7 @@
SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
}
- void SetLocation(ObjPtr<mirror::String> location) REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);
// NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField**
// provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(),
@@ -340,7 +341,7 @@
ObjPtr<String> location,
StringDexCacheType* strings,
uint32_t num_strings,
- GcRoot<Class>* resolved_types,
+ TypeDexCacheType* resolved_types,
uint32_t num_resolved_types,
ArtMethod** resolved_methods,
uint32_t num_resolved_methods,
@@ -351,12 +352,16 @@
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+ uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+ uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+
// Visit instance fields of the dex cache as well as its associated arrays.
template <bool kVisitNativeRoots,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
typename Visitor>
- void VisitReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor)
+ void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
HeapReference<Object> dex_;
@@ -366,7 +371,7 @@
uint64_t resolved_method_types_; // std::atomic<MethodTypeDexCachePair>* array with
// num_resolved_method_types_ elements.
uint64_t resolved_methods_; // ArtMethod*, array with num_resolved_methods_ elements.
- uint64_t resolved_types_; // GcRoot<Class>*, array with num_resolved_types_ elements.
+ uint64_t resolved_types_; // TypeDexCacheType*, array with num_resolved_types_ elements.
uint64_t strings_; // std::atomic<StringDexCachePair>*, array with num_strings_
// elements.
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index 8f978e1..5693f67 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -51,7 +51,8 @@
EXPECT_TRUE(dex_cache->StaticStringSize() == dex_cache->NumStrings()
|| java_lang_dex_file_->NumStringIds() == dex_cache->NumStrings());
- EXPECT_EQ(java_lang_dex_file_->NumTypeIds(), dex_cache->NumResolvedTypes());
+ EXPECT_TRUE(dex_cache->StaticTypeSize() == dex_cache->NumResolvedTypes()
+ || java_lang_dex_file_->NumTypeIds() == dex_cache->NumResolvedTypes());
EXPECT_EQ(java_lang_dex_file_->NumMethodIds(), dex_cache->NumResolvedMethods());
EXPECT_EQ(java_lang_dex_file_->NumFieldIds(), dex_cache->NumResolvedFields());
EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes()