Merge "Fix accidental IMT and root marking regression"
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 84da475..3ce646f 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1264,12 +1264,18 @@
// marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
for (GcRoot<mirror::Class>& root : class_table_) {
buffered_visitor.VisitRoot(root);
- root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_);
+ if ((flags & kVisitRootFlagNonMoving) == 0) {
+ // Don't bother visiting ArtField and ArtMethod if kVisitRootFlagNonMoving is set since
+ // these roots are all reachable from the class or dex cache.
+ root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_);
+ }
}
// PreZygote classes can't move so we won't need to update fields' declaring classes.
for (GcRoot<mirror::Class>& root : pre_zygote_class_table_) {
buffered_visitor.VisitRoot(root);
- root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_);
+ if ((flags & kVisitRootFlagNonMoving) == 0) {
+ root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_);
+ }
}
} else if ((flags & kVisitRootFlagNewRoots) != 0) {
for (auto& root : new_class_roots_) {
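A hypothetical, simplified model of the reasoning in the new comment (toy types, not ART's): the GC root embedded in a native ArtField/ArtMethod points at an object the class table already roots, so a non-moving mark pass loses nothing by skipping it; only a collector that can move objects needs to revisit it so the embedded pointer can be updated.

    struct MirrorClass { bool marked = false; };

    struct ArtMethodLike {
      MirrorClass* declaring_class;  // GC root embedded in native metadata
    };

    struct ClassTableEntry {
      MirrorClass* klass;            // primary root, always visited
      ArtMethodLike method;          // method.declaring_class == klass
    };

    void MarkConcurrentClassRoots(ClassTableEntry& entry, bool non_moving) {
      entry.klass->marked = true;  // the class root itself is always marked
      if (!non_moving) {
        // Revisited only when objects may move and the pointer must be updated;
        // for marking alone this is redundant with the line above.
        entry.method.declaring_class->marked = true;
      }
    }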
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 7c92b18..042c33f 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -2083,7 +2083,7 @@
return GetTwoWordFailureValue(); // Failure.
}
} else {
- DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
+ DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod());
if (kIsDebugBuild) {
uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
const DexFile::CodeItem* code = caller_method->GetCodeItem();
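The switch to DCHECK_EQ is purely diagnostic: EQ-style checks report both operand values when they fail, rather than only the stringified expression. A rough stand-alone illustration with a toy macro (not ART's base/logging.h):

    #include <cstdlib>
    #include <iostream>

    // Toy macro: on failure, print the expression *and* the two values.
    #define TOY_DCHECK_EQ(a, b)                                      \
      do {                                                           \
        if ((a) != (b)) {                                            \
          std::cerr << "Check failed: " #a " == " #b                 \
                    << " (" << (a) << " vs " << (b) << ")\n";        \
          std::abort();                                              \
        }                                                            \
      } while (0)

    // Usage mirroring the call site above (names are placeholders):
    //   TOY_DCHECK_EQ(interface_method, resolution_method);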
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 91a0e8a..1c9c412 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -590,7 +590,8 @@
void MarkSweep::MarkConcurrentRoots(VisitRootFlags flags) {
TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
// Visit all runtime roots and clear dirty flags.
- Runtime::Current()->VisitConcurrentRoots(this, flags);
+ Runtime::Current()->VisitConcurrentRoots(
+ this, static_cast<VisitRootFlags>(flags | kVisitRootFlagNonMoving));
}
class ScanObjectVisitor {
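MarkSweep never moves objects, so it can unconditionally advertise the optimization when delegating concurrent root visiting. A minimal sketch (enumerator values taken from the runtime.h hunk below, other flags elided) of why the explicit cast is needed: bitwise OR on an unscoped enum yields an int, not a VisitRootFlags.

    enum VisitRootFlags {
      kVisitRootFlagClearRootLog = 0x10,
      kVisitRootFlagNonMoving = 0x20,
    };

    VisitRootFlags WithNonMoving(VisitRootFlags flags) {
      // flags | kVisitRootFlagNonMoving has type int; cast back before passing
      // it to an interface that takes VisitRootFlags.
      return static_cast<VisitRootFlags>(flags | kVisitRootFlagNonMoving);
    }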
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index fbde494..59d0259 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1630,7 +1630,12 @@
}
uint64_t Heap::GetObjectsAllocatedEver() const {
- return GetObjectsFreedEver() + GetObjectsAllocated();
+ uint64_t total = GetObjectsFreedEver();
+ // If we are detached, we can't use GetObjectsAllocated since we can't change thread states.
+ if (Thread::Current() != nullptr) {
+ total += GetObjectsAllocated();
+ }
+ return total;
}
uint64_t Heap::GetBytesAllocatedEver() const {
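GetObjectsAllocated requires the calling thread to transition thread states, which is only possible for a thread attached to the runtime; Thread::Current() returns null otherwise, so a detached caller now falls back to the freed-ever count alone. A self-contained toy of the guard pattern (hypothetical types and values, not ART's):

    #include <cstdint>
    #include <cstdio>

    struct Thread {
      // nullptr when the calling thread has never attached (or has detached).
      static Thread* Current() { return current_; }
      static thread_local Thread* current_;
    };
    thread_local Thread* Thread::current_ = nullptr;

    struct Heap {
      uint64_t objects_freed_ever = 100;

      // Pretend this requires a thread state change, i.e. an attached thread.
      uint64_t GetObjectsAllocated() const { return 42; }

      uint64_t GetObjectsAllocatedEver() const {
        uint64_t total = objects_freed_ever;
        if (Thread::Current() != nullptr) {  // skip when detached
          total += GetObjectsAllocated();
        }
        return total;
      }
    };

    int main() {
      Heap heap;
      std::printf("detached: %llu\n",
                  static_cast<unsigned long long>(heap.GetObjectsAllocatedEver()));
      Thread self;
      Thread::current_ = &self;  // simulate attaching the calling thread
      std::printf("attached: %llu\n",
                  static_cast<unsigned long long>(heap.GetObjectsAllocatedEver()));
      return 0;
    }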
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 482640b..f0b7bfd 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -806,7 +806,7 @@
for (size_t i = 0; i < kImtSize; i++) {
auto method = methods[i];
DCHECK(method != nullptr);
- SetEmbeddedImTableEntry(i, Runtime::Current()->GetImtConflictMethod(), pointer_size);
+ SetEmbeddedImTableEntry(i, method, pointer_size);
}
PointerArray* table = GetVTableDuringLinking();
CHECK(table != nullptr) << PrettyClass(this);
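This is the IMT regression from the subject line: the copy loop was writing the IMT conflict method into every embedded slot, discarding the per-slot entries computed during linking, so interface calls through the embedded IMT would always have gone down the slower conflict-resolution path. A toy reconstruction (hypothetical names and sizes) of the intended element-wise copy:

    #include <array>
    #include <cassert>
    #include <cstddef>

    constexpr std::size_t kImtSize = 64;  // illustrative; the real value lives in mirror::Class
    using MethodPtr = const void*;

    static int conflict_stub;             // stands in for GetImtConflictMethod()
    static int resolved_a, resolved_b;    // stand in for resolved interface methods

    void CopyEmbeddedImt(const std::array<MethodPtr, kImtSize>& methods,
                         std::array<MethodPtr, kImtSize>& embedded) {
      for (std::size_t i = 0; i < kImtSize; ++i) {
        // Regression: embedded[i] = &conflict_stub;  // every slot became a conflict
        embedded[i] = methods[i];                     // fix: copy the entry passed in
      }
    }

    int main() {
      std::array<MethodPtr, kImtSize> methods;
      methods.fill(&conflict_stub);
      methods[0] = &resolved_a;
      methods[1] = &resolved_b;
      std::array<MethodPtr, kImtSize> embedded{};
      CopyEmbeddedImt(methods, embedded);
      assert(embedded[0] == &resolved_a && embedded[1] == &resolved_b);
      return 0;
    }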
diff --git a/runtime/runtime.h b/runtime/runtime.h
index f6f9725..e569333 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -98,6 +98,9 @@
kVisitRootFlagStartLoggingNewRoots = 0x4,
kVisitRootFlagStopLoggingNewRoots = 0x8,
kVisitRootFlagClearRootLog = 0x10,
+ // Non-moving means we may skip visiting some roots if they are definitely reachable from
+ // another location, e.g. ArtMethod and ArtField roots.
+ kVisitRootFlagNonMoving = 0x20,
};
class Runtime {