Implement a new method for walking string references in AppImages.
* Define a new section in the AppImage binary format.
* Verify heap invariants related to native string references.
* Collect managed and native references to managed strings.
* Record the locations of these references as offsets into the image (sketched below).
* Write these offsets into the new image section.
* Walk this list of offsets at load time to intern strings from the
AppImage.
* Verify that strings have been interned correctly.
Test: art/test/testrunner/testrunner.py -b --host
Test: m test-art-host-gtest
Test: Built and ran Keep, Docs, Chrome, YouTube, and Facebook.
Bug: 70734839
Change-Id: Ib5235abdebca018e8920d12318c31b9c0efb0ede
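
For context, here is a minimal, self-contained sketch (not part of the change
itself) of the offset-tagging scheme described in the bullets above. The helper
templates are simplified copies of the ones added to runtime/image.h in this
patch; the offset values are purely illustrative.

  #include <cassert>
  #include <cstdint>

  // Simplified copies of the tag helpers added to runtime/image.h.
  template <typename T> T SetNativeRefTag(T val) { return val | 1u; }
  template <typename T> bool HasNativeRefTag(T val) { return (val & 1u) == 1u; }
  template <typename T> T ClearNativeRefTag(T val) { return val & ~1u; }

  int main() {
    // Hypothetical image offsets; real values come from the image layout.
    uint32_t managed_offset = 0x128u;                 // Field of an image object holding a String.
    uint32_t native_offset = SetNativeRefTag(0x40u);  // Slot in a DexCache's native string array.

    // At load time the low bit selects how the offset is decoded.
    assert(!HasNativeRefTag(managed_offset));
    assert(HasNativeRefTag(native_offset));
    assert(ClearNativeRefTag(native_offset) == 0x40u);
    return 0;
  }
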
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index 95d08b3..9585a15 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -2157,10 +2157,9 @@
// Otherwise it's in app image but superclasses can't be initialized, no need to proceed.
old_status = klass->GetStatus();
- bool too_many_encoded_fields = false;
- if (!is_boot_image && klass->NumStaticFields() > kMaxEncodedFields) {
- too_many_encoded_fields = true;
- }
+ bool too_many_encoded_fields = !is_boot_image &&
+ klass->NumStaticFields() > kMaxEncodedFields;
+
// If the class was not initialized, we can proceed to see if we can initialize static
// fields. Limit the max number of encoded fields.
if (!klass->IsInitialized() &&
@@ -2210,9 +2209,13 @@
if (success) {
runtime->ExitTransactionMode();
DCHECK(!runtime->IsActiveTransaction());
- }
- if (!success) {
+ if (is_boot_image) {
+ // For the boot image, we want to put the updated status in the oat class since we
+ // can't reject the image anyway.
+ old_status = klass->GetStatus();
+ }
+ } else {
CHECK(soa.Self()->IsExceptionPending());
mirror::Throwable* exception = soa.Self()->GetException();
VLOG(compiler) << "Initialization of " << descriptor << " aborted because of "
@@ -2226,10 +2229,6 @@
soa.Self()->ClearException();
runtime->RollbackAllTransactions();
CHECK_EQ(old_status, klass->GetStatus()) << "Previous class status not restored";
- } else if (is_boot_image) {
- // For boot image, we want to put the updated status in the oat class since we can't
- // reject the image anyways.
- old_status = klass->GetStatus();
}
}
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index 685174d..901f478 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -200,13 +200,17 @@
bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
target_ptr_size_ = InstructionSetPointerSize(compiler_options_.GetInstructionSet());
+
+ Thread* const self = Thread::Current();
+
gc::Heap* const heap = Runtime::Current()->GetHeap();
{
- ScopedObjectAccess soa(Thread::Current());
+ ScopedObjectAccess soa(self);
{
TimingLogger::ScopedTiming t("PruneNonImageClasses", timings);
PruneNonImageClasses(); // Remove junk
}
+
if (compile_app_image_) {
TimingLogger::ScopedTiming t("ClearDexFileCookies", timings);
// Clear dex file cookies for app images to enable app image determinism. This is required
@@ -215,22 +219,108 @@
ClearDexFileCookies();
}
}
+
{
TimingLogger::ScopedTiming t("CollectGarbage", timings);
heap->CollectGarbage(/* clear_soft_references */ false); // Remove garbage.
}
if (kIsDebugBuild) {
- ScopedObjectAccess soa(Thread::Current());
+ ScopedObjectAccess soa(self);
CheckNonImageClassesRemoved();
}
+ // Used to store information that will later be used to calculate image
+ // offsets to string references in the AppImage.
+ std::vector<RefInfoPair> string_ref_info;
+ if (ClassLinker::kAppImageMayContainStrings && compile_app_image_) {
+ // Count the number of string references so we can allocate the appropriate
+ // amount of space in the image section.
+ TimingLogger::ScopedTiming t("AppImage:CollectStringReferenceInfo", timings);
+ ScopedObjectAccess soa(self);
+
+ if (kIsDebugBuild) {
+ VerifyNativeGCRootInvariants();
+ CHECK_EQ(image_infos_.size(), 1u);
+ }
+
+ string_ref_info = CollectStringReferenceInfo();
+ image_infos_.back().num_string_references_ = string_ref_info.size();
+ }
+
{
TimingLogger::ScopedTiming t("CalculateNewObjectOffsets", timings);
- ScopedObjectAccess soa(Thread::Current());
+ ScopedObjectAccess soa(self);
CalculateNewObjectOffsets();
}
+ // Obtain the class count for debugging purposes.
+ if (kIsDebugBuild && compile_app_image_) {
+ ScopedObjectAccess soa(self);
+
+ size_t app_image_class_count = 0;
+
+ for (ImageInfo& info : image_infos_) {
+ info.class_table_->Visit([&](ObjPtr<mirror::Class> klass)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (!IsInBootImage(klass.Ptr())) {
+ ++app_image_class_count;
+ }
+
+ // Indicate that we would like to continue visiting classes.
+ return true;
+ });
+ }
+
+ LOG(INFO) << "Dex2Oat:AppImage:classCount = " << app_image_class_count;
+ }
+
+ if (ClassLinker::kAppImageMayContainStrings && compile_app_image_) {
+ // Use the string reference information obtained earlier to calculate image
+ // offsets. These will later be written to the image by Write/CopyMetadata.
+ TimingLogger::ScopedTiming t("AppImage:CalculateImageOffsets", timings);
+ ScopedObjectAccess soa(self);
+
+ size_t managed_string_refs = 0,
+ native_string_refs = 0;
+
+ /*
+ * Iterate over the string reference info and calculate image offsets.
+ * The first element of the pair is the object the reference belongs to
+ * and the second element is the offset of the field. If the offset has
+ * a native ref tag, then 1) the object is a DexCache and 2) the offset needs
+ * to be calculated using the relocation information for the DexCache's
+ * strings array.
+ */
+ for (const RefInfoPair& ref_info : string_ref_info) {
+ uint32_t image_offset;
+
+ if (HasNativeRefTag(ref_info.second)) {
+ ++native_string_refs;
+
+ // Only DexCaches can contain native references to Java strings.
+ ObjPtr<mirror::DexCache> dex_cache(ref_info.first->AsDexCache());
+
+ // No need to set or clear native ref tags. The existing tag will be
+ // carried forward.
+ image_offset = native_object_relocations_[dex_cache->GetStrings()].offset +
+ ref_info.second;
+ } else {
+ ++managed_string_refs;
+ image_offset = GetImageOffset(ref_info.first) + ref_info.second;
+ }
+
+ string_reference_offsets_.push_back(image_offset);
+ }
+
+ CHECK_EQ(image_infos_.back().num_string_references_,
+ string_reference_offsets_.size());
+
+ LOG(INFO) << "Dex2Oat:AppImage:stringReferences = " << string_reference_offsets_.size();
+ LOG(INFO) << "Dex2Oat:AppImage:managedStringReferences = " << managed_string_refs;
+ LOG(INFO) << "Dex2Oat:AppImage:nativeStringReferences = " << native_string_refs;
+ }
+
// This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
// bin size sums being calculated.
TimingLogger::ScopedTiming t("AllocMemory", timings);
@@ -241,6 +331,252 @@
return true;
}
+class ImageWriter::CollectStringReferenceVisitor {
+ public:
+ explicit CollectStringReferenceVisitor(const ImageWriter& image_writer)
+ : dex_cache_string_ref_counter_(0),
+ image_writer_(image_writer) {}
+
+ // Used to prevent repeated null checks in the code that calls the visitor.
+ ALWAYS_INLINE
+ void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (!root->IsNull()) {
+ VisitRoot(root);
+ }
+ }
+
+ /*
+ * Counts the number of native references to strings reachable through
+ * DexCache objects for verification later.
+ */
+ ALWAYS_INLINE
+ void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ObjPtr<mirror::Object> referred_obj = root->AsMirrorPtr();
+
+ if (curr_obj_->IsDexCache() &&
+ image_writer_.IsValidAppImageStringReference(referred_obj)) {
+ ++dex_cache_string_ref_counter_;
+ }
+ }
+
+ // Collects info for Java fields that reference Java Strings.
+ ALWAYS_INLINE
+ void operator() (ObjPtr<mirror::Object> obj,
+ MemberOffset member_offset,
+ bool is_static ATTRIBUTE_UNUSED) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ObjPtr<mirror::Object> referred_obj =
+ obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(
+ member_offset);
+
+ if (image_writer_.IsValidAppImageStringReference(referred_obj)) {
+ string_ref_info_.emplace_back(obj.Ptr(), member_offset.Uint32Value());
+ }
+ }
+
+ ALWAYS_INLINE
+ void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
+ ObjPtr<mirror::Reference> ref) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
+ }
+
+ // Used by the wrapper function to obtain a native reference count.
+ size_t GetDexCacheStringRefCounter() const {
+ return dex_cache_string_ref_counter_;
+ }
+
+ // Resets the native reference count.
+ void ResetDexCacheStringRefCounter() {
+ dex_cache_string_ref_counter_ = 0;
+ }
+
+ ObjPtr<mirror::Object> curr_obj_;
+ mutable std::vector<RefInfoPair> string_ref_info_;
+
+ private:
+ mutable size_t dex_cache_string_ref_counter_;
+ const ImageWriter& image_writer_;
+};
+
+std::vector<ImageWriter::RefInfoPair> ImageWriter::CollectStringReferenceInfo() const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ gc::Heap* const heap = Runtime::Current()->GetHeap();
+ CollectStringReferenceVisitor visitor(*this);
+
+ heap->VisitObjects([this, &visitor](ObjPtr<mirror::Object> object)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (!IsInBootImage(object.Ptr())) {
+ // Many native GC roots are wrapped in std::atomics. Due to the
+ // semantics of atomic objects we can't actually visit the addresses of
+ // the native GC roots. Instead the visiting functions will pass the
+ // visitor the address of a temporary copy of the native GC root and, if
+ // it is changed, copy it back into the original location.
+ //
+ // This analysis requires the actual address of the native GC root so
+ // we will only count them in the visitor and then collect them manually
+ // afterwards. This count will then be used to verify that we collected
+ // all native GC roots.
+ visitor.curr_obj_ = object;
+ if (object->IsDexCache()) {
+ object->VisitReferences</* kVisitNativeRoots */ true,
+ kVerifyNone,
+ kWithoutReadBarrier>(visitor, visitor);
+
+ ObjPtr<mirror::DexCache> dex_cache = object->AsDexCache();
+ size_t new_native_ref_counter = 0;
+
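+ // Manually visit each slot of the DexCache's string array, as explained above.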
+ for (size_t string_index = 0; string_index < dex_cache->NumStrings(); ++string_index) {
+ mirror::StringDexCachePair dc_pair = dex_cache->GetStrings()[string_index].load();
+ mirror::Object* referred_obj = dc_pair.object.AddressWithoutBarrier()->AsMirrorPtr();
+
+ if (IsValidAppImageStringReference(referred_obj)) {
+ ++new_native_ref_counter;
+
+ uint32_t string_vector_offset =
+ (string_index * sizeof(mirror::StringDexCachePair)) +
+ offsetof(mirror::StringDexCachePair, object);
+
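+ // Tag the offset so that, at load time, it is treated as a slot in the
+ // DexCache's native string array rather than as a managed field offset.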
+ visitor.string_ref_info_.emplace_back(object.Ptr(),
+ SetNativeRefTag(string_vector_offset));
+ }
+ }
+
+ CHECK_EQ(visitor.GetDexCacheStringRefCounter(), new_native_ref_counter);
+ } else {
+ object->VisitReferences</* kVisitNativeRoots */ false,
+ kVerifyNone,
+ kWithoutReadBarrier>(visitor, visitor);
+ }
+
+ visitor.ResetDexCacheStringRefCounter();
+ }
+ });
+
+ return std::move(visitor.string_ref_info_);
+}
+
+class ImageWriter::NativeGCRootInvariantVisitor {
+ public:
+ explicit NativeGCRootInvariantVisitor(const ImageWriter& image_writer) :
+ image_writer_(image_writer) {}
+
+ ALWAYS_INLINE
+ void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (!root->IsNull()) {
+ VisitRoot(root);
+ }
+ }
+
+ ALWAYS_INLINE
+ void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ObjPtr<mirror::Object> referred_obj = root->AsMirrorPtr();
+
+ if (curr_obj_->IsClass()) {
+ class_violation = class_violation ||
+ image_writer_.IsValidAppImageStringReference(referred_obj);
+
+ } else if (curr_obj_->IsClassLoader()) {
+ class_loader_violation = class_loader_violation ||
+ image_writer_.IsValidAppImageStringReference(referred_obj);
+
+ } else if (!curr_obj_->IsDexCache()) {
+ LOG(FATAL) << "Dex2Oat:AppImage | " <<
+ "Native reference to String found in unexpected object type.";
+ }
+ }
+
+ ALWAYS_INLINE
+ void operator() (ObjPtr<mirror::Object> obj ATTRIBUTE_UNUSED,
+ MemberOffset member_offset ATTRIBUTE_UNUSED,
+ bool is_static ATTRIBUTE_UNUSED) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {}
+
+ ALWAYS_INLINE
+ void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
+ ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {}
+
+ // Returns true iff the only reachable native string references are through DexCache objects.
+ bool InvariantsHold() const {
+ return !(class_violation || class_loader_violation);
+ }
+
+ ObjPtr<mirror::Object> curr_obj_;
+ mutable bool class_violation = false,
+ class_loader_violation = false;
+
+ private:
+ const ImageWriter& image_writer_;
+};
+
+void ImageWriter::VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mutator_lock_) {
+ gc::Heap* const heap = Runtime::Current()->GetHeap();
+
+ NativeGCRootInvariantVisitor visitor(*this);
+
+ heap->VisitObjects([this, &visitor](ObjPtr<mirror::Object> object)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ visitor.curr_obj_ = object;
+
+ if (!IsInBootImage(object.Ptr())) {
+ object->VisitReferences</* kVisitNativeRoots */ true,
+ kVerifyNone,
+ kWithoutReadBarrier>(visitor, visitor);
+ }
+ });
+
+ bool error = false;
+ std::ostringstream error_str;
+
+ // Build the error string.
+
+ if (UNLIKELY(visitor.class_violation)) {
+ error_str << "Class";
+ error = true;
+ }
+
+ if (UNLIKELY(visitor.class_loader_violation)) {
+ if (error) {
+ error_str << ", ";
+ }
+
+ error_str << "ClassLoader";
+ }
+
+ CHECK(visitor.InvariantsHold()) <<
+ "Native GC root invariant failure. String refs reachable through the following objects: " <<
+ error_str.str();
+}
+
+void ImageWriter::CopyMetadata() {
+ CHECK(compile_app_image_);
+ CHECK_EQ(image_infos_.size(), 1u);
+
+ const ImageInfo& image_info = image_infos_.back();
+ std::vector<ImageSection> image_sections = image_info.CreateImageSections().second;
+
+ uint32_t* sfo_section_base = reinterpret_cast<uint32_t*>(
+ image_info.image_.Begin() +
+ image_sections[ImageHeader::kSectionStringReferenceOffsets].Offset());
+
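+ // Copy the previously computed string reference offsets into the
+ // StringReferenceOffsets section of the image buffer.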
+ std::copy(string_reference_offsets_.begin(),
+ string_reference_offsets_.end(),
+ sfo_section_base);
+}
+
+bool ImageWriter::IsValidAppImageStringReference(ObjPtr<mirror::Object> referred_obj) const {
+ return referred_obj != nullptr &&
+ !IsInBootImage(referred_obj.Ptr()) &&
+ referred_obj->IsString();
+}
+
bool ImageWriter::Write(int image_fd,
const std::vector<const char*>& image_filenames,
const std::vector<const char*>& oat_filenames) {
@@ -268,6 +604,10 @@
CopyAndFixupObjects();
}
+ if (compile_app_image_) {
+ CopyMetadata();
+ }
+
for (size_t i = 0; i < image_filenames.size(); ++i) {
const char* image_filename = image_filenames[i];
ImageInfo& image_info = GetImageInfo(i);
@@ -366,8 +706,23 @@
return false;
}
+ if (kIsDebugBuild) {
+ size_t separately_written_section_size = bitmap_section.Size() +
+ image_header->GetImageRelocationsSection().Size() +
+ sizeof(ImageHeader);
+
+ size_t total_uncompressed_size = raw_image_data.size() + separately_written_section_size,
+ total_compressed_size = image_data.size() + separately_written_section_size;
+
+ LOG(INFO) << "Dex2Oat:uncompressedImageSize = " << total_uncompressed_size;
+ if (total_uncompressed_size != total_compressed_size) {
+ LOG(INFO) << "Dex2Oat:compressedImageSize = " << total_compressed_size;
+ }
+ }
+
CHECK_EQ(relocations_position_in_file + relocations.size(),
static_cast<size_t>(image_file->GetLength()));
+
if (image_file->FlushCloseOrErase() != 0) {
PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
return false;
@@ -721,9 +1076,7 @@
bool ImageWriter::AllocMemory() {
for (ImageInfo& image_info : image_infos_) {
- ImageSection unused_sections[ImageHeader::kSectionCount];
- const size_t length = RoundUp(
- image_info.CreateImageSections(unused_sections), kPageSize);
+ const size_t length = RoundUp(image_info.CreateImageSections().first, kPageSize);
std::string error_msg;
image_info.image_ = MemMap::MapAnonymous("image writer image",
@@ -1779,9 +2132,9 @@
}
ProcessWorkStack(&work_stack);
- // For app images, there may be objects that are only held live by the by the boot image. One
+ // For app images, there may be objects that are only held live by the boot image. One
// example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
- // does not fail any checks. TODO: We should probably avoid copying these objects.
+ // does not fail any checks.
if (compile_app_image_) {
for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
DCHECK(space->IsImageSpace());
@@ -1873,9 +2226,7 @@
for (ImageInfo& image_info : image_infos_) {
image_info.image_begin_ = global_image_begin_ + image_offset;
image_info.image_offset_ = image_offset;
- ImageSection unused_sections[ImageHeader::kSectionCount];
- image_info.image_size_ =
- RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
+ image_info.image_size_ = RoundUp(image_info.CreateImageSections().first, kPageSize);
// There should be no gaps until the next image.
image_offset += image_info.image_size_;
}
@@ -1909,58 +2260,94 @@
boot_image_live_objects_ = boot_image_live_objects.Get();
}
-size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
- DCHECK(out_sections != nullptr);
+std::pair<size_t, std::vector<ImageSection>> ImageWriter::ImageInfo::CreateImageSections() const {
+ std::vector<ImageSection> sections(ImageHeader::kSectionCount);
- // Do not round up any sections here that are represented by the bins since it will break
- // offsets.
+ // Do not round up any sections here that are represented by the bins since it
+ // will break offsets.
- // Objects section
- ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
- *objects_section = ImageSection(0u, image_end_);
+ /*
+ * Objects section
+ */
+ sections[ImageHeader::kSectionObjects] =
+ ImageSection(0u, image_end_);
- // Add field section.
- ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
- *field_section = ImageSection(GetBinSlotOffset(Bin::kArtField), GetBinSlotSize(Bin::kArtField));
+ /*
+ * Field section
+ */
+ sections[ImageHeader::kSectionArtFields] =
+ ImageSection(GetBinSlotOffset(Bin::kArtField), GetBinSlotSize(Bin::kArtField));
- // Add method section.
- ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
- *methods_section = ImageSection(
- GetBinSlotOffset(Bin::kArtMethodClean),
- GetBinSlotSize(Bin::kArtMethodClean) + GetBinSlotSize(Bin::kArtMethodDirty));
+ /*
+ * Method section
+ */
+ sections[ImageHeader::kSectionArtMethods] =
+ ImageSection(GetBinSlotOffset(Bin::kArtMethodClean),
+ GetBinSlotSize(Bin::kArtMethodClean) +
+ GetBinSlotSize(Bin::kArtMethodDirty));
- // IMT section.
- ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
- *imt_section = ImageSection(GetBinSlotOffset(Bin::kImTable), GetBinSlotSize(Bin::kImTable));
+ /*
+ * IMT section
+ */
+ sections[ImageHeader::kSectionImTables] =
+ ImageSection(GetBinSlotOffset(Bin::kImTable), GetBinSlotSize(Bin::kImTable));
- // Conflict tables section.
- ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
- *imt_conflict_tables_section = ImageSection(GetBinSlotOffset(Bin::kIMTConflictTable),
- GetBinSlotSize(Bin::kIMTConflictTable));
+ /*
+ * Conflict Tables section
+ */
+ sections[ImageHeader::kSectionIMTConflictTables] =
+ ImageSection(GetBinSlotOffset(Bin::kIMTConflictTable), GetBinSlotSize(Bin::kIMTConflictTable));
- // Runtime methods section.
- ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
- *runtime_methods_section = ImageSection(GetBinSlotOffset(Bin::kRuntimeMethod),
- GetBinSlotSize(Bin::kRuntimeMethod));
+ /*
+ * Runtime Methods section
+ */
+ sections[ImageHeader::kSectionRuntimeMethods] =
+ ImageSection(GetBinSlotOffset(Bin::kRuntimeMethod), GetBinSlotSize(Bin::kRuntimeMethod));
- // Add dex cache arrays section.
- ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
- *dex_cache_arrays_section = ImageSection(GetBinSlotOffset(Bin::kDexCacheArray),
- GetBinSlotSize(Bin::kDexCacheArray));
+ /*
+ * DexCache Arrays section.
+ */
+ const ImageSection& dex_cache_arrays_section =
+ sections[ImageHeader::kSectionDexCacheArrays] =
+ ImageSection(GetBinSlotOffset(Bin::kDexCacheArray),
+ GetBinSlotSize(Bin::kDexCacheArray));
+
+ /*
+ * Interned Strings section
+ */
+
// Round up to the alignment the string table expects. See HashSet::WriteToMemory.
- size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
- // Calculate the size of the interned strings.
- ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
- *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
- cur_pos = interned_strings_section->End();
- // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
- cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
- // Calculate the size of the class table section.
- ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
- *class_table_section = ImageSection(cur_pos, class_table_bytes_);
- cur_pos = class_table_section->End();
- // Image end goes right before the start of the image bitmap.
- return cur_pos;
+ size_t cur_pos = RoundUp(dex_cache_arrays_section.End(), sizeof(uint64_t));
+
+ const ImageSection& interned_strings_section =
+ sections[ImageHeader::kSectionInternedStrings] =
+ ImageSection(cur_pos, intern_table_bytes_);
+
+ /*
+ * Class Table section
+ */
+
+ // Obtain the new position and round it up to the appropriate alignment.
+ cur_pos = RoundUp(interned_strings_section.End(), sizeof(uint64_t));
+
+ const ImageSection& class_table_section =
+ sections[ImageHeader::kSectionClassTable] =
+ ImageSection(cur_pos, class_table_bytes_);
+
+ /*
+ * String Reference Offsets section
+ */
+
+ // Round up to the alignment of the offsets we are going to store.
+ cur_pos = RoundUp(class_table_section.End(), sizeof(uint32_t));
+
+ const ImageSection& string_reference_offsets =
+ sections[ImageHeader::kSectionStringReferenceOffsets] =
+ ImageSection(cur_pos, sizeof(uint32_t) * num_string_references_);
+
+ // Return the number of bytes described by these sections, and the sections
+ // themselves.
+ return make_pair(string_reference_offsets.End(), std::move(sections));
}
void ImageWriter::CreateHeader(size_t oat_index) {
@@ -1970,8 +2357,9 @@
const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
// Create the image sections.
- ImageSection sections[ImageHeader::kSectionCount];
- const size_t image_end = image_info.CreateImageSections(sections);
+ auto section_info_pair = image_info.CreateImageSections();
+ const size_t image_end = section_info_pair.first;
+ std::vector<ImageSection>& sections = section_info_pair.second;
// Finally bitmap section.
const size_t bitmap_bytes = image_info.image_bitmap_->Size();
@@ -2008,7 +2396,7 @@
ImageHeader* header = new (image_info.image_.Begin()) ImageHeader(
PointerToLowMemUInt32(image_info.image_begin_),
image_end,
- sections,
+ sections.data(),
image_info.image_roots_address_,
image_info.oat_checksum_,
PointerToLowMemUInt32(oat_file_begin),
diff --git a/dex2oat/linker/image_writer.h b/dex2oat/linker/image_writer.h
index f455746..142f77b 100644
--- a/dex2oat/linker/image_writer.h
+++ b/dex2oat/linker/image_writer.h
@@ -84,6 +84,22 @@
const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map,
const HashSet<std::string>* dirty_image_objects);
+ /*
+ * Modifies the heap and collects information about objects and code so that
+ * they can be written to the boot or app image later.
+ *
+ * First, unneeded classes are removed from the managed heap. Next, we
+ * remove cached values and calculate metadata that is needed later in the
+ * process. Optionally, some debugging information is collected and used to
+ * verify the state of the heap at this point. The metadata gathered earlier
+ * is then used to calculate the offsets of string references, which speeds up
+ * string interning when the image is loaded. Lastly, we allocate enough memory
+ * to fit all image data except the bitmap and relocation sections.
+ *
+ * This function should only be called when all objects to be included in the
+ * image have been initialized and all native methods have been generated. In
+ * addition, no other thread should be modifying the heap.
+ */
bool PrepareImageAddressSpace(TimingLogger* timings);
bool IsImageAddressSpaceReady() const {
@@ -270,9 +286,18 @@
ImageInfo();
ImageInfo(ImageInfo&&) = default;
- // Create the image sections into the out sections variable, returns the size of the image
- // excluding the bitmap.
- size_t CreateImageSections(ImageSection* out_sections) const;
+ /*
+ * Creates ImageSection objects that describe most of the sections of a
+ * boot or AppImage. The following sections are not included:
+ * - ImageHeader::kSectionImageBitmap
+ * - ImageHeader::kSectionImageRelocations
+ *
+ * In addition, the ImageHeader is not covered here.
+ *
+ * This function will return the total size of the covered sections as well
+ * as a vector containing the individual ImageSection objects.
+ */
+ std::pair<size_t, std::vector<ImageSection>> CreateImageSections() const;
size_t GetStubOffset(StubType stub_type) const {
DCHECK_LT(static_cast<size_t>(stub_type), kNumberOfStubTypes);
@@ -364,6 +389,10 @@
// Number of pointer fixup bytes.
size_t pointer_fixup_bytes_ = 0;
+ // Number of offsets to string references that will be written to the
+ // StringReferenceOffsets section.
+ size_t num_string_references_ = 0;
+
// Intern table associated with this image for serialization.
std::unique_ptr<InternTable> intern_table_;
@@ -474,6 +503,21 @@
ImtConflictTable* copy,
size_t oat_index)
REQUIRES_SHARED(Locks::mutator_lock_);
+
+ /*
+ * Copies metadata from the heap into a buffer that will be compressed and
+ * written to the image.
+ *
+ * This function copies the string reference offsets from a local vector into
+ * the memory pointed to by the image_ field of an ImageInfo struct. The
+ * destination within that memory is given by the ImageSection object for the
+ * StringReferenceOffsets section.
+ *
+ * All data for the image, besides the object bitmap and the relocation data,
+ * will also be copied into the memory region pointed to by image_.
+ */
+ void CopyMetadata();
+
template <bool kCheckNotNull = true>
void RecordImageRelocation(const void* dest, size_t oat_index, bool app_to_boot_image = false);
void FixupClass(mirror::Class* orig, mirror::Class* copy, size_t oat_index)
@@ -554,6 +598,60 @@
std::unordered_set<mirror::Object*>* visited)
REQUIRES_SHARED(Locks::mutator_lock_);
+ /*
+ * A pair containing the information necessary to calculate the position of a
+ * managed object's field or native reference inside an AppImage.
+ *
+ * The first element of this pair is a raw mirror::Object pointer because its
+ * usage will cross a suspend point and ObjPtr would produce a false positive.
+ *
+ * The second element is an offset either into the object or into the string
+ * array of a DexCache object.
+ *
+ * TODO (chriswailes): Add a note indicating the source line where we ensure
+ * that no moving garbage collection will occur.
+ */
+ typedef std::pair<mirror::Object*, uint32_t> RefInfoPair;
+
+ /*
+ * Collects the information necessary for calculating image offsets to string
+ * fields later.
+ *
+ * This function is used when constructing AppImages. Because AppImages
+ * contain strings that must be interned, we need to visit references to these
+ * strings when the AppImage is loaded and either insert them into the
+ * runtime intern table or replace the existing reference with a reference
+ * to the interned strings.
+ *
+ * To speed up the interning of strings when the AppImage is loaded we include
+ * a list of offsets to string references in the AppImage. These are then
+ * iterated over at load time and fixed up.
+ *
+ * To record the offsets we first have to count the number of string
+ * references that will be included in the AppImage. This allows us to both
+ * allocate enough memory for storing the offsets and correctly calculate the
+ * offsets of various objects into the image. Once the image offset
+ * calculations are done for Java objects, the reference object/offset pairs
+ * are translated to image offsets. The CopyMetadata function then copies
+ * these offsets into the image.
+ *
+ * Returns a vector containing pairs of object pointers and offsets. The offsets are
+ * tagged to indicate if the offset is for a field of a mirror object or a
+ * native reference. If the offset is tagged as a native reference it must
+ * have come from a DexCache's string array.
+ */
+ std::vector<RefInfoPair> CollectStringReferenceInfo() const
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ /*
+ * Ensures that assumptions about native GC roots and AppImages hold.
+ *
+ * This function verifies the following condition(s):
+ * - Native references to Java strings are only reachable through DexCache
+ * objects
+ */
+ void VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mutator_lock_);
+
bool IsMultiImage() const {
return image_infos_.size() > 1;
}
@@ -628,6 +726,18 @@
void CopyAndFixupPointer(void* object, MemberOffset offset, void* value, size_t oat_index)
REQUIRES_SHARED(Locks::mutator_lock_);
+ /*
+ * Tests an object reference to see if it counts as an AppImage string reference.
+ *
+ * An object reference is considered to be an AppImage String reference iff:
+ * - It isn't null
+ * - The referred-object isn't in the boot image
+ * - The referred-object is a Java String
+ */
+ ALWAYS_INLINE
+ bool IsValidAppImageStringReference(ObjPtr<mirror::Object> referred_obj) const
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
const CompilerOptions& compiler_options_;
// Beginning target image address for the first image.
@@ -680,6 +790,9 @@
// Boot image live objects, null for app image.
mirror::ObjectArray<mirror::Object>* boot_image_live_objects_;
+ // Offsets into the image that indicate where string references are recorded.
+ std::vector<uint32_t> string_reference_offsets_;
+
// Which mode the image is stored as, see image.h
const ImageHeader::StorageMode image_storage_mode_;
@@ -700,9 +813,23 @@
class NativeLocationVisitor;
class PruneClassesVisitor;
class PruneClassLoaderClassesVisitor;
+ class PruneObjectReferenceVisitor;
class RegisterBootClassPathClassesVisitor;
class VisitReferencesVisitor;
- class PruneObjectReferenceVisitor;
+
+ /*
+ * A visitor class for extracting object/offset pairs.
+ *
+ * This visitor walks the fields of an object and extracts object/offset pairs
+ * that are later translated to image offsets. This visitor is only
+ * responsible for extracting info for Java references. Native references to
+ * Java strings are handled in the wrapper function
+ * CollectStringReferenceInfo().
+ */
+ class CollectStringReferenceVisitor;
+
+ // A visitor used by the VerifyNativeGCRootInvariants() function.
+ class NativeGCRootInvariantVisitor;
DISALLOW_COPY_AND_ASSIGN(ImageWriter);
};
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index d95f71a..44d6aaa 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -104,6 +104,8 @@
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
+#include "mirror/object_reference.h"
+#include "mirror/object_reference-inl.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
@@ -1171,18 +1173,31 @@
gc::accounting::HeapBitmap* const live_bitmap_;
};
-class FixupInternVisitor {
+/*
+ * A visitor used to verify that all strings referenced from an AppImage have
+ * been properly interned.
+ */
+class VerifyStringInterningVisitor {
public:
- ALWAYS_INLINE ObjPtr<mirror::Object> TryInsertIntern(mirror::Object* obj) const
+ explicit VerifyStringInterningVisitor(const gc::space::ImageSpace& space) :
+ uninterned_string_found_(false),
+ space_(space),
+ intern_table_(*Runtime::Current()->GetInternTable()) {}
+
+ ALWAYS_INLINE
+ void TestObject(ObjPtr<mirror::Object> referred_obj) const
REQUIRES_SHARED(Locks::mutator_lock_) {
- if (obj != nullptr && obj->IsString()) {
- const auto intern = Runtime::Current()->GetInternTable()->InternStrong(obj->AsString());
- return intern;
+ if (referred_obj != nullptr &&
+ space_.HasAddress(referred_obj.Ptr()) &&
+ referred_obj->IsString()) {
+ ObjPtr<mirror::String> referred_str = referred_obj->AsString();
+ uninterned_string_found_ = uninterned_string_found_ ||
+ (intern_table_.LookupStrong(Thread::Current(), referred_str) != referred_str);
}
- return obj;
}
- ALWAYS_INLINE void VisitRootIfNonNull(
+ ALWAYS_INLINE
+ void VisitRootIfNonNull(
mirror::CompressedReference<mirror::Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
if (!root->IsNull()) {
@@ -1190,44 +1205,77 @@
}
}
- ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ ALWAYS_INLINE
+ void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
- root->Assign(TryInsertIntern(root->AsMirrorPtr()));
+ TestObject(root->AsMirrorPtr());
}
// Visit Class Fields
- ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj,
- MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ ALWAYS_INLINE
+ void operator()(ObjPtr<mirror::Object> obj,
+ MemberOffset offset,
+ bool is_static ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// There could be overlap between ranges, we must avoid visiting the same reference twice.
// Avoid the class field since we already fixed it up in FixupClassVisitor.
if (offset.Uint32Value() != mirror::Object::ClassOffset().Uint32Value()) {
// Updating images, don't do a read barrier.
- // Only string fields are fixed, don't do a verify.
- mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(
- offset);
- obj->SetFieldObject<false, false>(offset, TryInsertIntern(ref));
+ ObjPtr<mirror::Object> referred_obj =
+ obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
+
+ TestObject(referred_obj);
}
}
void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
- this->operator()(ref, mirror::Reference::ReferentOffset(), false);
+ operator()(ref, mirror::Reference::ReferentOffset(), false);
}
- void operator()(mirror::Object* obj) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
- if (obj->IsDexCache()) {
- obj->VisitReferences<true, kVerifyNone, kWithoutReadBarrier>(*this, *this);
- } else {
- // Don't visit native roots for non-dex-cache
- obj->VisitReferences<false, kVerifyNone, kWithoutReadBarrier>(*this, *this);
- }
- }
+ mutable bool uninterned_string_found_;
+ const gc::space::ImageSpace& space_;
+ InternTable& intern_table_;
};
+/*
+ * This function verifies that string references in the AppImage have been
+ * properly interned. To be considered properly interned, a reference must
+ * point to the same String instance that the intern table holds.
+ */
+bool VerifyStringInterning(gc::space::ImageSpace& space) REQUIRES_SHARED(Locks::mutator_lock_) {
+ const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
+ const ImageHeader& image_header = space.GetImageHeader();
+ const uint8_t* target_base = space.GetMemMap()->Begin();
+ const ImageSection& objects_section = image_header.GetObjectsSection();
+ uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
+ uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
+
+ VerifyStringInterningVisitor visitor(space);
+ bitmap->VisitMarkedRange(objects_begin,
+ objects_end,
+ [&space, &visitor](mirror::Object* obj)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (space.HasAddress(obj)) {
+ if (obj->IsDexCache()) {
+ obj->VisitReferences</* kVisitNativeRoots */ true,
+ kVerifyNone,
+ kWithoutReadBarrier>(visitor, visitor);
+ } else {
+ // Don't visit native roots for non-DexCache objects as they can't contain
+ // native references to strings. This is verified during compilation
+ // by ImageWriter::VerifyNativeGCRootInvariants.
+ obj->VisitReferences</* kVisitNativeRoots */ false,
+ kVerifyNone,
+ kWithoutReadBarrier>(visitor, visitor);
+ }
+ }
+ });
+
+ return !visitor.uninterned_string_found_;
+}
+
// new_class_set is the set of classes that were read from the class table section in the image.
// If there was no class table section, it is null.
// Note: using a class here to avoid having to make ClassLinker internals public.
@@ -1268,6 +1316,7 @@
CHECK(!class_linker->FindDexCacheDataLocked(*dex_file).IsValid());
class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
}
+
if (kIsDebugBuild) {
CHECK(new_class_set != nullptr);
mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
@@ -1275,17 +1324,20 @@
for (size_t j = 0; j != num_types; ++j) {
// The image space is not yet added to the heap, avoid read barriers.
ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
+
if (space->HasAddress(klass.Ptr())) {
DCHECK(!klass->IsErroneous()) << klass->GetStatus();
auto it = new_class_set->find(ClassTable::TableSlot(klass));
DCHECK(it != new_class_set->end());
DCHECK_EQ(it->Read(), klass);
ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
+
if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) {
auto it2 = new_class_set->find(ClassTable::TableSlot(super_class));
DCHECK(it2 != new_class_set->end());
DCHECK_EQ(it2->Read(), super_class);
}
+
for (ArtMethod& m : klass->GetDirectMethods(kRuntimePointerSize)) {
const void* code = m.GetEntryPointFromQuickCompiledCode();
const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
@@ -1296,6 +1348,7 @@
DCHECK_EQ(code, oat_code) << m.PrettyMethod();
}
}
+
for (ArtMethod& m : klass->GetVirtualMethods(kRuntimePointerSize)) {
const void* code = m.GetEntryPointFromQuickCompiledCode();
const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
@@ -1311,20 +1364,54 @@
}
}
}
+
if (ClassLinker::kAppImageMayContainStrings) {
- // Fixup all the literal strings happens at app images which are supposed to be interned.
+ // Iterate over the string reference offsets stored in the image and intern
+ // the strings they point to.
+
ScopedTrace timing("AppImage:InternString");
const auto& image_header = space->GetImageHeader();
- const auto bitmap = space->GetMarkBitmap(); // bitmap of objects
const uint8_t* target_base = space->GetMemMap()->Begin();
- const ImageSection& objects_section = image_header.GetObjectsSection();
+ const ImageSection& sro_section = image_header.GetImageStringReferenceOffsetsSection();
- uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
- uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
+ size_t num_string_offsets = sro_section.Size() / sizeof(uint32_t);
- FixupInternVisitor fixup_intern_visitor;
- bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_intern_visitor);
+ if (kIsDebugBuild) {
+ LOG(INFO)
+ << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
+ << num_string_offsets;
+ }
+
+ DCHECK(Runtime::Current()->GetInternTable() != nullptr);
+
+ InternTable& intern_table = *Runtime::Current()->GetInternTable();
+ const uint32_t* sro_base =
+ reinterpret_cast<const uint32_t*>(target_base + sro_section.Offset());
+
+ for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
+ if (HasNativeRefTag(sro_base[offset_index])) {
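+ // Native case: the untagged offset locates a CompressedReference slot
+ // inside a DexCache's string array.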
+ void* raw_field_addr = space->Begin() + ClearNativeRefTag(sro_base[offset_index]);
+ mirror::CompressedReference<mirror::Object>* objref_addr =
+ reinterpret_cast<mirror::CompressedReference<mirror::Object>*>(raw_field_addr);
+ mirror::String* referred_string = objref_addr->AsMirrorPtr()->AsString();
+ DCHECK(referred_string != nullptr);
+
+ objref_addr->Assign(intern_table.InternStrong(referred_string));
+
+ } else {
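+ // Managed case: the offset locates a HeapReference field of an object in
+ // the image's objects section.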
+ void* raw_field_addr = space->Begin() + sro_base[offset_index];
+ mirror::HeapReference<mirror::Object>* objref_addr =
+ reinterpret_cast<mirror::HeapReference<mirror::Object>*>(raw_field_addr);
+ mirror::String* referred_string = objref_addr->AsMirrorPtr()->AsString();
+ DCHECK(referred_string != nullptr);
+
+ objref_addr->Assign<false>(intern_table.InternStrong(referred_string));
+ }
+ }
+
+ DCHECK(VerifyStringInterning(*space));
}
+
if (kVerifyArtMethodDeclaringClasses) {
ScopedTrace timing("AppImage:VerifyDeclaringClasses");
ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
diff --git a/runtime/image.h b/runtime/image.h
index af092ad..b0e8a3d 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -219,6 +219,16 @@
kBootImageLiveObjects = kSpecialRoots, // Array of boot image objects that must be kept live.
};
+ /*
+ * This describes the number and ordering of sections inside Boot
+ * and App Images. It is very important that changes to this enum
+ * are reflected in the compiler and loader.
+ *
+ * See:
+ * - ImageWriter::ImageInfo::CreateImageSections()
+ * - ImageWriter::Write()
+ * - ImageWriter::AllocMemory()
+ */
enum ImageSections {
kSectionObjects,
kSectionArtFields,
@@ -229,6 +239,7 @@
kSectionDexCacheArrays,
kSectionInternedStrings,
kSectionClassTable,
+ kSectionStringReferenceOffsets,
kSectionImageBitmap,
kSectionImageRelocations,
kSectionCount, // Number of elements in enum.
@@ -291,6 +302,10 @@
return GetImageSection(kSectionImageRelocations);
}
+ const ImageSection& GetImageStringReferenceOffsetsSection() const {
+ return GetImageSection(kSectionStringReferenceOffsets);
+ }
+
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ObjPtr<mirror::Object> GetImageRoot(ImageRoot image_root) const
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -451,6 +466,39 @@
friend class linker::ImageWriter;
};
+/*
+ * Tags the value by setting its lowest bit. Used by AppImage logic to
+ * differentiate between managed and native references.
+ */
+template<typename T>
+T SetNativeRefTag(T val) {
+ static_assert(std::is_integral<T>::value, "Expected integral type.");
+
+ return val | 1u;
+}
+
+/*
+ * Tests whether the lowest bit is set. Used by AppImage logic to
+ * differentiate between managed and native references.
+ */
+template<typename T>
+bool HasNativeRefTag(T val) {
+ static_assert(std::is_integral<T>::value, "Expected integral type.");
+
+ return (val & 1u) == 1u;
+}
+
+/*
+ * Clears the lowest bit of the value. Used by AppImage logic to
+ * differentiate between managed and native references.
+ */
+template<typename T>
+T ClearNativeRefTag(T val) {
+ static_assert(std::is_integral<T>::value, "Expected integral type.");
+
+ return val & ~1u;
+}
+
std::ostream& operator<<(std::ostream& os, const ImageHeader::ImageMethod& policy);
std::ostream& operator<<(std::ostream& os, const ImageHeader::ImageRoot& policy);
std::ostream& operator<<(std::ostream& os, const ImageHeader::ImageSections& section);
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 99a0d92..fbe002a 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -588,6 +588,10 @@
return atomic_addr->CompareAndSetStrongSequentiallyConsistent(old_value, new_value);
}
+/*
+ * Returns a pointer to the object that the field refers to, not to an object
+ * representing the field itself.
+ */
template<class T,
VerifyObjectFlags kVerifyFlags,
ReadBarrierOption kReadBarrierOption,
diff --git a/runtime/mirror/object_reference-inl.h b/runtime/mirror/object_reference-inl.h
index 295b460..780d662 100644
--- a/runtime/mirror/object_reference-inl.h
+++ b/runtime/mirror/object_reference-inl.h
@@ -24,17 +24,26 @@
namespace art {
namespace mirror {
-template<bool kPoisonReferences, class MirrorType>
+template <bool kPoisonReferences, class MirrorType>
+ALWAYS_INLINE
void ObjectReference<kPoisonReferences, MirrorType>::Assign(ObjPtr<MirrorType> ptr) {
Assign(ptr.Ptr());
}
-template<class MirrorType>
+template <class MirrorType>
+ALWAYS_INLINE
bool HeapReference<MirrorType>::CasWeakRelaxed(MirrorType* expected_ptr, MirrorType* new_ptr) {
return reference_.CompareAndSetWeakRelaxed(Compression::Compress(expected_ptr),
Compression::Compress(new_ptr));
}
+template <typename MirrorType>
+template <bool kIsVolatile>
+ALWAYS_INLINE
+void HeapReference<MirrorType>::Assign(ObjPtr<MirrorType> ptr) {
+ Assign<kIsVolatile>(ptr.Ptr());
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/object_reference.h b/runtime/mirror/object_reference.h
index 77154e2..d6a39aa 100644
--- a/runtime/mirror/object_reference.h
+++ b/runtime/mirror/object_reference.h
@@ -60,6 +60,15 @@
using Compression = PtrCompression<kPoisonReferences, MirrorType>;
public:
+ /*
+ * Returns a pointer to the mirror of the managed object this reference is for.
+ *
+ * This does NOT return the current object (which isn't derived from, and
+ * therefore cannot be, a mirror::Object) as a mirror pointer. Instead, this
+ * returns a pointer to the mirror of the managed object this refers to.
+ *
+ * TODO (chriswailes): Rename to GetPtr().
+ */
MirrorType* AsMirrorPtr() const {
return Compression::Decompress(reference_);
}