/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

17#include "image_writer.h"
18
19#include <sys/stat.h>
20
Ian Rogers700a4022014-05-19 16:49:03 -070021#include <memory>
Vladimir Marko20f85592015-03-19 10:07:02 +000022#include <numeric>
Brian Carlstrom7940e442013-07-12 13:46:57 -070023#include <vector>
24
Mathieu Chartierc7853442015-03-27 14:35:38 -070025#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070026#include "art_method-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070027#include "base/logging.h"
28#include "base/unix_file/fd_file.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010029#include "class_linker-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070030#include "compiled_method.h"
31#include "dex_file-inl.h"
32#include "driver/compiler_driver.h"
Alex Light53cb16b2014-06-12 11:26:29 -070033#include "elf_file.h"
34#include "elf_utils.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070035#include "elf_writer.h"
36#include "gc/accounting/card_table-inl.h"
37#include "gc/accounting/heap_bitmap.h"
Mathieu Chartier31e89252013-08-28 11:29:12 -070038#include "gc/accounting/space_bitmap-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070039#include "gc/heap.h"
40#include "gc/space/large_object_space.h"
41#include "gc/space/space-inl.h"
42#include "globals.h"
43#include "image.h"
44#include "intern_table.h"
Mathieu Chartierc7853442015-03-27 14:35:38 -070045#include "linear_alloc.h"
Mathieu Chartierad2541a2013-10-25 10:05:23 -070046#include "lock_word.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070047#include "mirror/abstract_method.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070048#include "mirror/array-inl.h"
49#include "mirror/class-inl.h"
50#include "mirror/class_loader.h"
51#include "mirror/dex_cache-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070052#include "mirror/method.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070053#include "mirror/object-inl.h"
54#include "mirror/object_array-inl.h"
Ian Rogersb0fa5dc2014-04-28 16:47:08 -070055#include "mirror/string-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070056#include "oat.h"
57#include "oat_file.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070058#include "runtime.h"
59#include "scoped_thread_state_change.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070060#include "handle_scope-inl.h"
Vladimir Marko20f85592015-03-19 10:07:02 +000061#include "utils/dex_cache_arrays_layout-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070062
using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    Thread::Current()->TransitionFromSuspendedToRunnable();
    PruneNonImageClasses();  // Remove junk
    ComputeLazyFieldsForImageClasses();  // Add useful information

    Thread::Current()->TransitionFromRunnableToSuspended(kNative);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  Thread::Current()->TransitionFromSuspendedToRunnable();
  CalculateNewObjectOffsets();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

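// Editorial note (not in the original source): a sketch of the expected calling sequence,
// with the driver code paraphrased and the constructor arguments purely illustrative.
// PrepareImageAddressSpace() lays out the image and maps the backing memory; Write() is
// invoked afterwards, once the oat file already exists on disk so its ELF data can be read:
//
//   ImageWriter writer(compiler_driver, image_begin, compile_pic);   // illustrative
//   CHECK(writer.PrepareImageAddressSpace());
//   // ... produce and write the oat file ...
//   CHECK(writer.Write(image_filename, oat_filename, oat_location));
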
bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  CHECK_EQ(class_linker->RegisterOatFile(oat_file_), oat_file_);

  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);

  Thread::Current()->TransitionFromSuspendedToRunnable();

  CreateHeader(oat_loaded_size, oat_data_offset);
  CopyAndFixupNativeData();
  // TODO: heap validation can't handle these fix up passes.
  Runtime::Current()->GetHeap()->DisableObjectValidation();
  CopyAndFixupObjects();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields + methods.
  const auto write_count = image_header->GetImageSize();
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  const ImageSection& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
  CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         bitmap_section.Size(), bitmap_section.Offset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

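// Editorial note (not in the original source): during layout the object's lock word is reused
// as scratch space. SetImageBinSlot() packs the BinSlot into the lock word's forwarding-address
// state, and AssignImageOffset() later overwrites it with the final image offset. Any hash code
// that was living in the lock word is preserved in saved_hashcode_map_ so it can be restored
// when the object is copied into the image.
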
void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  uint32_t size = 0u;
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(), size + layout.TypesOffset());
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(), size + layout.MethodsOffset());
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(), size + layout.FieldsOffset());
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), size + layout.StringsOffset());
    size += layout.Size();
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}

void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset) {
  if (array != nullptr) {
    native_object_relocations_.emplace(
        array,
        NativeObjectRelocation { offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        auto* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || IsImageClass(klass)) << PrettyClass(klass)
            << " should be an image class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}

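// Editorial note (illustrative example, not in the original source): each bin behaves as a
// simple bump allocator during this pass. For instance, if bin_slot_sizes_[kBinString] is
// currently 0x40 and a 24-byte String is assigned, the String receives
// BinSlot(kBinString, 0x40) and the bin grows to 0x58 (24 is already a multiple of
// kObjectAlignment). Absolute image offsets only become known once
// CalculateNewObjectOffsets() converts the accumulated bin sizes into bin_slot_offsets_.
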
bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized classes are highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() + intern_table_bytes_,
                                kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap", image_->Begin(), RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

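// Editorial note (not in the original source): the mapping allocated above must cover the image
// header (image_objects_offset_begin_), every bin (GetBinSizeSum(), which accounts for both
// mirror objects and native data such as ArtFields, ArtMethods and dex cache arrays) and the
// serialized intern table (intern_table_bytes_). The image bitmap only needs to describe the
// mirror object section, which is why it is sized from image_end_ rather than the full length.
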
class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

bool ImageWriter::IsImageClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->IsImageClass(klass)) {
      std::string temp;
      non_image_classes_.insert(klass->GetDescriptor(&temp));
    }
    return true;
  }

  std::set<std::string> non_image_classes_;
  ImageWriter* const image_writer_;
};

void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : visitor.non_image_classes_) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
    for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* method =
          mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but have a
        // declaring class which is an image class. Set it to the resolution method to be safe and
        // prevent dangling pointers.
        if (method->IsMiranda() || !IsImageClass(declaring_class)) {
          mirror::DexCache::SetElementPtrSize(resolved_methods,
                                              i,
                                              resolution_method,
                                              target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
          << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not interned yet.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    // InternImageString allows us to intern while holding the heap bitmap lock. This is safe since
    // we are guaranteed to not have GC during image writing.
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
        obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned obj is after us, allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}

ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then again to copy the dex caches. We check that
  // the number of dex caches does not change in between.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    size_t i = 0;
    for (jobject weak_root : class_linker->GetDexCaches()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
      dex_caches->Set<false>(i, dex_cache);
      ++i;
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  }
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    }
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
  }
}

// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
      // Visit and assign offsets for fields and field arrays.
      auto* as_klass = h_obj->AsClass();
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
              << " already forwarded";
          size_t& offset = bin_slot_sizes_[kBinArtField];
          native_object_relocations_.emplace(
              cur_fields, NativeObjectRelocation {
                  offset, kNativeObjectRelocationTypeArtFieldArray });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
            // Need to forward arrays separate of fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            native_object_relocations_.emplace(
                field, NativeObjectRelocation { offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      LengthPrefixedArray<ArtMethod>* method_arrays[] = {
          as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
      };
      for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
        if (array == nullptr) {
          continue;
        }
        bool any_dirty = false;
        size_t count = 0;
        const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
        const size_t method_size = ArtMethod::Size(target_ptr_size_);
        auto iteration_range =
            MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
        for (auto& m : iteration_range) {
          any_dirty = any_dirty || WillMethodBeDirty(&m);
          ++count;
        }
        NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
            kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
                                                                               method_size,
                                                                               method_alignment);
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end()) << "Method array " << array
            << " already forwarded";
        size_t& offset = bin_slot_sizes_[bin_type];
        native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
            any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
                kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : iteration_range) {
          AssignMethodOffset(&m, type);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += count;
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}

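// Editorial note (not in the original source): native data (ArtField and ArtMethod arrays)
// cannot carry a forwarding address in a lock word, so the walk above records their future
// locations in native_object_relocations_, keyed by the native pointer. The offsets stored
// here are still bin-relative; CalculateNewObjectOffsets() later adds the bin base offsets
// to turn them into absolute image offsets.
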
void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
  auto it = native_object_relocations_.find(method);
  CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
      << PrettyMethod(method);
  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
  offset += ArtMethod::Size(target_ptr_size_);
}

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);
}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->UnbinObjectsIntoOffset(obj);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lockword from a bin slot into an offset
  AssignImageOffset(obj, bin_slot);
}

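// Editorial note (not in the original source): image layout is a two-pass scheme. The first
// heap visit (WalkFieldsCallback) assigns every object a BinSlot, which records only the bin
// and the offset within that bin. Once all bin sizes are known, the second visit
// (UnbinObjectsIntoOffsetCallback) rewrites each lock word with the final image offset via
// AssignImageOffset().
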
void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));

  auto* runtime = Runtime::Current();
  auto* heap = runtime->GetHeap();
  DCHECK_EQ(0U, image_end_);

  // Leave space for the header, but do not write it yet, we need to
  // know where image_roots is going to end up
  image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit-alignment

  image_objects_offset_begin_ = image_end_;
  // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
  heap->VisitObjects(WalkFieldsCallback, this);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
  image_methods_[ImageHeader::kRefsOnlySaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
  image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);

  // Add room for fake length prefixed array.
  const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
  auto it = native_object_relocations_.find(&image_method_array_);
  CHECK(it == native_object_relocations_.end());
  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
  native_object_relocations_.emplace(&image_method_array_,
                                     NativeObjectRelocation { offset, image_method_type });
  size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
  const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
      0, ArtMethod::Size(target_ptr_size_), method_alignment);
  CHECK_ALIGNED_PARAM(array_size, method_alignment);
  offset += array_size;
  for (auto* m : image_methods_) {
    CHECK(m != nullptr);
    CHECK(m->IsRuntimeMethod());
    AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
  }
  // Calculate size of the dex cache arrays slot and prepare offsets.
  PrepareDexCacheArraySlots();

  // Calculate bin slot offsets.
  size_t bin_offset = image_objects_offset_begin_;
  for (size_t i = 0; i != kBinSize; ++i) {
    bin_slot_offsets_[i] = bin_offset;
    bin_offset += bin_slot_sizes_[i];
    if (i == kBinArtField) {
      static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
      static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
      DCHECK_ALIGNED(bin_offset, 4u);
      DCHECK(method_alignment == 4u || method_alignment == 8u);
      bin_offset = RoundUp(bin_offset, method_alignment);
    }
  }
  // NOTE: There may be additional padding between the bin slots and the intern table.

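  // Editorial note (illustrative numbers, not in the original source): the loop above is a
  // prefix sum. If bin 0 were 0x2000 bytes, bin 1 0x1000 bytes and bin 2 0x3000 bytes, with a
  // 0x1000-byte header area (image_objects_offset_begin_), the resulting bin_slot_offsets_
  // would be 0x1000, 0x3000 and 0x4000: each bin starts where the previous one ends, plus an
  // extra round-up between the ArtField and ArtMethod bins when methods need 8-byte alignment.
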
  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);

  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));

  // Update the native relocations by adding their bin sums.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
    relocation.offset += bin_slot_offsets_[bin_type];
  }

  // Calculate how big the intern table will be after being serialized.
  auto* const intern_table = Runtime::Current()->GetInternTable();
  CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
  intern_table_bytes_ = intern_table->WriteToMemory(nullptr);

  // Note that image_end_ is left at end of used mirror object section.
}

982void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
983 CHECK_NE(0U, oat_loaded_size);
Ian Rogers13735952014-10-08 12:43:28 -0700984 const uint8_t* oat_file_begin = GetOatFileBegin();
985 const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700986 oat_data_begin_ = oat_file_begin + oat_data_offset;
Ian Rogers13735952014-10-08 12:43:28 -0700987 const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700988
989 // Create the image sections.
990 ImageSection sections[ImageHeader::kSectionCount];
991 // Objects section
992 auto* objects_section = &sections[ImageHeader::kSectionObjects];
993 *objects_section = ImageSection(0u, image_end_);
994 size_t cur_pos = objects_section->End();
995 // Add field section.
996 auto* field_section = &sections[ImageHeader::kSectionArtFields];
997 *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
Vladimir Markocf36d492015-08-12 19:27:26 +0100998 CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700999 cur_pos = field_section->End();
Vladimir Markocf36d492015-08-12 19:27:26 +01001000  // Round up to the alignment required by the method section.
Vladimir Marko14632852015-08-17 12:07:23 +01001001 cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001002 // Add method section.
1003 auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
1004 *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
1005 bin_slot_sizes_[kBinArtMethodDirty]);
Vladimir Markocf36d492015-08-12 19:27:26 +01001006 CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001007 cur_pos = methods_section->End();
Vladimir Marko05792b92015-08-03 11:56:49 +01001008 // Add dex cache arrays section.
1009 auto* dex_cache_arrays_section = &sections[ImageHeader::kSectionDexCacheArrays];
1010 *dex_cache_arrays_section = ImageSection(cur_pos, bin_slot_sizes_[kBinDexCacheArray]);
1011 CHECK_EQ(bin_slot_offsets_[kBinDexCacheArray], dex_cache_arrays_section->Offset());
1012 cur_pos = dex_cache_arrays_section->End();
Nicolas Geoffray7bf2b4f2015-07-08 10:11:59 +00001013 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
1014 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001015 // Calculate the size of the interned strings.
1016 auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
1017 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1018 cur_pos = interned_strings_section->End();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001019 // Finally bitmap section.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001020 const size_t bitmap_bytes = image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001021 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1022 *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1023 cur_pos = bitmap_section->End();
1024 if (kIsDebugBuild) {
1025 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001026 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001027 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1028 ++idx;
1029 }
1030 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1031 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001032 const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
1033 CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
1034 "Oat file should be right after the image.";
Mathieu Chartiere401d142015-04-22 13:56:20 -07001035 // Create the header.
1036 new (image_->Begin()) ImageHeader(
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001037 PointerToLowMemUInt32(image_begin_), image_end,
1038 sections, image_roots_address_, oat_file_->GetOatHeader().GetChecksum(),
Mathieu Chartiere401d142015-04-22 13:56:20 -07001039 PointerToLowMemUInt32(oat_file_begin), PointerToLowMemUInt32(oat_data_begin_),
1040 PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), target_ptr_size_,
1041 compile_pic_);
1042}
1043
1044ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001045 auto it = native_object_relocations_.find(method);
1046 CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001047 CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
1048 return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001049}
1050
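// Visitor that rewrites roots to the addresses their objects will have in the mapped image.
// Used below to fix up the copy of the intern table that gets serialized into the image.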
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001051class FixupRootVisitor : public RootVisitor {
1052 public:
1053 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1054 }
1055
1056 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001057 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001058 for (size_t i = 0; i < count; ++i) {
1059 *roots[i] = ImageAddress(*roots[i]);
1060 }
1061 }
1062
1063 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1064 const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001065 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001066 for (size_t i = 0; i < count; ++i) {
1067 roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
1068 }
1069 }
1070
1071 private:
1072 ImageWriter* const image_writer_;
1073
Mathieu Chartier90443472015-07-16 20:32:27 -07001074 mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001075 const size_t offset = image_writer_->GetImageOffset(obj);
1076 auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
1077 VLOG(compiler) << "Update root from " << obj << " to " << dest;
1078 return dest;
1079 }
1080};
1081
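// Copies the non-mirror data into the image: ArtFields, ArtMethods and their length-prefixed
// arrays go into their reserved bins, the image methods recorded in the header are pointed at
// their image copies, and the intern table is serialized with its roots rewritten to image
// addresses.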
Mathieu Chartierc7853442015-03-27 14:35:38 -07001082void ImageWriter::CopyAndFixupNativeData() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001083 // Copy ArtFields and methods to their locations and update the array for convenience.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001084 for (auto& pair : native_object_relocations_) {
1085 NativeObjectRelocation& relocation = pair.second;
1086 auto* dest = image_->Begin() + relocation.offset;
1087 DCHECK_GE(dest, image_->Begin() + image_end_);
1088 switch (relocation.type) {
1089 case kNativeObjectRelocationTypeArtField: {
1090 memcpy(dest, pair.first, sizeof(ArtField));
1091 reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
1092 GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
1093 break;
1094 }
1095 case kNativeObjectRelocationTypeArtMethodClean:
1096 case kNativeObjectRelocationTypeArtMethodDirty: {
1097 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1098 reinterpret_cast<ArtMethod*>(dest));
1099 break;
1100 }
1101 // For arrays, copy just the header since the elements will get copied by their corresponding
1102 // relocations.
1103 case kNativeObjectRelocationTypeArtFieldArray: {
1104 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1105 break;
1106 }
1107 case kNativeObjectRelocationTypeArtMethodArrayClean:
1108 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
Vladimir Markocf36d492015-08-12 19:27:26 +01001109 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
1110 0,
Vladimir Marko14632852015-08-17 12:07:23 +01001111 ArtMethod::Size(target_ptr_size_),
1112 ArtMethod::Alignment(target_ptr_size_)));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001113        break;
1114      }
Vladimir Marko05792b92015-08-03 11:56:49 +01001115      case kNativeObjectRelocationTypeDexCacheArray:
1116        // Nothing to copy here, everything is done in FixupDexCache().
1117        break;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001118 }
1119 }
1120 // Fixup the image method roots.
1121 auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001122 const ImageSection& methods_section = image_header->GetMethodsSection();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001123 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
1124 auto* m = image_methods_[i];
1125 CHECK(m != nullptr);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001126 auto it = native_object_relocations_.find(m);
1127    CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
1128 NativeObjectRelocation& relocation = it->second;
1129 CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
Mathieu Chartiere401d142015-04-22 13:56:20 -07001130 << methods_section;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001131 CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001132 auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
1133 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001134 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001135 // Write the intern table into the image.
1136 const ImageSection& intern_table_section = image_header->GetImageSection(
1137 ImageHeader::kSectionInternedStrings);
1138 InternTable* const intern_table = Runtime::Current()->GetInternTable();
1139 uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
1140 const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
1141 // Fixup the pointers in the newly written intern table to contain image addresses.
1142 InternTable temp_table;
1143 // Note that we require that ReadFromMemory does not make an internal copy of the elements so that
1144  // VisitRoots() updates the serialized memory directly rather than copies of the elements.
1145  // This also relies on VisitRoots() not doing any verification, which could fail after we update
1146  // the roots to be image addresses.
1147 temp_table.ReadFromMemory(memory_ptr);
1148 CHECK_EQ(temp_table.Size(), intern_table->Size());
1149 FixupRootVisitor visitor(this);
1150 temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
1151 CHECK_EQ(intern_table_bytes, intern_table_bytes_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001152}
1153
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001154void ImageWriter::CopyAndFixupObjects() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001155 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001156 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
1157  // Fix up the objects that previously had hash codes.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001158 for (const auto& hash_pair : saved_hashcode_map_) {
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001159 Object* obj = hash_pair.first;
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001160 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
1161 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001162 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001163 saved_hashcode_map_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001164}
1165
Mathieu Chartier590fee92013-09-13 13:46:47 -07001166void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
Mathieu Chartier4d7f61d2014-04-17 14:43:39 -07001167 DCHECK(obj != nullptr);
1168 DCHECK(arg != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001169 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
1170}
1171
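// Pointer arrays are int[] or long[] whose elements are native pointers to ArtMethods or
// ArtFields (for example, a class's vtable), so each element is remapped through the native
// relocation table instead of being treated as a mirror reference.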
Mathieu Chartiere401d142015-04-22 13:56:20 -07001172void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
1173 mirror::Class* klass, Bin array_type) {
1174 CHECK(klass->IsArrayClass());
1175 CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
1176 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001177 const size_t num_elements = arr->GetLength();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001178 dst->SetClass(GetImageAddress(arr->GetClass()));
1179 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001180 for (size_t i = 0, count = num_elements; i < count; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001181 auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
1182 if (elem != nullptr) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001183 auto it = native_object_relocations_.find(elem);
Vladimir Marko05792b92015-08-03 11:56:49 +01001184 if (UNLIKELY(it == native_object_relocations_.end())) {
        // 'it' cannot be dereferenced when the lookup fails; use the array's bin type to pick
        // the diagnostic message instead.
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001185        if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001186 auto* method = reinterpret_cast<ArtMethod*>(elem);
1187 LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
1188 << method << " idx=" << i << "/" << num_elements << " with declaring class "
1189 << PrettyClass(method->GetDeclaringClass());
1190 } else {
1191 CHECK_EQ(array_type, kBinArtField);
1192 auto* field = reinterpret_cast<ArtField*>(elem);
1193 LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
1194 << field << " idx=" << i << "/" << num_elements << " with declaring class "
1195 << PrettyClass(field->GetDeclaringClass());
1196 }
Vladimir Marko05792b92015-08-03 11:56:49 +01001197 UNREACHABLE();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001198 } else {
1199 elem = image_begin_ + it->second.offset;
1200 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001201 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001202 dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001203 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001204}
1205
1206void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001207 size_t offset = GetImageOffset(obj);
1208 auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001209 DCHECK_LT(offset, image_end_);
1210 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001211
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001212 image_bitmap_->Set(dst); // Mark the obj as live.
1213
1214 const size_t n = obj->SizeOf();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001215 DCHECK_LE(offset + n, image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001216 memcpy(dst, src, n);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001217
Mathieu Chartierad2541a2013-10-25 10:05:23 -07001218  // Write back the hash code for objects that had inflated monitors or a hash code in their
1219  // monitor word.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001220 const auto it = saved_hashcode_map_.find(obj);
1221 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1222 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001223 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001224}
1225
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001226// Rewrite all the references in the copied object to point to their image address equivalent
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001227class FixupVisitor {
1228 public:
1229 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1230 }
1231
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001232 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1233 // with other logic.
1234 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1235 const {}
1236 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1237
1238
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001239 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001240 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Hiroshi Yamauchi6e83c172014-05-01 21:25:41 -07001241 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001242 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1243 // image.
1244 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001245 offset, image_writer_->GetImageAddress(ref));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001246 }
1247
1248 // java.lang.ref.Reference visitor.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001249 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001250 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001251 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001252 mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001253 }
1254
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001255 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001256 ImageWriter* const image_writer_;
1257 mirror::Object* const copy_;
1258};
1259
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001260class FixupClassVisitor FINAL : public FixupVisitor {
1261 public:
1262 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1263 }
1264
Mathieu Chartierc7853442015-03-27 14:35:38 -07001265 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001266 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001267 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001268 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001269 }
1270
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001271 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1272 mirror::Reference* ref ATTRIBUTE_UNUSED) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001273 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001274 LOG(FATAL) << "Reference not expected here.";
1275 }
1276};
1277
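// Maps a native object (ArtField, ArtMethod or dex cache array) that was assigned an offset
// during layout to its position in the image; NativeLocationInImage below adds image_begin_ to
// turn that offset into the pointer value that will be valid once the image is mapped.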
Vladimir Marko05792b92015-08-03 11:56:49 +01001278uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
1279 DCHECK(obj != nullptr);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001280 auto it = native_object_relocations_.find(obj);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001281 CHECK(it != native_object_relocations_.end()) << obj;
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001282 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko05792b92015-08-03 11:56:49 +01001283 return relocation.offset;
1284}
1285
1286template <typename T>
1287T* ImageWriter::NativeLocationInImage(T* obj) {
1288 if (obj == nullptr) {
1289 return nullptr;
1290 }
1291 return reinterpret_cast<T*>(image_begin_ + NativeOffsetInImage(obj));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001292}
1293
Mathieu Chartierc7853442015-03-27 14:35:38 -07001294void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001295 // Update the field arrays.
Vladimir Marko05792b92015-08-03 11:56:49 +01001296 copy->SetSFieldsPtrUnchecked(NativeLocationInImage(orig->GetSFieldsPtr()));
1297 copy->SetIFieldsPtrUnchecked(NativeLocationInImage(orig->GetIFieldsPtr()));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001298 // Update direct and virtual method arrays.
Vladimir Marko05792b92015-08-03 11:56:49 +01001299 copy->SetDirectMethodsPtrUnchecked(NativeLocationInImage(orig->GetDirectMethodsPtr()));
1300 copy->SetVirtualMethodsPtr(NativeLocationInImage(orig->GetVirtualMethodsPtr()));
1301 // Update dex cache strings.
1302 copy->SetDexCacheStrings(NativeLocationInImage(orig->GetDexCacheStrings()));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001303 // Fix up embedded tables.
1304 if (orig->ShouldHaveEmbeddedImtAndVTable()) {
1305 for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001306 auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
1307 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001308 copy->SetEmbeddedVTableEntryUnchecked(
1309 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
1310 }
1311 for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001312 auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
1313 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001314 copy->SetEmbeddedImTableEntry(
1315 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001316 }
1317 }
1318 FixupClassVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001319 static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001320}
1321
Ian Rogersef7d42f2014-01-06 12:55:46 -08001322void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001323 DCHECK(orig != nullptr);
1324 DCHECK(copy != nullptr);
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07001325 if (kUseBakerOrBrooksReadBarrier) {
1326 orig->AssertReadBarrierPointer();
1327 if (kUseBrooksReadBarrier) {
1328 // Note the address 'copy' isn't the same as the image address of 'orig'.
1329 copy->SetReadBarrierPointer(GetImageAddress(orig));
1330 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
1331 }
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08001332 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001333 auto* klass = orig->GetClass();
1334 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
Vladimir Marko05792b92015-08-03 11:56:49 +01001335 // Is this a native pointer array?
Mathieu Chartiere401d142015-04-22 13:56:20 -07001336 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
1337 if (it != pointer_arrays_.end()) {
1338 // Should only need to fixup every pointer array exactly once.
1339 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
1340 pointer_arrays_.erase(it);
1341 return;
1342 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001343 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001344 if (orig->IsClass()) {
1345 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001346 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001347 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
1348 // Need to go update the ArtMethod.
1349 auto* dest = down_cast<mirror::AbstractMethod*>(copy);
1350 auto* src = down_cast<mirror::AbstractMethod*>(orig);
1351 ArtMethod* src_method = src->GetArtMethod();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001352 auto it = native_object_relocations_.find(src_method);
1353 CHECK(it != native_object_relocations_.end())
1354 << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001355 dest->SetArtMethod(
1356 reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
Vladimir Marko05792b92015-08-03 11:56:49 +01001357 } else if (!klass->IsArrayClass()) {
1358 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1359 if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
1360 FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
1361 } else if (klass->IsSubClass(down_cast<mirror::Class*>(
1362 class_linker->GetClassRoot(ClassLinker::kJavaLangClassLoader)))) {
1363 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
1364 // ClassLoader.
1365 down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
1366 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001367 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001368 FixupVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001369 orig->VisitReferences(visitor, visitor);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001370 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001371}
1372
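// Rewrites the copied DexCache so that its strings, resolved types, resolved methods and
// resolved fields arrays point into the image's dex cache array section, and remaps every
// element of those arrays to its image address.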
Vladimir Marko05792b92015-08-03 11:56:49 +01001373void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
1374 mirror::DexCache* copy_dex_cache) {
1375 // Though the DexCache array fields are usually treated as native pointers, we set the full
1376 // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
1377 // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
1378  // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
1379 GcRoot<mirror::String>* orig_strings = orig_dex_cache->GetStrings();
1380 if (orig_strings != nullptr) {
1381 uintptr_t copy_strings_offset = NativeOffsetInImage(orig_strings);
1382 copy_dex_cache->SetField64<false>(
1383 mirror::DexCache::StringsOffset(),
1384 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_strings_offset)));
1385 GcRoot<mirror::String>* copy_strings =
1386 reinterpret_cast<GcRoot<mirror::String>*>(image_->Begin() + copy_strings_offset);
1387 for (size_t i = 0, num = orig_dex_cache->NumStrings(); i != num; ++i) {
1388 copy_strings[i] = GcRoot<mirror::String>(GetImageAddress(orig_strings[i].Read()));
1389 }
1390 }
1391 GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes();
1392 if (orig_types != nullptr) {
1393 uintptr_t copy_types_offset = NativeOffsetInImage(orig_types);
1394 copy_dex_cache->SetField64<false>(
1395 mirror::DexCache::ResolvedTypesOffset(),
1396 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_types_offset)));
1397 GcRoot<mirror::Class>* copy_types =
1398 reinterpret_cast<GcRoot<mirror::Class>*>(image_->Begin() + copy_types_offset);
1399 for (size_t i = 0, num = orig_dex_cache->NumResolvedTypes(); i != num; ++i) {
1400 copy_types[i] = GcRoot<mirror::Class>(GetImageAddress(orig_types[i].Read()));
1401 }
1402 }
1403 ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
1404 if (orig_methods != nullptr) {
1405 uintptr_t copy_methods_offset = NativeOffsetInImage(orig_methods);
1406 copy_dex_cache->SetField64<false>(
1407 mirror::DexCache::ResolvedMethodsOffset(),
1408 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_methods_offset)));
1409 ArtMethod** copy_methods =
1410 reinterpret_cast<ArtMethod**>(image_->Begin() + copy_methods_offset);
1411 for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
1412 ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
1413 ArtMethod* copy = NativeLocationInImage(orig);
1414 mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
1415 }
1416 }
1417 ArtField** orig_fields = orig_dex_cache->GetResolvedFields();
1418 if (orig_fields != nullptr) {
1419 uintptr_t copy_fields_offset = NativeOffsetInImage(orig_fields);
1420 copy_dex_cache->SetField64<false>(
1421 mirror::DexCache::ResolvedFieldsOffset(),
1422 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_fields_offset)));
1423 ArtField** copy_fields = reinterpret_cast<ArtField**>(image_->Begin() + copy_fields_offset);
1424 for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
1425 ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_);
1426 ArtField* copy = NativeLocationInImage(orig);
1427 mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_);
1428 }
1429 }
1430}
1431
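// Chooses the quick entry point to record for a compiled method: the method's own code when it
// can run directly (non-static, a constructor, or the declaring class is already initialized),
// the generic JNI trampoline for native methods without code, the interpreter bridge when there
// is no code at all, and the resolution trampoline for static methods whose declaring class
// still needs initialization.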
Mathieu Chartiere401d142015-04-22 13:56:20 -07001432const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001433 DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001434 !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001435
1436 // Use original code if it exists. Otherwise, set the code pointer to the resolution
1437 // trampoline.
1438
1439 // Quick entrypoint:
Jeff Haoc7d11882015-02-03 15:08:39 -08001440 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
1441 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
1442 const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001443 *quick_is_interpreted = false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001444 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
1445 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001446 // We have code for a non-static or initialized method, just use the code.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001447 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001448 } else if (quick_code == nullptr && method->IsNative() &&
1449 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
1450 // Non-static or initialized native method missing compiled code, use generic JNI version.
1451 quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001452 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001453 } else if (quick_code == nullptr && !method->IsNative()) {
1454 // We don't have code at all for a non-native method, use the interpreter.
1455 quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
1456 *quick_is_interpreted = true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001457 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001458 } else {
1459 CHECK(!method->GetDeclaringClass()->IsInitialized());
1460 // We have code for a static method, but need to go through the resolution stub for class
1461 // initialization.
1462 quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001463 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001464 }
1465 return quick_code;
1466}
1467
Mathieu Chartiere401d142015-04-22 13:56:20 -07001468const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001469  // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
1470 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001471 Runtime* runtime = Runtime::Current();
1472 if (UNLIKELY(method == runtime->GetResolutionMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001473 return GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001474 } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
1475 method == runtime->GetImtUnimplementedMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001476 return GetOatAddress(quick_imt_conflict_trampoline_offset_);
1477 } else {
1478    // We assume all methods have code. If they don't currently, then we set them to use the
1479 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1480 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1481 if (UNLIKELY(method->IsAbstract())) {
1482 return GetOatAddress(quick_to_interpreter_bridge_offset_);
1483 } else {
1484 bool quick_is_interpreted;
1485 return GetQuickCode(method, &quick_is_interpreted);
1486 }
1487 }
1488}
1489
Mathieu Chartiere401d142015-04-22 13:56:20 -07001490void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
Vladimir Marko14632852015-08-17 12:07:23 +01001491 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001492
1493 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
Vladimir Marko05792b92015-08-03 11:56:49 +01001494
1495 ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
1496 copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
1497 GcRoot<mirror::Class>* orig_resolved_types = orig->GetDexCacheResolvedTypes(target_ptr_size_);
1498 copy->SetDexCacheResolvedTypes(NativeLocationInImage(orig_resolved_types), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001499
Ian Rogers848871b2013-08-05 10:56:33 -07001500 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
1501 // oat_begin_
Brian Carlstrom7940e442013-07-12 13:46:57 -07001502
Ian Rogers848871b2013-08-05 10:56:33 -07001503 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001504 Runtime* runtime = Runtime::Current();
1505 if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001506 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001507 GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001508 } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
1509 orig == runtime->GetImtUnimplementedMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001510 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001511 GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001512 } else if (UNLIKELY(orig->IsRuntimeMethod())) {
1513 bool found_one = false;
1514 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
1515 auto idx = static_cast<Runtime::CalleeSaveType>(i);
1516 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
1517 found_one = true;
1518 break;
1519 }
1520 }
1521 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
1522 CHECK(copy->IsRuntimeMethod());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001523 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07001524    // We assume all methods have code. If they don't currently, then we set them to use the
1525 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1526 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1527 if (UNLIKELY(orig->IsAbstract())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001528 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001529 GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001530 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001531 bool quick_is_interpreted;
Ian Rogers13735952014-10-08 12:43:28 -07001532 const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001533 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02001534
Sebastien Hertze1d07812014-05-21 15:44:09 +02001535 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07001536 if (orig->IsNative()) {
1537        // The native method's pointer is set to a stub that does the lookup via dlsym.
1538        // Note this is not the code_ pointer; that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001539 copy->SetEntryPointFromJniPtrSize(
1540 GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001541 }
1542 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001543 }
1544}
1545
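// Locates the OatHeader inside the ELF file's .rodata section; SetOatChecksumFromElfFile() below
// uses it to copy the oat checksum into the already-written image header.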
Alex Lighta59dd802014-07-02 16:28:08 -07001546static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
Tong Shen62d1ca32014-09-03 17:24:56 -07001547 uint64_t data_sec_offset;
1548 bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
1549 if (!has_data_sec) {
Alex Lighta59dd802014-07-02 16:28:08 -07001550 return nullptr;
1551 }
Tong Shen62d1ca32014-09-03 17:24:56 -07001552 return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001553}
1554
Vladimir Markof4da6752014-08-01 19:04:18 +01001555void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
Alex Lighta59dd802014-07-02 16:28:08 -07001556 std::string error_msg;
1557 std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ|PROT_WRITE,
1558 MAP_SHARED, &error_msg));
1559 if (elf.get() == nullptr) {
Vladimir Markof4da6752014-08-01 19:04:18 +01001560    LOG(FATAL) << "Unable to open oat file: " << error_msg;
Alex Lighta59dd802014-07-02 16:28:08 -07001561 return;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001562 }
Alex Lighta59dd802014-07-02 16:28:08 -07001563 OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
1564 CHECK(oat_header != nullptr);
1565 CHECK(oat_header->IsValid());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001566
Brian Carlstrom7940e442013-07-12 13:46:57 -07001567 ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Alex Lighta59dd802014-07-02 16:28:08 -07001568 image_header->SetOatChecksum(oat_header->GetChecksum());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001569}
1570
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001571size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
1572 DCHECK_LE(up_to, kBinSize);
1573 return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
1574}
1575
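// A BinSlot packs an object's bin and its byte index within that bin into a single 32-bit lock
// word: the bin lives in the high bits starting at kBinShift (bit 27) and the index in the bits
// below it. The checks verify the bin is in range and the index stays kObjectAlignment-aligned.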
1576ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
1577 // These values may need to get updated if more bins are added to the enum Bin
Mathieu Chartiere401d142015-04-22 13:56:20 -07001578 static_assert(kBinBits == 3, "wrong number of bin bits");
1579  static_assert(kBinShift == 27, "wrong number of shift bits");
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001580 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
1581
1582 DCHECK_LT(GetBin(), kBinSize);
1583 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
1584}
1585
1586ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
1587 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
1588 DCHECK_EQ(index, GetIndex());
1589}
1590
1591ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
1592 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
1593}
1594
1595uint32_t ImageWriter::BinSlot::GetIndex() const {
1596 return lockword_ & ~kBinMask;
1597}
1598
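// The oat file is mapped directly after the image's mirror objects, native sections (fields,
// methods, dex cache arrays) and intern table, with the start rounded up to a page boundary.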
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001599uint8_t* ImageWriter::GetOatFileBegin() const {
1600 DCHECK_GT(intern_table_bytes_, 0u);
Vladimir Marko05792b92015-08-03 11:56:49 +01001601 size_t native_sections_size =
1602 bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
1603 bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinDexCacheArray] +
1604 intern_table_bytes_;
1605 return image_begin_ + RoundUp(image_end_ + native_sections_size, kPageSize);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001606}
1607
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001608ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
1609 switch (type) {
1610 case kNativeObjectRelocationTypeArtField:
1611 case kNativeObjectRelocationTypeArtFieldArray:
1612 return kBinArtField;
1613 case kNativeObjectRelocationTypeArtMethodClean:
1614 case kNativeObjectRelocationTypeArtMethodArrayClean:
1615 return kBinArtMethodClean;
1616 case kNativeObjectRelocationTypeArtMethodDirty:
1617 case kNativeObjectRelocationTypeArtMethodArrayDirty:
1618 return kBinArtMethodDirty;
Vladimir Marko05792b92015-08-03 11:56:49 +01001619 case kNativeObjectRelocationTypeDexCacheArray:
1620 return kBinDexCacheArray;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001621 }
1622 UNREACHABLE();
1623}
1624
Brian Carlstrom7940e442013-07-12 13:46:57 -07001625} // namespace art