/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;
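// How the binning fits together (a rough sketch; the Bin enum itself is defined in
// image_writer.h, so the ordering described here is an assumption based on the comments and
// asserts in this file):
//  * Every mirror object gets a BinSlot (a bin plus a byte index within that bin) while the
//    heap is walked; bin_slot_sizes_[bin] tracks each bin's running size.
//  * Dex cache arrays go in kBinDexCacheArray, kept at the beginning so their fixed,
//    pre-computed layout can be addressed PC-relatively.
//  * Classes, strings and everything else land in the kBinClass*/kBinString/kBinRegular bins,
//    grouped by how likely they are to be dirtied at runtime (see AssignImageBinSlot()).
//  * Native ArtField/ArtMethod data gets kBinArtField/kBinArtMethod{Clean,Dirty} slots after
//    the mirror bins; final offsets are fixed up in CalculateNewObjectOffsets().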

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk.
    ComputeLazyFieldsForImageClasses();  // Add useful information.
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_
  // and bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}
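// Overall flow: PrepareImageAddressSpace() above lays the image out (prune classes, pre-compute
// lazy fields, GC, assign offsets, allocate the backing MemMap); Write() below then fills that
// buffer via CreateHeader(), CopyAndFixupNativeData() and CopyAndFixupObjects(), and writes it
// to disk followed by the image bitmap.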

bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  CHECK_EQ(class_linker->RegisterOatFile(oat_file_), oat_file_);

  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);

  {
    ScopedObjectAccess soa(Thread::Current());
    CreateHeader(oat_loaded_size, oat_data_offset);
    CopyAndFixupNativeData();
    // TODO: heap validation can't handle these fix up passes.
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields + methods.
  const auto write_count = image_header->GetImageSize();
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  const ImageSection& bitmap_section = image_header->GetImageSection(
      ImageHeader::kSectionImageBitmap);
  CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         bitmap_section.Size(), bitmap_section.Offset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}
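// Illustrative sketch of the lock word reuse above (assumed usage; LockWord and BinSlot define
// the authoritative encoding):
//
//   BinSlot slot(kBinString, 0x40);  // a bin plus a byte index within that bin
//   object->SetLockWord(LockWord::FromForwardingAddress(slot.Uint32Value()), false);
//   BinSlot back(object->GetLockWord(false).ForwardingAddress());
//   // back.GetBin() == kBinString, back.GetIndex() == 0x40
//
// Because the bin slot borrows the kForwardingAddress state, a previously installed hash code
// has to be saved to saved_hashcode_map_ first, and fat/thin-locked objects are a hard error.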

void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  uint32_t size = 0u;
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    auto types_size = layout.TypesSize(dex_file->NumTypeIds());
    auto methods_size = layout.MethodsSize(dex_file->NumMethodIds());
    auto fields_size = layout.FieldsSize(dex_file->NumFieldIds());
    auto strings_size = layout.StringsSize(dex_file->NumStringIds());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedTypes(),
        DexCacheArrayLocation {size + layout.TypesOffset(), types_size, kBinRegular});
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedMethods(),
        DexCacheArrayLocation {size + layout.MethodsOffset(), methods_size, kBinArtMethodClean});
    AddMethodPointerArray(dex_cache->GetResolvedMethods());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedFields(),
        DexCacheArrayLocation {size + layout.FieldsOffset(), fields_size, kBinArtField});
    pointer_arrays_.emplace(dex_cache->GetResolvedFields(), kBinArtField);
    dex_cache_array_indexes_.Put(
        dex_cache->GetStrings(),
        DexCacheArrayLocation {size + layout.StringsOffset(), strings_size, kBinRegular});
    size += layout.Size();
    CHECK_EQ(layout.Size(), types_size + methods_size + fields_size + strings_size);
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}
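// Worked example of the bookkeeping above, with assumed (not real) layout values: if a dex
// cache reports TypesOffset() == 0x0, MethodsOffset() == 0x100, FieldsOffset() == 0x300,
// StringsOffset() == 0x500 and Size() == 0x600, its four arrays land back to back at
// [size, size + 0x600) within kBinDexCacheArray, and the next dex cache starts at size + 0x600.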

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        auto* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || IsImageClass(klass)) << PrettyClass(klass)
            << " should be an image class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the beginning.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->IsArrayInstance()) {
      mirror::Class* klass = object->GetClass<kVerifyNone>();
      if (klass->IsObjectArrayClass() || klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
        auto it = dex_cache_array_indexes_.find(object);
        if (it != dex_cache_array_indexes_.end()) {
          bin = kBinDexCacheArray;
          // Use prepared offset defined by the DexCacheLayout.
          current_offset = it->second.offset_;
          // Override in case of cross compilation.
          object_size = it->second.length_;
        }  // else bin = kBinRegular
      }
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  if (bin != kBinDexCacheArray) {
    DCHECK(dex_cache_array_indexes_.find(object) == dex_cache_array_indexes_.end()) << object;
    current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
    // Move the current bin size up to accommodate the object we just assigned a bin slot.
    bin_slot_sizes_[bin] += offset_delta;
  }

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}
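// Worked example of the offset bookkeeping above (numbers assumed): binning a 20-byte string
// while bin_slot_sizes_[kBinString] == 0x80 gives offset_delta = RoundUp(20, kObjectAlignment)
// = 24, a BinSlot of (kBinString, 0x80), a new bin size of 0x80 + 24, and image_end_ grown by
// 24. Dex cache arrays skip this path because their offsets were fixed in
// PrepareDexCacheArraySlots().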

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  const size_t length = RoundUp(
      image_objects_offset_begin_ + GetBinSizeSum() + intern_table_bytes_, kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap", image_->Begin(), RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

bool ImageWriter::IsImageClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->IsImageClass(klass)) {
      std::string temp;
      non_image_classes_.insert(klass->GetDescriptor(&temp));
    }
    return true;
  }

  std::set<std::string> non_image_classes_;
  ImageWriter* const image_writer_;
};

void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : visitor.non_image_classes_) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  const ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    auto* resolved_methods = down_cast<mirror::PointerArray*>(dex_cache->GetResolvedMethods());
    for (size_t i = 0, len = resolved_methods->GetLength(); i < len; i++) {
      auto* method = resolved_methods->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but have a
        // declaring class which is an image class. Set it to the resolution method to be safe and
        // prevent dangling pointers.
        if (method->IsMiranda() || !IsImageClass(declaring_class)) {
          resolved_methods->SetElementPtrSize(i, resolution_method, target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
                                               << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not already interned.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    // InternStrongImageString allows us to intern while holding the heap bitmap lock. This is
    // safe since we are guaranteed to not have GC during image writing.
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
        obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned obj is after us, allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}
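// A consequence of the interning above: two distinct heap strings with equal contents share one
// bin slot, so they collapse to a single String in the written image and references to either
// resolve to that one copy.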

ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then again to copy the dex caches. We check
  // that the number of dex caches does not change.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    size_t i = 0;
    for (jobject weak_root : class_linker->GetDexCaches()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
      dex_caches->Set<false>(i, dex_cache);
      ++i;
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  }
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    }
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
  }
}

// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields.
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects.
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // Visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
      // Visit and assign offsets for fields and field arrays.
      auto* as_klass = h_obj->AsClass();
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
              << " already forwarded";
          size_t& offset = bin_slot_sizes_[kBinArtField];
          native_object_relocations_.emplace(
              cur_fields, NativeObjectRelocation {
                  offset, kNativeObjectRelocationTypeArtFieldArray });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
            // Need to forward arrays separate of fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            native_object_relocations_.emplace(
                field, NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      LengthPrefixedArray<ArtMethod>* method_arrays[] = {
          as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
      };
      for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
        if (array == nullptr) {
          continue;
        }
        bool any_dirty = false;
        size_t count = 0;
        const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
        const size_t method_size = ArtMethod::Size(target_ptr_size_);
        auto iteration_range =
            MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
        for (auto& m : iteration_range) {
          any_dirty = any_dirty || WillMethodBeDirty(&m);
          ++count;
        }
        NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
            kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
                                                                               method_size,
                                                                               method_alignment);
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end()) << "Method array " << array
            << " already forwarded";
        size_t& offset = bin_slot_sizes_[bin_type];
        native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
            any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
                kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : iteration_range) {
          AssignMethodOffset(&m, type);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += count;
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}
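// Note on the forwarding above: ArtField/ArtMethod arrays are recorded as NativeObjectRelocation
// entries keyed by their native pointer, with offsets that are still bin-relative at this point.
// CalculateNewObjectOffsets() later adds bin_slot_offsets_[bin] to each entry to produce final
// image offsets.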

void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
  auto it = native_object_relocations_.find(method);
  CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
                                                << PrettyMethod(method);
  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
  offset += ArtMethod::Size(target_ptr_size_);
}

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);
}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->UnbinObjectsIntoOffset(obj);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lock word from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
}
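// After this pass the lock word of every mirror object holds its final image offset:
//
//   offset = bin_slot_offsets_[bin_slot.GetBin()] + bin_slot.GetIndex()
//
// (see AssignImageOffset()), so GetImageOffset() works uniformly from here on.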
909
Vladimir Markof4da6752014-08-01 19:04:18 +0100910void ImageWriter::CalculateNewObjectOffsets() {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700911 Thread* const self = Thread::Current();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700912 StackHandleScope<1> hs(self);
913 Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700914
Mathieu Chartiere401d142015-04-22 13:56:20 -0700915 auto* runtime = Runtime::Current();
916 auto* heap = runtime->GetHeap();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700917 DCHECK_EQ(0U, image_end_);
918
Mathieu Chartier31e89252013-08-28 11:29:12 -0700919 // Leave space for the header, but do not write it yet, we need to
Brian Carlstrom7940e442013-07-12 13:46:57 -0700920 // know where image_roots is going to end up
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800921 image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit-alignment
Brian Carlstrom7940e442013-07-12 13:46:57 -0700922
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800923 image_objects_offset_begin_ = image_end_;
Vladimir Marko20f85592015-03-19 10:07:02 +0000924 // Prepare bin slots for dex cache arrays.
925 PrepareDexCacheArraySlots();
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800926 // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
927 heap->VisitObjects(WalkFieldsCallback, this);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700928 // Write the image runtime methods.
929 image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
930 image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
931 image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
932 image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
933 image_methods_[ImageHeader::kRefsOnlySaveMethod] =
934 runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
935 image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
936 runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700937
938 // Add room for fake length prefixed array.
939 const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
940 auto it = native_object_relocations_.find(&image_method_array_);
941 CHECK(it == native_object_relocations_.end());
942 size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
943 native_object_relocations_.emplace(&image_method_array_,
944 NativeObjectRelocation { offset, image_method_type });
Vladimir Marko14632852015-08-17 12:07:23 +0100945 size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -0700946 const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
Vladimir Marko14632852015-08-17 12:07:23 +0100947 0, ArtMethod::Size(target_ptr_size_), method_alignment);
Vladimir Markocf36d492015-08-12 19:27:26 +0100948 CHECK_ALIGNED_PARAM(array_size, method_alignment);
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -0700949 offset += array_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700950 for (auto* m : image_methods_) {
951 CHECK(m != nullptr);
952 CHECK(m->IsRuntimeMethod());
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700953 AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700954 }
955
Vladimir Markocf36d492015-08-12 19:27:26 +0100956 // Calculate bin slot offsets.
957 size_t bin_offset = image_objects_offset_begin_;
Vladimir Marko20f85592015-03-19 10:07:02 +0000958 for (size_t i = 0; i != kBinSize; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +0100959 bin_slot_offsets_[i] = bin_offset;
960 bin_offset += bin_slot_sizes_[i];
961 if (i == kBinArtField) {
962 static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
963 static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
964 DCHECK_ALIGNED(bin_offset, 4u);
965 DCHECK(method_alignment == 4u || method_alignment == 8u);
966 bin_offset = RoundUp(bin_offset, method_alignment);
967 }
Vladimir Marko20f85592015-03-19 10:07:02 +0000968 }
Vladimir Markocf36d492015-08-12 19:27:26 +0100969 // NOTE: There may be additional padding between the bin slots and the intern table.
970
Mathieu Chartierc7853442015-03-27 14:35:38 -0700971 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
972
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800973 // Transform each object's bin slot into an offset which will be used to do the final copy.
974 heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700975
Mathieu Chartierc7853442015-03-27 14:35:38 -0700976 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800977
Vladimir Markof4da6752014-08-01 19:04:18 +0100978 image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));
979
Mathieu Chartiere401d142015-04-22 13:56:20 -0700980 // Update the native relocations by adding their bin sums.
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700981 for (auto& pair : native_object_relocations_) {
982 NativeObjectRelocation& relocation = pair.second;
983 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
Vladimir Markocf36d492015-08-12 19:27:26 +0100984 relocation.offset += bin_slot_offsets_[bin_type];
Mathieu Chartiere401d142015-04-22 13:56:20 -0700985 }
986
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700987 // Calculate how big the intern table will be after being serialized.
988 auto* const intern_table = Runtime::Current()->GetInternTable();
989 CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong-interned all the strings";
990 intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
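  // Passing nullptr here makes WriteToMemory() a dry run that just returns the byte count;
  // the table is actually written into the image in CopyAndFixupNativeData() below.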
991
Mathieu Chartiere401d142015-04-22 13:56:20 -0700992 // Note that image_end_ is left at the end of the used mirror object section.
Vladimir Markof4da6752014-08-01 19:04:18 +0100993}
994
995void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
996 CHECK_NE(0U, oat_loaded_size);
Ian Rogers13735952014-10-08 12:43:28 -0700997 const uint8_t* oat_file_begin = GetOatFileBegin();
998 const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700999 oat_data_begin_ = oat_file_begin + oat_data_offset;
Ian Rogers13735952014-10-08 12:43:28 -07001000 const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001001
1002 // Create the image sections.
1003 ImageSection sections[ImageHeader::kSectionCount];
1004 // Add objects section.
1005 auto* objects_section = &sections[ImageHeader::kSectionObjects];
1006 *objects_section = ImageSection(0u, image_end_);
1007 size_t cur_pos = objects_section->End();
1008 // Add field section.
1009 auto* field_section = &sections[ImageHeader::kSectionArtFields];
1010 *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
Vladimir Markocf36d492015-08-12 19:27:26 +01001011 CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001012 cur_pos = field_section->End();
Vladimir Markocf36d492015-08-12 19:27:26 +01001013 // Round up to the alignment required by the method section.
Vladimir Marko14632852015-08-17 12:07:23 +01001014 cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001015 // Add method section.
1016 auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
1017 *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
1018 bin_slot_sizes_[kBinArtMethodDirty]);
Vladimir Markocf36d492015-08-12 19:27:26 +01001019 CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001020 cur_pos = methods_section->End();
Nicolas Geoffray7bf2b4f2015-07-08 10:11:59 +00001021 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
1022 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001023 // Add the interned strings section, using the size computed earlier.
1024 auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
1025 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1026 cur_pos = interned_strings_section->End();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001027 // Finally, add the bitmap section.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001028 const size_t bitmap_bytes = image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001029 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1030 *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1031 cur_pos = bitmap_section->End();
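  // At this point the section order is: objects, ArtFields, ArtMethods, interned strings,
  // then the page-aligned bitmap, with alignment padding between sections as noted above.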
1032 if (kIsDebugBuild) {
1033 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001034 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001035 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1036 ++idx;
1037 }
1038 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1039 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001040 const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
1041 CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
1042 "Oat file should be right after the image.";
Mathieu Chartiere401d142015-04-22 13:56:20 -07001043 // Create the header.
1044 new (image_->Begin()) ImageHeader(
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001045 PointerToLowMemUInt32(image_begin_), image_end,
1046 sections, image_roots_address_, oat_file_->GetOatHeader().GetChecksum(),
Mathieu Chartiere401d142015-04-22 13:56:20 -07001047 PointerToLowMemUInt32(oat_file_begin), PointerToLowMemUInt32(oat_data_begin_),
1048 PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), target_ptr_size_,
1049 compile_pic_);
1050}
1051
1052ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001053 auto it = native_object_relocations_.find(method);
1054 CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001055 CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
1056 return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001057}
1058
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001059class FixupRootVisitor : public RootVisitor {
1060 public:
1061 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1062 }
1063
1064 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001065 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001066 for (size_t i = 0; i < count; ++i) {
1067 *roots[i] = ImageAddress(*roots[i]);
1068 }
1069 }
1070
1071 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1072 const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001073 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001074 for (size_t i = 0; i < count; ++i) {
1075 roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
1076 }
1077 }
1078
1079 private:
1080 ImageWriter* const image_writer_;
1081
Mathieu Chartier90443472015-07-16 20:32:27 -07001082 mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001083 const size_t offset = image_writer_->GetImageOffset(obj);
1084 auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
1085 VLOG(compiler) << "Update root from " << obj << " to " << dest;
1086 return dest;
1087 }
1088};
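// FixupRootVisitor is used in CopyAndFixupNativeData() below to rewrite the serialized
// intern table's roots from runtime addresses to image addresses, e.g.:
//   FixupRootVisitor visitor(this);
//   temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);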
1089
Mathieu Chartierc7853442015-03-27 14:35:38 -07001090void ImageWriter::CopyAndFixupNativeData() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001091 // Copy ArtFields and ArtMethods to their image locations.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001092 for (auto& pair : native_object_relocations_) {
1093 NativeObjectRelocation& relocation = pair.second;
1094 auto* dest = image_->Begin() + relocation.offset;
1095 DCHECK_GE(dest, image_->Begin() + image_end_);
1096 switch (relocation.type) {
1097 case kNativeObjectRelocationTypeArtField: {
1098 memcpy(dest, pair.first, sizeof(ArtField));
1099 reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
1100 GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
1101 break;
1102 }
1103 case kNativeObjectRelocationTypeArtMethodClean:
1104 case kNativeObjectRelocationTypeArtMethodDirty: {
1105 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1106 reinterpret_cast<ArtMethod*>(dest));
1107 break;
1108 }
1109 // For arrays, copy just the header since the elements will get copied by their corresponding
1110 // relocations.
1111 case kNativeObjectRelocationTypeArtFieldArray: {
1112 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1113 break;
1114 }
1115 case kNativeObjectRelocationTypeArtMethodArrayClean:
1116 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
Vladimir Markocf36d492015-08-12 19:27:26 +01001117 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
1118 0,
Vladimir Marko14632852015-08-17 12:07:23 +01001119 ArtMethod::Size(target_ptr_size_),
1120 ArtMethod::Alignment(target_ptr_size_)));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001121 break;
1122 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001123 }
1124 }
1125 // Fixup the image method roots.
1126 auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001127 const ImageSection& methods_section = image_header->GetMethodsSection();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001128 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
1129 auto* m = image_methods_[i];
1130 CHECK(m != nullptr);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001131 auto it = native_object_relocations_.find(m);
1132 CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
1133 NativeObjectRelocation& relocation = it->second;
1134 CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
Mathieu Chartiere401d142015-04-22 13:56:20 -07001135 << methods_section;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001136 CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001137 auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
1138 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001139 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001140 // Write the intern table into the image.
1141 const ImageSection& intern_table_section = image_header->GetImageSection(
1142 ImageHeader::kSectionInternedStrings);
1143 InternTable* const intern_table = Runtime::Current()->GetInternTable();
1144 uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
1145 const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
1146 // Fixup the pointers in the newly written intern table to contain image addresses.
1147 InternTable temp_table;
1148 // Note that we require that ReadFromMemory does not make an internal copy of the elements so that
1149 // VisitRoots() will update the memory directly rather than the copies.
1150 // This also relies on VisitRoots() not doing any verification which could fail after we update
1151 // the roots to be the image addresses.
1152 temp_table.ReadFromMemory(memory_ptr);
1153 CHECK_EQ(temp_table.Size(), intern_table->Size());
1154 FixupRootVisitor visitor(this);
1155 temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
1156 CHECK_EQ(intern_table_bytes, intern_table_bytes_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001157}
1158
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001159void ImageWriter::CopyAndFixupObjects() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001160 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001161 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
1162 // Fix up the objects that previously had hash codes.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001163 for (const auto& hash_pair : saved_hashcode_map_) {
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001164 Object* obj = hash_pair.first;
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001165 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
1166 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001167 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001168 saved_hashcode_map_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001169}
1170
Mathieu Chartier590fee92013-09-13 13:46:47 -07001171void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
Mathieu Chartier4d7f61d2014-04-17 14:43:39 -07001172 DCHECK(obj != nullptr);
1173 DCHECK(arg != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001174 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
1175}
1176
Mathieu Chartiere401d142015-04-22 13:56:20 -07001177void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
1178 mirror::Class* klass, Bin array_type) {
1179 CHECK(klass->IsArrayClass());
1180 CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
1181 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001182 const size_t num_elements = arr->GetLength();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001183 dst->SetClass(GetImageAddress(arr->GetClass()));
1184 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001185 for (size_t i = 0, count = num_elements; i < count; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001186 auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
1187 if (elem != nullptr) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001188 auto it = native_object_relocations_.find(elem);
1189 if (it == native_object_relocations_.end()) {
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001190 if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001191 auto* method = reinterpret_cast<ArtMethod*>(elem);
1192 LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
1193 << method << " idx=" << i << "/" << num_elements << " with declaring class "
1194 << PrettyClass(method->GetDeclaringClass());
1195 } else {
1196 CHECK_EQ(array_type, kBinArtField);
1197 auto* field = reinterpret_cast<ArtField*>(elem);
1198 LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
1199 << field << " idx=" << i << "/" << num_elements << " with declaring class "
1200 << PrettyClass(field->GetDeclaringClass());
1201 }
1202 } else {
1203 elem = image_begin_ + it->second.offset;
1204 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001205 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001206 dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001207 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001208}
1209
1210void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001211 size_t offset = GetImageOffset(obj);
1212 auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001213 DCHECK_LT(offset, image_end_);
1214 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001215
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001216 image_bitmap_->Set(dst); // Mark the obj as live.
1217
1218 const size_t n = obj->SizeOf();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001219 DCHECK_LE(offset + n, image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001220 memcpy(dst, src, n);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001221
Mathieu Chartierad2541a2013-10-25 10:05:23 -07001222 // Write in a hash code of objects which have inflated monitors or a hash code in their monitor
1223 // word.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001224 const auto it = saved_hashcode_map_.find(obj);
1225 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1226 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
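  // The source object's lock word was repurposed to hold its BinSlot during layout (BinSlot
  // overlays a LockWord), so the copy gets a fresh lock word here, restoring any saved hash.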
Mathieu Chartierc7853442015-03-27 14:35:38 -07001227 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001228}
1229
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001230// Rewrite all the references in the copied object to point to their image address equivalent
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001231class FixupVisitor {
1232 public:
1233 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1234 }
1235
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001236 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1237 // with other logic.
1238 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1239 const {}
1240 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1241
1242
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001243 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001244 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Hiroshi Yamauchi6e83c172014-05-01 21:25:41 -07001245 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001246 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1247 // image.
1248 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001249 offset, image_writer_->GetImageAddress(ref));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001250 }
1251
1252 // java.lang.ref.Reference visitor.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001253 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001254 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001255 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001256 mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001257 }
1258
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001259 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001260 ImageWriter* const image_writer_;
1261 mirror::Object* const copy_;
1262};
1263
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001264class FixupClassVisitor FINAL : public FixupVisitor {
1265 public:
1266 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1267 }
1268
Mathieu Chartierc7853442015-03-27 14:35:38 -07001269 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001270 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001271 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001272 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001273 }
1274
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001275 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1276 mirror::Reference* ref ATTRIBUTE_UNUSED) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001277 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001278 LOG(FATAL) << "Reference not expected here.";
1279 }
1280};
1281
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001282void* ImageWriter::NativeLocationInImage(void* obj) {
1283 if (obj == nullptr) {
1284 return nullptr;
1285 }
1286 auto it = native_object_relocations_.find(obj);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001287 CHECK(it != native_object_relocations_.end()) << obj;
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001288 const NativeObjectRelocation& relocation = it->second;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001289 return reinterpret_cast<void*>(image_begin_ + relocation.offset);
1290}
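// Used by FixupClass() below to relocate the native field and method arrays, e.g.:
//   copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
//       NativeLocationInImage(orig->GetSFieldsPtr())));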
1291
Mathieu Chartierc7853442015-03-27 14:35:38 -07001292void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001293 // Update the field arrays.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001294 copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
1295 NativeLocationInImage(orig->GetSFieldsPtr())));
1296 copy->SetIFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
1297 NativeLocationInImage(orig->GetIFieldsPtr())));
1298 // Update direct and virtual method arrays.
1299 copy->SetDirectMethodsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
1300 NativeLocationInImage(orig->GetDirectMethodsPtr())));
1301 copy->SetVirtualMethodsPtr(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
1302 NativeLocationInImage(orig->GetVirtualMethodsPtr())));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001303 // Fix up embedded tables.
1304 if (orig->ShouldHaveEmbeddedImtAndVTable()) {
1305 for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001306 auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
1307 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001308 copy->SetEmbeddedVTableEntryUnchecked(
1309 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
1310 }
1311 for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001312 auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
1313 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001314 copy->SetEmbeddedImTableEntry(
1315 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001316 }
1317 }
1318 FixupClassVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001319 static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001320}
1321
Ian Rogersef7d42f2014-01-06 12:55:46 -08001322void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001323 DCHECK(orig != nullptr);
1324 DCHECK(copy != nullptr);
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07001325 if (kUseBakerOrBrooksReadBarrier) {
1326 orig->AssertReadBarrierPointer();
1327 if (kUseBrooksReadBarrier) {
1328 // Note the address 'copy' isn't the same as the image address of 'orig'.
1329 copy->SetReadBarrierPointer(GetImageAddress(orig));
1330 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
1331 }
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08001332 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001333 auto* klass = orig->GetClass();
1334 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
1335 // Is this a native dex cache array?
1336 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
1337 if (it != pointer_arrays_.end()) {
1338 // Should only need to fixup every pointer array exactly once.
1339 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
1340 pointer_arrays_.erase(it);
1341 return;
1342 }
1343 CHECK(dex_cache_array_indexes_.find(orig) == dex_cache_array_indexes_.end())
1344 << "Should have been pointer array.";
1345 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001346 if (orig->IsClass()) {
1347 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001348 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001349 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
1350 // Need to go update the ArtMethod.
1351 auto* dest = down_cast<mirror::AbstractMethod*>(copy);
1352 auto* src = down_cast<mirror::AbstractMethod*>(orig);
1353 ArtMethod* src_method = src->GetArtMethod();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001354 auto it = native_object_relocations_.find(src_method);
1355 CHECK(it != native_object_relocations_.end())
1356 << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001357 dest->SetArtMethod(
1358 reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
Mathieu Chartier6b069532015-08-05 15:08:12 -07001359 } else if (!klass->IsArrayClass() && klass->IsSubClass(down_cast<mirror::Class*>(
1360 Thread::Current()->DecodeJObject(WellKnownClasses::java_lang_ClassLoader)))) {
1361 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
1362 // ClassLoader.
1363 down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001364 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001365 FixupVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001366 orig->VisitReferences(visitor, visitor);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001367 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001368}
1369
Mathieu Chartiere401d142015-04-22 13:56:20 -07001370const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001371 DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001372 !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001373
1374 // Use original code if it exists. Otherwise, set the code pointer to the resolution
1375 // trampoline.
1376
1377 // Quick entrypoint:
Jeff Haoc7d11882015-02-03 15:08:39 -08001378 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
1379 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
1380 const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001381 *quick_is_interpreted = false;
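  // Decision sketch for the branches below:
  //   code present, and non-static/constructor/initialized class -> use the oat code
  //   no code, native, non-static or initialized class           -> generic JNI trampoline
  //   no code, not native                                        -> quick-to-interpreter bridge
  //   otherwise (static method of an uninitialized class)        -> resolution trampoline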
Mathieu Chartiere401d142015-04-22 13:56:20 -07001382 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
1383 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001384 // We have code for a non-static or initialized method, just use the code.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001385 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001386 } else if (quick_code == nullptr && method->IsNative() &&
1387 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
1388 // Non-static or initialized native method missing compiled code, use generic JNI version.
1389 quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001390 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001391 } else if (quick_code == nullptr && !method->IsNative()) {
1392 // We don't have code at all for a non-native method, use the interpreter.
1393 quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
1394 *quick_is_interpreted = true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001395 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001396 } else {
1397 CHECK(!method->GetDeclaringClass()->IsInitialized());
1398 // We have code for a static method, but need to go through the resolution stub for class
1399 // initialization.
1400 quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001401 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001402 }
1403 return quick_code;
1404}
1405
Mathieu Chartiere401d142015-04-22 13:56:20 -07001406const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001407 // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
1408 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001409 Runtime* runtime = Runtime::Current();
1410 if (UNLIKELY(method == runtime->GetResolutionMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001411 return GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001412 } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
1413 method == runtime->GetImtUnimplementedMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001414 return GetOatAddress(quick_imt_conflict_trampoline_offset_);
1415 } else {
1416 // We assume all methods have code. If they don't currently then we set them to use the
1417 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1418 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1419 if (UNLIKELY(method->IsAbstract())) {
1420 return GetOatAddress(quick_to_interpreter_bridge_offset_);
1421 } else {
1422 bool quick_is_interpreted;
1423 return GetQuickCode(method, &quick_is_interpreted);
1424 }
1425 }
1426}
1427
Mathieu Chartiere401d142015-04-22 13:56:20 -07001428void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
Vladimir Marko14632852015-08-17 12:07:23 +01001429 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001430
1431 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
1432 copy->SetDexCacheResolvedMethods(GetImageAddress(orig->GetDexCacheResolvedMethods()));
1433 copy->SetDexCacheResolvedTypes(GetImageAddress(orig->GetDexCacheResolvedTypes()));
1434
Ian Rogers848871b2013-08-05 10:56:33 -07001435 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
1436 // oat_begin_.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001437
Ian Rogers848871b2013-08-05 10:56:33 -07001438 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001439 Runtime* runtime = Runtime::Current();
1440 if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001441 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001442 GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001443 } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
1444 orig == runtime->GetImtUnimplementedMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001445 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001446 GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001447 } else if (UNLIKELY(orig->IsRuntimeMethod())) {
1448 bool found_one = false;
1449 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
1450 auto idx = static_cast<Runtime::CalleeSaveType>(i);
1451 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
1452 found_one = true;
1453 break;
1454 }
1455 }
1456 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
1457 CHECK(copy->IsRuntimeMethod());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001458 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07001459 // We assume all methods have code. If they don't currently then we set them to use the
1460 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1461 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1462 if (UNLIKELY(orig->IsAbstract())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001463 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001464 GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001465 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001466 bool quick_is_interpreted;
Ian Rogers13735952014-10-08 12:43:28 -07001467 const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001468 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02001469
Sebastien Hertze1d07812014-05-21 15:44:09 +02001470 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07001471 if (orig->IsNative()) {
1472 // The native method's pointer is set to a stub to lookup via dlsym.
1473 // Note this is not the code_ pointer; that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001474 copy->SetEntryPointFromJniPtrSize(
1475 GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001476 }
1477 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001478 }
1479}
1480
Alex Lighta59dd802014-07-02 16:28:08 -07001481static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
Tong Shen62d1ca32014-09-03 17:24:56 -07001482 uint64_t data_sec_offset;
1483 bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
1484 if (!has_data_sec) {
Alex Lighta59dd802014-07-02 16:28:08 -07001485 return nullptr;
1486 }
Tong Shen62d1ca32014-09-03 17:24:56 -07001487 return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001488}
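// The OatHeader lives at the start of the oat file's .rodata section, so locating that
// section is enough to read the checksum that gets copied into the image header below.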
1489
Vladimir Markof4da6752014-08-01 19:04:18 +01001490void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
Alex Lighta59dd802014-07-02 16:28:08 -07001491 std::string error_msg;
1492 std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ|PROT_WRITE,
1493 MAP_SHARED, &error_msg));
1494 if (elf.get() == nullptr) {
Vladimir Markof4da6752014-08-01 19:04:18 +01001495 LOG(FATAL) << "Unable to open oat file: " << error_msg;
Alex Lighta59dd802014-07-02 16:28:08 -07001496 return;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001497 }
Alex Lighta59dd802014-07-02 16:28:08 -07001498 OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
1499 CHECK(oat_header != nullptr);
1500 CHECK(oat_header->IsValid());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001501
Brian Carlstrom7940e442013-07-12 13:46:57 -07001502 ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Alex Lighta59dd802014-07-02 16:28:08 -07001503 image_header->SetOatChecksum(oat_header->GetChecksum());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001504}
1505
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001506size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
1507 DCHECK_LE(up_to, kBinSize);
1508 return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
1509}
1510
1511ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
1512 // These values may need to be updated if more bins are added to the Bin enum.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001513 static_assert(kBinBits == 3, "wrong number of bin bits");
1514 static_assert(kBinShift == 27, "wrong bin shift");
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001515 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
1516
1517 DCHECK_LT(GetBin(), kBinSize);
1518 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
1519}
1520
1521ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
1522 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
1523 DCHECK_EQ(index, GetIndex());
1524}
1525
1526ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
1527 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
1528}
1529
1530uint32_t ImageWriter::BinSlot::GetIndex() const {
1531 return lockword_ & ~kBinMask;
1532}
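// Example: with kBinBits == 3 and kBinShift == 27, BinSlot(bin, index) packs
// (static_cast<uint32_t>(bin) << 27) | index into lockword_, which GetBin() and
// GetIndex() above unpack again.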
1533
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001534uint8_t* ImageWriter::GetOatFileBegin() const {
1535 DCHECK_GT(intern_table_bytes_, 0u);
1536 return image_begin_ + RoundUp(
1537 image_end_ + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
1538 bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize);
1539}
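// This mirrors the section layout from CreateHeader(): mirror objects, fields, methods and
// the intern table, rounded up to a page so the oat file can map right after the image.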
1540
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001541ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
1542 switch (type) {
1543 case kNativeObjectRelocationTypeArtField:
1544 case kNativeObjectRelocationTypeArtFieldArray:
1545 return kBinArtField;
1546 case kNativeObjectRelocationTypeArtMethodClean:
1547 case kNativeObjectRelocationTypeArtMethodArrayClean:
1548 return kBinArtMethodClean;
1549 case kNativeObjectRelocationTypeArtMethodDirty:
1550 case kNativeObjectRelocationTypeArtMethodArrayDirty:
1551 return kBinArtMethodDirty;
1552 }
1553 UNREACHABLE();
1554}
1555
Brian Carlstrom7940e442013-07-12 13:46:57 -07001556} // namespace art