/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "handle_scope-inl.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;
static constexpr bool kComputeEagerResolvedStrings = false;

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

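// Prepares the image layout end to end before any bytes are copied: prune classes that are not
// part of the image, compute lazy class name fields, optionally eagerly resolve strings, collect
// the garbage this creates, assign every surviving object a bin slot and then a final offset,
// and reserve the backing memory.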
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    Thread::Current()->TransitionFromSuspendedToRunnable();
    PruneNonImageClasses();  // Remove junk.
    ComputeLazyFieldsForImageClasses();  // Add useful information.

    // Calling this can in theory fill in some resolved strings. However, in practice it seems to
    // never resolve any.
    if (kComputeEagerResolvedStrings) {
      ComputeEagerResolvedStrings();
    }
    Thread::Current()->TransitionFromRunnableToSuspended(kNative);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of
  // mapped dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  Thread::Current()->TransitionFromSuspendedToRunnable();
  CalculateNewObjectOffsets();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_
  // and bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  CHECK_EQ(class_linker->RegisterOatFile(oat_file_), oat_file_);

  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);

  Thread::Current()->TransitionFromSuspendedToRunnable();

  CreateHeader(oat_loaded_size, oat_data_offset);
  CopyAndFixupNativeData();
  // TODO: heap validation can't handle these fix up passes.
  Runtime::Current()->GetHeap()->DisableObjectValidation();
  CopyAndFixupObjects();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields + methods.
  const auto write_count = image_header->GetImageSize();
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  const ImageSection& bitmap_section = image_header->GetImageSection(
      ImageHeader::kSectionImageBitmap);
  CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         bitmap_section.Size(), bitmap_section.Offset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}
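
// Resulting file layout, roughly (see the checks above; sizes are illustrative):
//
//   offset 0:            ImageHeader
//   ...                  mirror objects, ArtFields, ArtMethods, intern table
//                        (image_header->GetImageSize() bytes in total)
//   page-aligned offset: image bitmap (bitmap_section.Offset() / bitmap_section.Size())
//   file length:         == bitmap_section.End()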

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}
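
// SetImageOffset() above reuses the 32-bit lock word as a forwarding slot, so no side table is
// needed. A sketch with an illustrative value:
//
//   object->SetLockWord(LockWord::FromForwardingAddress(0x1000), false);
//   object->GetLockWord(false).ForwardingAddress();  // -> 0x1000
//
// Any hash code that used to live in the lock word was already saved into saved_hashcode_map_
// by SetImageBinSlot().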

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t previous_bin_sizes = bin_slot_previous_sizes_[bin_slot.GetBin()];
  size_t new_offset = image_objects_offset_begin_ + previous_bin_sizes + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  uint32_t size = 0u;
  for (size_t idx = 0; idx < dex_cache_count; ++idx) {
    DexCache* dex_cache = class_linker->GetDexCache(idx);
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    auto types_size = layout.TypesSize(dex_file->NumTypeIds());
    auto methods_size = layout.MethodsSize(dex_file->NumMethodIds());
    auto fields_size = layout.FieldsSize(dex_file->NumFieldIds());
    auto strings_size = layout.StringsSize(dex_file->NumStringIds());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedTypes(),
        DexCacheArrayLocation {size + layout.TypesOffset(), types_size, kBinRegular});
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedMethods(),
        DexCacheArrayLocation {size + layout.MethodsOffset(), methods_size, kBinArtMethodClean});
    AddMethodPointerArray(dex_cache->GetResolvedMethods());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedFields(),
        DexCacheArrayLocation {size + layout.FieldsOffset(), fields_size, kBinArtField});
    pointer_arrays_.emplace(dex_cache->GetResolvedFields(), kBinArtField);
    dex_cache_array_indexes_.Put(
        dex_cache->GetStrings(),
        DexCacheArrayLocation {size + layout.StringsOffset(), strings_size, kBinRegular});
    size += layout.Size();
    CHECK_EQ(layout.Size(), types_size + methods_size + fields_size + strings_size);
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}
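
// The kBinDexCacheArray bin therefore holds one fixed-layout chunk per dex file, back to back.
// With two dex files A and B (sizes illustrative):
//
//   start(A) = 0:               [types A][methods A][fields A][strings A]
//   start(B) = layout_A.Size(): [types B][methods B][fields B][strings B]
//
// dex_cache_array_starts_ records each start so that compiled code can reach the arrays with
// fixed PC-relative offsets.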

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        auto* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || IsImageClass(klass)) << PrettyClass(klass)
            << " should be an image class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the beginning.
    // * Classes which are verified [their <clinit> runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //     becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declaring classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->IsArrayInstance()) {
      mirror::Class* klass = object->GetClass<kVerifyNone>();
      if (klass->IsObjectArrayClass() || klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
        auto it = dex_cache_array_indexes_.find(object);
        if (it != dex_cache_array_indexes_.end()) {
          bin = kBinDexCacheArray;
          // Use the prepared offset defined by the DexCacheArraysLayout.
          current_offset = it->second.offset_;
          // Override the object size in case of cross compilation.
          object_size = it->second.length_;
        }  // else bin = kBinRegular
      }
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  if (bin != kBinDexCacheArray) {
    DCHECK(dex_cache_array_indexes_.find(object) == dex_cache_array_indexes_.end()) << object;
    current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
    // Move the current bin size up to accommodate the object we just assigned a bin slot.
    bin_slot_sizes_[bin] += offset_delta;
  }

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}
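
// A BinSlot packs the bin and the byte index within that bin into the 32 bits stashed in the
// lock word (the exact bit split is defined in image_writer.h). For example:
//
//   BinSlot slot(kBinString, 0x40);
//   slot.GetBin();    // -> kBinString
//   slot.GetIndex();  // -> 0x40
//
// The absolute image offset can only be computed once every bin's total size is known;
// AssignImageOffset() performs that conversion later.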

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized classes are highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t.
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() +
                                intern_table_bytes_, kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap; it only needs to cover the mirror object section, which ends at
  // image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap", image_->Begin(), RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}
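
// The single mapping above must hold the mirror objects plus the native bins (ArtFields,
// ArtMethods, dex cache arrays) and the serialized intern table. The bitmap can be smaller: it
// only marks the mirror object section.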

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

void ImageWriter::ComputeEagerResolvedStringsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED) {
  if (!obj->GetClass()->IsStringClass()) {
    return;
  }
  mirror::String* string = obj->AsString();
  const uint16_t* utf16_string = string->GetValue();
  size_t utf16_length = static_cast<size_t>(string->GetLength());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  for (size_t i = 0; i < dex_cache_count; ++i) {
    DexCache* dex_cache = class_linker->GetDexCache(i);
    const DexFile& dex_file = *dex_cache->GetDexFile();
    const DexFile::StringId* string_id;
    if (UNLIKELY(utf16_length == 0)) {
      string_id = dex_file.FindStringId("");
    } else {
      string_id = dex_file.FindStringId(utf16_string, utf16_length);
    }
    if (string_id != nullptr) {
      // This string occurs in this dex file, assign the dex cache entry.
      uint32_t string_idx = dex_file.GetIndexForStringId(*string_id);
      if (dex_cache->GetResolvedString(string_idx) == nullptr) {
        dex_cache->SetResolvedString(string_idx, string);
      }
    }
  }
}

void ImageWriter::ComputeEagerResolvedStrings() {
  Runtime::Current()->GetHeap()->VisitObjects(ComputeEagerResolvedStringsCallback, this);
}

bool ImageWriter::IsImageClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->IsImageClass(klass)) {
      std::string temp;
      non_image_classes_.insert(klass->GetDescriptor(&temp));
    }
    return true;
  }

  std::set<std::string> non_image_classes_;
  ImageWriter* const image_writer_;
};

void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : visitor.non_image_classes_) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  const ArtMethod* resolution_method = runtime->GetResolutionMethod();
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  for (size_t idx = 0; idx < dex_cache_count; ++idx) {
    DexCache* dex_cache;
    {
      ReaderMutexLock mu(self, *class_linker->DexLock());
      dex_cache = class_linker->GetDexCache(idx);
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    auto* resolved_methods = down_cast<mirror::PointerArray*>(dex_cache->GetResolvedMethods());
    for (size_t i = 0, len = resolved_methods->GetLength(); i < len; i++) {
      auto* method = resolved_methods->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but may have
        // a declaring class which is an image class. Set it to the resolution method to be safe
        // and prevent dangling pointers.
        if (method->IsMiranda() || !IsImageClass(declaring_class)) {
          resolved_methods->SetElementPtrSize(i, resolution_method, target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();
}
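
// Note that after pruning, a dex cache method entry whose declaring class was dropped points at
// the runtime's resolution method, so a stale entry re-resolves on first use instead of dangling
// into freed memory.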

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
                                               << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not already interned.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    // InternStrongImageString allows us to intern while holding the heap bitmap lock. This is
    // safe since we are guaranteed to not have GC during image writing.
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
        obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned string is after us; allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}
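
// The string case above canonicalizes duplicates at build time: a non-interned duplicate is
// given the interned copy's bin slot, so every reference collapses onto a single image string.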

ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches ObjectArray, we lock the
  // dex lock twice: first to get the number of dex caches, then again to copy them. We check
  // that the number of dex caches does not change in between.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    for (size_t i = 0; i < dex_cache_count; ++i) {
      dex_caches->Set<false>(i, class_linker->GetDexCache(i));
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  }
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    }
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
  }
}

// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields.
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects.
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // Visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
      // Visit and assign offsets for fields and field arrays.
      auto* as_klass = h_obj->AsClass();
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
                                                        << " already forwarded";
          size_t& offset = bin_slot_sizes_[kBinArtField];
          native_object_relocations_.emplace(
              cur_fields, NativeObjectRelocation {
                  offset, kNativeObjectRelocationTypeArtFieldArray });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
            // Need to forward arrays separate of fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            native_object_relocations_.emplace(
                field, NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      LengthPrefixedArray<ArtMethod>* method_arrays[] = {
          as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
      };
      for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
        if (array == nullptr) {
          continue;
        }
        bool any_dirty = false;
        size_t count = 0;
        const size_t method_size = ArtMethod::ObjectSize(target_ptr_size_);
        auto iteration_range = MakeIterationRangeFromLengthPrefixedArray(array, method_size);
        for (auto& m : iteration_range) {
          any_dirty = any_dirty || WillMethodBeDirty(&m);
          ++count;
        }
        NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
            kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0, method_size);
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end()) << "Method array " << array
                                                      << " already forwarded";
        size_t& offset = bin_slot_sizes_[bin_type];
        native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
            any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
                kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : iteration_range) {
          AssignMethodOffset(&m, type);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += count;
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}
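
// The walk above is depth-first: superclass fields are visited before the subclass's own, and
// referents right after their referrer, which keeps related objects on nearby image pages.
// Class objects also forward their native ArtField/ArtMethod arrays here; each array and each
// element is recorded in native_object_relocations_, keyed by its native pointer, with offsets
// that are still bin-relative at this point (see CalculateNewObjectOffsets for the final fix-up).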
906
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700907void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
908 auto it = native_object_relocations_.find(method);
909 CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
Mathieu Chartiere401d142015-04-22 13:56:20 -0700910 << PrettyMethod(method);
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700911 size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
912 native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
913 offset += ArtMethod::ObjectSize(target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700914}
915
Mathieu Chartier590fee92013-09-13 13:46:47 -0700916void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
917 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
918 DCHECK(writer != nullptr);
919 writer->WalkFieldsInOrder(obj);
920}
921
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800922void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
923 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
924 DCHECK(writer != nullptr);
925 writer->UnbinObjectsIntoOffset(obj);
926}
927
928void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
929 CHECK(obj != nullptr);
930
931 // We know the bin slot, and the total bin sizes for all objects by now,
932 // so calculate the object's final image offset.
933
934 DCHECK(IsImageBinSlotAssigned(obj));
935 BinSlot bin_slot = GetImageBinSlot(obj);
936 // Change the lockword from a bin slot into an offset
937 AssignImageOffset(obj, bin_slot);
938}
939
Vladimir Markof4da6752014-08-01 19:04:18 +0100940void ImageWriter::CalculateNewObjectOffsets() {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700941 Thread* const self = Thread::Current();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700942 StackHandleScope<1> hs(self);
943 Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700944
Mathieu Chartiere401d142015-04-22 13:56:20 -0700945 auto* runtime = Runtime::Current();
946 auto* heap = runtime->GetHeap();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700947 DCHECK_EQ(0U, image_end_);
948
Mathieu Chartier31e89252013-08-28 11:29:12 -0700949 // Leave space for the header, but do not write it yet, we need to
Brian Carlstrom7940e442013-07-12 13:46:57 -0700950 // know where image_roots is going to end up
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800951 image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit-alignment
Brian Carlstrom7940e442013-07-12 13:46:57 -0700952
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800953 image_objects_offset_begin_ = image_end_;
Vladimir Marko20f85592015-03-19 10:07:02 +0000954 // Prepare bin slots for dex cache arrays.
955 PrepareDexCacheArraySlots();
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800956 // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
957 heap->VisitObjects(WalkFieldsCallback, this);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700958 // Write the image runtime methods.
959 image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
960 image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
961 image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
962 image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
963 image_methods_[ImageHeader::kRefsOnlySaveMethod] =
964 runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
965 image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
966 runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700967
968 // Add room for fake length prefixed array.
969 const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
970 auto it = native_object_relocations_.find(&image_method_array_);
971 CHECK(it == native_object_relocations_.end());
972 size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
973 native_object_relocations_.emplace(&image_method_array_,
974 NativeObjectRelocation { offset, image_method_type });
975 CHECK_EQ(sizeof(image_method_array_), 8u);
976 offset += sizeof(image_method_array_);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700977 for (auto* m : image_methods_) {
978 CHECK(m != nullptr);
979 CHECK(m->IsRuntimeMethod());
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700980 AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700981 }
982
Vladimir Marko20f85592015-03-19 10:07:02 +0000983 // Calculate cumulative bin slot sizes.
984 size_t previous_sizes = 0u;
985 for (size_t i = 0; i != kBinSize; ++i) {
986 bin_slot_previous_sizes_[i] = previous_sizes;
987 previous_sizes += bin_slot_sizes_[i];
988 }
989 DCHECK_EQ(previous_sizes, GetBinSizeSum());
Mathieu Chartierc7853442015-03-27 14:35:38 -0700990 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
991
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800992 // Transform each object's bin slot into an offset which will be used to do the final copy.
993 heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700994
Mathieu Chartierc7853442015-03-27 14:35:38 -0700995 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800996
Vladimir Markof4da6752014-08-01 19:04:18 +0100997 image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));
998
Mathieu Chartiere401d142015-04-22 13:56:20 -0700999  // Update the native relocations by adding their bins' base offsets.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001000 for (auto& pair : native_object_relocations_) {
1001 NativeObjectRelocation& relocation = pair.second;
1002 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
1003 relocation.offset += image_objects_offset_begin_ + bin_slot_previous_sizes_[bin_type];
Mathieu Chartiere401d142015-04-22 13:56:20 -07001004 }
1005
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001006 // Calculate how big the intern table will be after being serialized.
1007 auto* const intern_table = Runtime::Current()->GetInternTable();
1008  CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strongly interned all the strings";
1009 intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
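  // A null buffer is expected to make WriteToMemory() only compute the required size;
  // the actual write happens in CopyAndFixupNativeData() and is checked against this value.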
1010
Mathieu Chartiere401d142015-04-22 13:56:20 -07001011  // Note that image_end_ is left at the end of the used mirror object section.
Vladimir Markof4da6752014-08-01 19:04:18 +01001012}
1013
1014void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
1015 CHECK_NE(0U, oat_loaded_size);
Ian Rogers13735952014-10-08 12:43:28 -07001016 const uint8_t* oat_file_begin = GetOatFileBegin();
1017 const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001018 oat_data_begin_ = oat_file_begin + oat_data_offset;
Ian Rogers13735952014-10-08 12:43:28 -07001019 const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001020
1021 // Create the image sections.
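  // Layout order within the image: objects, ArtFields, ArtMethods, interned strings,
  // and finally the bitmap.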
1022 ImageSection sections[ImageHeader::kSectionCount];
1023 // Objects section
1024 auto* objects_section = &sections[ImageHeader::kSectionObjects];
1025 *objects_section = ImageSection(0u, image_end_);
1026 size_t cur_pos = objects_section->End();
1027 // Add field section.
1028 auto* field_section = &sections[ImageHeader::kSectionArtFields];
1029 *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
1030 CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtField],
1031 field_section->Offset());
1032 cur_pos = field_section->End();
1033 // Add method section.
1034 auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
1035 *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
1036 bin_slot_sizes_[kBinArtMethodDirty]);
1037 CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtMethodClean],
1038 methods_section->Offset());
1039 cur_pos = methods_section->End();
Nicolas Geoffray7bf2b4f2015-07-08 10:11:59 +00001040 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
1041 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001042  // Add the interned strings section, using the size computed in CalculateNewObjectOffsets().
1043 auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
1044 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1045 cur_pos = interned_strings_section->End();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001046  // Finally, the bitmap section.
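  // Both its offset and size are page-aligned, presumably so the bitmap can be mapped
  // independently of the object data.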
Mathieu Chartierc7853442015-03-27 14:35:38 -07001047 const size_t bitmap_bytes = image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001048 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1049 *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1050 cur_pos = bitmap_section->End();
1051 if (kIsDebugBuild) {
1052 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001053 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001054 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1055 ++idx;
1056 }
1057 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1058 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001059 const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
1060 CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
1061 "Oat file should be right after the image.";
Mathieu Chartiere401d142015-04-22 13:56:20 -07001062 // Create the header.
1063 new (image_->Begin()) ImageHeader(
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001064 PointerToLowMemUInt32(image_begin_), image_end,
1065 sections, image_roots_address_, oat_file_->GetOatHeader().GetChecksum(),
Mathieu Chartiere401d142015-04-22 13:56:20 -07001066 PointerToLowMemUInt32(oat_file_begin), PointerToLowMemUInt32(oat_data_begin_),
1067 PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), target_ptr_size_,
1068 compile_pic_);
1069}
1070
1071ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001072 auto it = native_object_relocations_.find(method);
1073 CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001074 CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
1075 return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001076}
1077
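// Rewrites visited roots (here, the interned strings read back from the image copy) to
// point at their image addresses rather than their runtime heap addresses.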
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001078class FixupRootVisitor : public RootVisitor {
1079 public:
1080 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1081 }
1082
1083 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001084 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001085 for (size_t i = 0; i < count; ++i) {
1086 *roots[i] = ImageAddress(*roots[i]);
1087 }
1088 }
1089
1090 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1091 const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001092 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001093 for (size_t i = 0; i < count; ++i) {
1094 roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
1095 }
1096 }
1097
1098 private:
1099 ImageWriter* const image_writer_;
1100
Mathieu Chartier90443472015-07-16 20:32:27 -07001101 mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001102 const size_t offset = image_writer_->GetImageOffset(obj);
1103 auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
1104 VLOG(compiler) << "Update root from " << obj << " to " << dest;
1105 return dest;
1106 }
1107};
1108
Mathieu Chartierc7853442015-03-27 14:35:38 -07001109void ImageWriter::CopyAndFixupNativeData() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001110  // Copy ArtFields and ArtMethods (and their array headers) to their image locations.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001111 for (auto& pair : native_object_relocations_) {
1112 NativeObjectRelocation& relocation = pair.second;
1113 auto* dest = image_->Begin() + relocation.offset;
1114 DCHECK_GE(dest, image_->Begin() + image_end_);
1115 switch (relocation.type) {
1116 case kNativeObjectRelocationTypeArtField: {
1117 memcpy(dest, pair.first, sizeof(ArtField));
1118 reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
1119 GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
1120 break;
1121 }
1122 case kNativeObjectRelocationTypeArtMethodClean:
1123 case kNativeObjectRelocationTypeArtMethodDirty: {
1124 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1125 reinterpret_cast<ArtMethod*>(dest));
1126 break;
1127 }
1128 // For arrays, copy just the header since the elements will get copied by their corresponding
1129 // relocations.
1130 case kNativeObjectRelocationTypeArtFieldArray: {
1131 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1132 break;
1133 }
1134 case kNativeObjectRelocationTypeArtMethodArrayClean:
1135 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
1136 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0));
1137 break;
1138 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001139 }
1140 }
1141 // Fixup the image method roots.
1142 auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001143 const ImageSection& methods_section = image_header->GetMethodsSection();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001144 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
1145 auto* m = image_methods_[i];
1146 CHECK(m != nullptr);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001147 auto it = native_object_relocations_.find(m);
1148    CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
1149 NativeObjectRelocation& relocation = it->second;
1150 CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
Mathieu Chartiere401d142015-04-22 13:56:20 -07001151 << methods_section;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001152 CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001153 auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
1154 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001155 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001156 // Write the intern table into the image.
1157 const ImageSection& intern_table_section = image_header->GetImageSection(
1158 ImageHeader::kSectionInternedStrings);
1159 InternTable* const intern_table = Runtime::Current()->GetInternTable();
1160 uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
1161 const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
1162 // Fixup the pointers in the newly written intern table to contain image addresses.
1163 InternTable temp_table;
1164  // Note that this requires ReadFromMemory() not to make an internal copy of the elements,
1165  // so that VisitRoots() updates the serialized memory directly rather than a copy.
1166  // It also relies on VisitRoots() not doing any verification, which could fail once the
1167  // roots have been updated to image addresses.
1168 temp_table.ReadFromMemory(memory_ptr);
1169 CHECK_EQ(temp_table.Size(), intern_table->Size());
1170 FixupRootVisitor visitor(this);
1171 temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
1172 CHECK_EQ(intern_table_bytes, intern_table_bytes_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001173}
1174
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001175void ImageWriter::CopyAndFixupObjects() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001176 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001177 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
1178  // Fix up the objects which previously had hash codes.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001179 for (const auto& hash_pair : saved_hashcode_map_) {
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001180 Object* obj = hash_pair.first;
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001181 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
1182 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001183 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001184 saved_hashcode_map_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001185}
1186
Mathieu Chartier590fee92013-09-13 13:46:47 -07001187void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
Mathieu Chartier4d7f61d2014-04-17 14:43:39 -07001188 DCHECK(obj != nullptr);
1189 DCHECK(arg != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001190 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
1191}
1192
Mathieu Chartiere401d142015-04-22 13:56:20 -07001193void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
1194 mirror::Class* klass, Bin array_type) {
1195 CHECK(klass->IsArrayClass());
1196 CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
1197 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
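  // Dex cache arrays hold native pointers, so they live on the managed heap as int[]
  // on 32-bit targets and long[] on 64-bit targets.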
Mathieu Chartierc7853442015-03-27 14:35:38 -07001198 const size_t num_elements = arr->GetLength();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001199 dst->SetClass(GetImageAddress(arr->GetClass()));
1200 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001201 for (size_t i = 0, count = num_elements; i < count; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001202 auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
1203 if (elem != nullptr) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001204 auto it = native_object_relocations_.find(elem);
1205 if (it == native_object_relocations_.end()) {
1206      if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001207 auto* method = reinterpret_cast<ArtMethod*>(elem);
1208 LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
1209 << method << " idx=" << i << "/" << num_elements << " with declaring class "
1210 << PrettyClass(method->GetDeclaringClass());
1211 } else {
1212 CHECK_EQ(array_type, kBinArtField);
1213 auto* field = reinterpret_cast<ArtField*>(elem);
1214 LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
1215 << field << " idx=" << i << "/" << num_elements << " with declaring class "
1216 << PrettyClass(field->GetDeclaringClass());
1217 }
1218 } else {
1219 elem = image_begin_ + it->second.offset;
1220 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001221 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001222 dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001223 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001224}
1225
1226void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001227 size_t offset = GetImageOffset(obj);
1228 auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001229 DCHECK_LT(offset, image_end_);
1230 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001231
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001232 image_bitmap_->Set(dst); // Mark the obj as live.
1233
1234 const size_t n = obj->SizeOf();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001235 DCHECK_LE(offset + n, image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001236 memcpy(dst, src, n);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001237
Mathieu Chartierad2541a2013-10-25 10:05:23 -07001238  // Restore the hash code for objects which had inflated monitors or a hash code in their
1239  // monitor word.
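  // During layout the source object's lock word was repurposed to hold its bin slot or
  // forwarding offset, so the copy always gets a fresh lock word here.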
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001240 const auto it = saved_hashcode_map_.find(obj);
1241 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1242 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001243 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001244}
1245
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001246// Rewrite all the references in the copied object to point to their image address equivalent
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001247class FixupVisitor {
1248 public:
1249 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1250 }
1251
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001252 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1253 // with other logic.
1254 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1255 const {}
1256 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1257
1258
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001259 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001260 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Hiroshi Yamauchi6e83c172014-05-01 21:25:41 -07001261 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001262 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1263 // image.
1264 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001265 offset, image_writer_->GetImageAddress(ref));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001266 }
1267
1268 // java.lang.ref.Reference visitor.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001269 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001270 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001271 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001272 mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001273 }
1274
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001275 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001276 ImageWriter* const image_writer_;
1277 mirror::Object* const copy_;
1278};
1279
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001280class FixupClassVisitor FINAL : public FixupVisitor {
1281 public:
1282 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1283 }
1284
Mathieu Chartierc7853442015-03-27 14:35:38 -07001285 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001286 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001287 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001288 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001289 }
1290
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001291 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1292 mirror::Reference* ref ATTRIBUTE_UNUSED) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001293 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001294 LOG(FATAL) << "Reference not expected here.";
1295 }
1296};
1297
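// Maps a native object (an ArtField/ArtMethod or a length-prefixed array of them) from
// its runtime address to the address it will occupy in the image.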
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001298void* ImageWriter::NativeLocationInImage(void* obj) {
1299 if (obj == nullptr) {
1300 return nullptr;
1301 }
1302 auto it = native_object_relocations_.find(obj);
1303  auto it = native_object_relocations_.find(obj);
1304  CHECK(it != native_object_relocations_.end()) << obj;
1305  const NativeObjectRelocation& relocation = it->second;
1305 return reinterpret_cast<void*>(image_begin_ + relocation.offset);
1306}
1307
Mathieu Chartierc7853442015-03-27 14:35:38 -07001308void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001309 // Update the field arrays.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001310 copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
1311 NativeLocationInImage(orig->GetSFieldsPtr())));
1312 copy->SetIFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
1313 NativeLocationInImage(orig->GetIFieldsPtr())));
1314 // Update direct and virtual method arrays.
1315 copy->SetDirectMethodsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
1316 NativeLocationInImage(orig->GetDirectMethodsPtr())));
1317 copy->SetVirtualMethodsPtr(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
1318 NativeLocationInImage(orig->GetVirtualMethodsPtr())));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001319 // Fix up embedded tables.
1320 if (orig->ShouldHaveEmbeddedImtAndVTable()) {
1321 for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001322 auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
1323 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001324 copy->SetEmbeddedVTableEntryUnchecked(
1325 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
1326 }
1327 for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001328 auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
1329 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001330 copy->SetEmbeddedImTableEntry(
1331 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001332 }
1333 }
1334 FixupClassVisitor visitor(this, copy);
1335 static_cast<mirror::Object*>(orig)->VisitReferences<true /*visit class*/>(visitor, visitor);
1336}
1337
Ian Rogersef7d42f2014-01-06 12:55:46 -08001338void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001339 DCHECK(orig != nullptr);
1340 DCHECK(copy != nullptr);
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07001341 if (kUseBakerOrBrooksReadBarrier) {
1342 orig->AssertReadBarrierPointer();
1343 if (kUseBrooksReadBarrier) {
1344 // Note the address 'copy' isn't the same as the image address of 'orig'.
1345 copy->SetReadBarrierPointer(GetImageAddress(orig));
1346 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
1347 }
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08001348 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001349 auto* klass = orig->GetClass();
1350 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
1351 // Is this a native dex cache array?
1352 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
1353 if (it != pointer_arrays_.end()) {
1354 // Should only need to fixup every pointer array exactly once.
1355 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
1356 pointer_arrays_.erase(it);
1357 return;
1358 }
1359 CHECK(dex_cache_array_indexes_.find(orig) == dex_cache_array_indexes_.end())
1360 << "Should have been pointer array.";
1361 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001362 if (orig->IsClass()) {
1363 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001364 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001365 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
1366 // Need to go update the ArtMethod.
1367 auto* dest = down_cast<mirror::AbstractMethod*>(copy);
1368 auto* src = down_cast<mirror::AbstractMethod*>(orig);
1369 ArtMethod* src_method = src->GetArtMethod();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001370 auto it = native_object_relocations_.find(src_method);
1371 CHECK(it != native_object_relocations_.end())
1372 << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001373 dest->SetArtMethod(
1374 reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
1375 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001376 FixupVisitor visitor(this, copy);
1377 orig->VisitReferences<true /*visit class*/>(visitor, visitor);
1378 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001379}
1380
Mathieu Chartiere401d142015-04-22 13:56:20 -07001381const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001382 DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001383 !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001384
1385 // Use original code if it exists. Otherwise, set the code pointer to the resolution
1386 // trampoline.
1387
1388 // Quick entrypoint:
Jeff Haoc7d11882015-02-03 15:08:39 -08001389 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
1390 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
1391 const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001392 *quick_is_interpreted = false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001393 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
1394 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001395 // We have code for a non-static or initialized method, just use the code.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001396 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001397 } else if (quick_code == nullptr && method->IsNative() &&
1398 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
1399 // Non-static or initialized native method missing compiled code, use generic JNI version.
1400 quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001401 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001402 } else if (quick_code == nullptr && !method->IsNative()) {
1403 // We don't have code at all for a non-native method, use the interpreter.
1404 quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
1405 *quick_is_interpreted = true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001406 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001407 } else {
1408 CHECK(!method->GetDeclaringClass()->IsInitialized());
1409 // We have code for a static method, but need to go through the resolution stub for class
1410 // initialization.
1411 quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001412 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001413 }
1414 return quick_code;
1415}
1416
Mathieu Chartiere401d142015-04-22 13:56:20 -07001417const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001418  // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
1419 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001420 Runtime* runtime = Runtime::Current();
1421 if (UNLIKELY(method == runtime->GetResolutionMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001422 return GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001423 } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
1424 method == runtime->GetImtUnimplementedMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001425 return GetOatAddress(quick_imt_conflict_trampoline_offset_);
1426 } else {
1427    // We assume all methods have code. If they don't currently then we set them to use the
1428 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1429 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1430 if (UNLIKELY(method->IsAbstract())) {
1431 return GetOatAddress(quick_to_interpreter_bridge_offset_);
1432 } else {
1433 bool quick_is_interpreted;
1434 return GetQuickCode(method, &quick_is_interpreted);
1435 }
1436 }
1437}
1438
Mathieu Chartiere401d142015-04-22 13:56:20 -07001439void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
1440 memcpy(copy, orig, ArtMethod::ObjectSize(target_ptr_size_));
1441
1442 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
1443 copy->SetDexCacheResolvedMethods(GetImageAddress(orig->GetDexCacheResolvedMethods()));
1444 copy->SetDexCacheResolvedTypes(GetImageAddress(orig->GetDexCacheResolvedTypes()));
1445
Ian Rogers848871b2013-08-05 10:56:33 -07001446 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
1447  // oat_begin_.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001448
Ian Rogers848871b2013-08-05 10:56:33 -07001449 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001450 Runtime* runtime = Runtime::Current();
1451 if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001452 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001453 GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001454 } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
1455 orig == runtime->GetImtUnimplementedMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001456 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001457 GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001458 } else if (UNLIKELY(orig->IsRuntimeMethod())) {
1459 bool found_one = false;
1460 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
1461 auto idx = static_cast<Runtime::CalleeSaveType>(i);
1462 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
1463 found_one = true;
1464 break;
1465 }
1466 }
1467 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
1468 CHECK(copy->IsRuntimeMethod());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001469 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07001470    // We assume all methods have code. If they don't currently then we set them to use the
1471 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1472 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1473 if (UNLIKELY(orig->IsAbstract())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001474 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001475 GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001476 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001477 bool quick_is_interpreted;
Ian Rogers13735952014-10-08 12:43:28 -07001478 const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001479 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02001480
Sebastien Hertze1d07812014-05-21 15:44:09 +02001481 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07001482 if (orig->IsNative()) {
1483        // The native method's pointer is set to a stub that performs the lookup via dlsym.
1484 // Note this is not the code_ pointer, that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001485 copy->SetEntryPointFromJniPtrSize(
1486 GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001487 }
1488 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001489 }
1490}
1491
Alex Lighta59dd802014-07-02 16:28:08 -07001492static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
Tong Shen62d1ca32014-09-03 17:24:56 -07001493 uint64_t data_sec_offset;
1494 bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
1495 if (!has_data_sec) {
Alex Lighta59dd802014-07-02 16:28:08 -07001496 return nullptr;
1497 }
Tong Shen62d1ca32014-09-03 17:24:56 -07001498 return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001499}
1500
Vladimir Markof4da6752014-08-01 19:04:18 +01001501void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
Alex Lighta59dd802014-07-02 16:28:08 -07001502 std::string error_msg;
1503 std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ|PROT_WRITE,
1504 MAP_SHARED, &error_msg));
1505 if (elf.get() == nullptr) {
Vladimir Markof4da6752014-08-01 19:04:18 +01001506    LOG(FATAL) << "Unable to open oat file: " << error_msg;
Alex Lighta59dd802014-07-02 16:28:08 -07001507 return;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001508 }
Alex Lighta59dd802014-07-02 16:28:08 -07001509 OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
1510 CHECK(oat_header != nullptr);
1511 CHECK(oat_header->IsValid());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001512
Brian Carlstrom7940e442013-07-12 13:46:57 -07001513 ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Alex Lighta59dd802014-07-02 16:28:08 -07001514 image_header->SetOatChecksum(oat_header->GetChecksum());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001515}
1516
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001517size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
1518 DCHECK_LE(up_to, kBinSize);
1519  return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/ size_t{0});
1520}
1521
1522ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
1523  // These values may need to get updated if more bins are added to the enum Bin.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001524  static_assert(kBinBits == 3, "wrong number of bin bits");
1525  static_assert(kBinShift == 27, "wrong bin shift");
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001526 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
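  // Given the asserts above, the packing is: bits [29:27] hold the bin and bits [26:0]
  // hold the byte index; the remaining high bits are presumably reserved for lock word
  // state.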
1527
1528 DCHECK_LT(GetBin(), kBinSize);
1529 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
1530}
1531
1532ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
1533 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
1534 DCHECK_EQ(index, GetIndex());
1535}
1536
1537ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
1538 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
1539}
1540
1541uint32_t ImageWriter::BinSlot::GetIndex() const {
1542 return lockword_ & ~kBinMask;
1543}
1544
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001545uint8_t* ImageWriter::GetOatFileBegin() const {
1546 DCHECK_GT(intern_table_bytes_, 0u);
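  // The oat file begins at the first page boundary past the mirror objects, the native
  // field/method bins, and the serialized intern table.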
1547 return image_begin_ + RoundUp(
1548 image_end_ + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
1549 bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize);
1550}
1551
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001552ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
1553 switch (type) {
1554 case kNativeObjectRelocationTypeArtField:
1555 case kNativeObjectRelocationTypeArtFieldArray:
1556 return kBinArtField;
1557 case kNativeObjectRelocationTypeArtMethodClean:
1558 case kNativeObjectRelocationTypeArtMethodArrayClean:
1559 return kBinArtMethodClean;
1560 case kNativeObjectRelocationTypeArtMethodDirty:
1561 case kNativeObjectRelocationTypeArtMethodArrayDirty:
1562 return kBinArtMethodDirty;
1563 }
1564 UNREACHABLE();
1565}
1566
Brian Carlstrom7940e442013-07-12 13:46:57 -07001567} // namespace art