/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>
#include <lz4.h>
#include <lz4hc.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "dex_file_types.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "gc/verification.h"
#include "globals.h"
#include "handle_scope-inl.h"
#include "image.h"
#include "imt_conflict_table.h"
#include "jni_internal.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in a boot image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {
      return true;
    }
  }
  return false;
}

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
      return true;
    }
  }
  return false;
}

static void ClearDexFileCookieCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(obj != nullptr);
  Class* klass = obj->GetClass();
  if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
    ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
    // Null out the cookie to enable determinism. b/34090128
    field->SetObject</*kTransactionActive*/false>(obj, nullptr);
  }
}

static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime::Current()->GetHeap()->VisitObjects(ClearDexFileCookieCallback, nullptr);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    if (compile_app_image_) {
      // Clear dex file cookies for app images to enable app image determinism. This is required
      // since the cookie field contains long pointers to DexFiles which are not deterministic.
      // b/34090128
      ClearDexFileCookies();
    } else {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information
    }
  }
  heap->CollectGarbage(false);  // Remove garbage.

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}
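
// The preparation pipeline above is, in order: prune classes that must not end up in the image,
// then either clear DexFile cookies (app images, for determinism) or pre-compute lazy class names
// (boot image), collect garbage so pruned objects do not linger, compute the new object offsets,
// and finally reserve the anonymous memory the image contents will be copied into.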

bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or
  // oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

  {
    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CreateHeader(i);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        }
      } else {
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      image_file->Erase();
      return false;
    }

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);
    size_t data_size;
    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();

    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4HC:  // Fall-through.
      case ImageHeader::kStorageModeLZ4: {
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compress_default(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size,
            compressed_max_size);

        break;
      }
      /*
       * Disabled due to image_test64 flakiness. Both use same decompression. b/27560444
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is same as non HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);
        break;
      }
      */
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
        break;
      }
      default: {
        LOG(FATAL) << "Unsupported";
        UNREACHABLE();
      }
    }

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
      if (kIsDebugBuild) {
        std::unique_ptr<uint8_t[]> temp(new uint8_t[image_data_size]);
        const size_t decompressed_size = LZ4_decompress_safe(
            reinterpret_cast<char*>(&compressed_data[0]),
            reinterpret_cast<char*>(&temp[0]),
            data_size,
            image_data_size);
        CHECK_EQ(decompressed_size, image_data_size);
        CHECK_EQ(memcmp(image_data, &temp[0], image_data_size), 0) << image_storage_mode_;
      }
    }

    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;
      image_file->Erase();
      return false;
    }

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
    // convenience.
    const ImageSection& bitmap_section = image_header->GetImageSection(
        ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    }
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;
      image_file->Erase();
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      image_file->Erase();
      return false;
    }

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
                                 sizeof(ImageHeader),
                                 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      image_file->Erase();
      return false;
    }

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }
  }
  return true;
}
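
// Rough sketch of the resulting file layout for each image, as produced by the code above
// (the exact section offsets are recorded in the ImageHeader itself):
//
//   [0, sizeof(ImageHeader))                                   header, uncompressed, written last
//   [sizeof(ImageHeader), sizeof(ImageHeader) + data_size)     image data, LZ4-compressed or raw
//   [RoundUp(sizeof(ImageHeader) + data_size, kPageSize), ...) image bitmap, always uncompressed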

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);
}
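
// Illustrative example of the arithmetic above (made-up numbers): if bin_slot_offsets_[kBinString]
// is 0x4000 in this image and the object was given index 0x120 within that bin, its final image
// offset becomes 0x4120, always a multiple of kObjectAlignment. That offset replaces the bin slot
// previously stashed in the lock word.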

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked:
      FALLTHROUGH_INTENDED;
    case LockWord::kThinLocked: {
      std::ostringstream oss;
      bool thin = (lw.GetState() == LockWord::kThinLocked);
      oss << (thin ? "Thin" : "Fat")
          << " locked object " << object << "(" << object->PrettyTypeOf()
          << ") found during object copy";
      if (thin) {
        oss << ". Lock owner:" << lw.ThinLockOwner();
      }
      LOG(FATAL) << oss.str();
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}
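
// The lock word is the only per-object word that can be borrowed without growing every object,
// so both the intermediate bin slot and, later, the final image offset are stored in it as
// forwarding addresses. Hash codes displaced here are remembered in saved_hashcode_map_ and are
// expected to be written back into the copied objects later in the writing process (that step is
// outside this excerpt).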

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(dex_file, image_info.bin_slot_sizes_[kBinDexCacheArray]);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.bin_slot_sizes_[kBinDexCacheArray] += layout.Size();
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::dex_lock_);
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    ObjPtr<mirror::DexCache> dex_cache =
        ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache.Ptr())) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
        << "Dex cache should have been pruned " << dex_file->GetLocation()
        << "; possibly in class path";
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);

    if (dex_cache->GetResolvedMethodTypes() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
                                 start + layout.MethodTypesOffset(),
                                 dex_cache);
    }
    if (dex_cache->GetResolvedCallSites() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedCallSites(),
                                 start + layout.CallSitesOffset(),
                                 dex_cache);
    }
  }
}

void ImageWriter::AddDexCacheArrayRelocation(void* array,
                                             size_t offset,
                                             ObjPtr<mirror::DexCache> dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(array,
        NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << Class::PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    //  * Strings (they are immutable)
    //  * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->GetClass<kVerifyNone>() ==
        Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangObject)) {
      // Instance of java lang object, probably a lock object. This means it will be dirty when we
      // synchronize on it.
      bin = kBinMiscDirty;
    } else if (object->IsDexCache()) {
      // Dex file field becomes dirty when the image is loaded.
      bin = kBinMiscDirty;
    }
    // else bin = kBinRegular
  }
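  // To summarize the dispatch above: Class objects land in kBinClassVerified,
  // kBinClassInitialized or kBinClassInitializedFinalStatics depending on their status and
  // statics; Strings go to kBinString; plain java.lang.Object instances and DexCaches go to
  // kBinMiscDirty; everything else stays in kBinRegular.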

  // Assign the oat index too.
  DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
  oat_index_map_.emplace(object, oat_index);

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = image_info.bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++image_info.bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}
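
// Worked example of the bookkeeping above (illustrative sizes): a 20-byte String rounds up to an
// offset_delta of 24 bytes with 8-byte kObjectAlignment; if bin_slot_sizes_[kBinString] was 96,
// the object gets BinSlot(kBinString, 96), the bin grows to 120, and image_end_ advances by 24.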

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // An initialized class is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(unused_sections), kPageSize);

    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 nullptr,
                                                 length,
                                                 PROT_READ | PROT_WRITE,
                                                 false,
                                                 false,
                                                 &error_msg));
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

class ImageWriter::ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool operator()(ObjPtr<Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

// This visitor follows the references of an instance recursively and prunes this class if the
// class of any referenced object is pruned.
class ImageWriter::PruneObjectReferenceVisitor {
 public:
  PruneObjectReferenceVisitor(ImageWriter* image_writer,
                              bool* early_exit,
                              std::unordered_set<mirror::Object*>* visited,
                              bool* result)
      : image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {}

  ALWAYS_INLINE void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void VisitRoot(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
                                 MemberOffset offset,
                                 bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    if (ref == nullptr || visited_->find(ref) != visited_->end()) {
      return;
    }

    ObjPtr<mirror::Class> klass = ref->IsClass() ? ref->AsClass() : ref->GetClass();
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Prune all classes using reflection because the content they hold will not be fixed up.
      *result_ = true;
    }

    // Record the object visited in case of circular reference.
    visited_->emplace(ref);
    if (ref->IsClass()) {
      *result_ = *result_ ||
          image_writer_->PruneAppImageClassInternal(ref->AsClass(), early_exit_, visited_);
    } else {
      *result_ = *result_ ||
          image_writer_->PruneAppImageClassInternal(klass, early_exit_, visited_);
      ref->VisitReferences(*this, *this);
    }
    // Clean up before exit for next call of this function.
    visited_->erase(ref);
  }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                 ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

  ALWAYS_INLINE bool GetResult() const {
    return *result_;
  }

 private:
  ImageWriter* image_writer_;
  bool* early_exit_;
  std::unordered_set<mirror::Object*>* visited_;
  bool* const result_;
};

bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Object*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);
}

bool ImageWriter::PruneAppImageClassInternal(
    ObjPtr<mirror::Class> klass,
    bool* early_exit,
    std::unordered_set<mirror::Object*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
    return false;
  }
  auto found = prune_class_memo_.find(klass.Ptr());
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependencies, return false but do not store the result in the memoization table.
  if (visited->find(klass.Ptr()) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass.Ptr());
  bool result = IsBootClassLoaderClass(klass);
  std::string temp;
  // Prune if not an image class, this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // app image.
  if (klass->IsErroneous()) {
    result = true;
  } else {
    ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
    CHECK(ext.IsNull() || ext->GetVerifyError() == nullptr) << klass->PrettyClass();
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
                                                    &my_early_exit,
                                                    visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
                                                  &my_early_exit,
                                                  visited);
  }
  // Check static fields and their classes.
  if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
    size_t num_static_fields = klass->NumReferenceStaticFields();
    // Presumably GC can happen when we are cross compiling, it should not cause performance
    // problems to do pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
                                                        &my_early_exit,
                                                        visited);
        } else {
          mirror::Class* type = ref->GetClass();
          result = result || PruneAppImageClassInternal(type,
                                                        &my_early_exit,
                                                        visited);
          if (!result) {
            // For the non-class case, also go through all the types mentioned by its fields'
            // references recursively to decide whether to keep this class.
            bool tmp = false;
            PruneObjectReferenceVisitor visitor(this, &my_early_exit, visited, &tmp);
            ref->VisitReferences(visitor, visitor);
            result = result || tmp;
          }
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
                                                &my_early_exit,
                                                visited);
  // Remove the class if the dex file is not in the set of dex files. This happens for classes that
  // are from uses-library if there is no profile. b/30688277
  mirror::DexCache* dex_cache = klass->GetDexCache();
  if (dex_cache != nullptr) {
    result = result ||
        dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
  }
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass.Ptr());
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller, in this case the cycle was in
  // a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass.Ptr()] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}
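
// In short, a class is pruned from an app image if any of the following holds, transitively
// through its superclass, interfaces, component type and static field references: it is a boot
// class loader class that is not in the boot image, it is not in the compiler driver's image
// class set, it failed verification, or its dex file is not among the dex files being compiled.
// Results are memoized in prune_class_memo_, except along cycles where an early exit is signaled.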

bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in boot image, return true.
    return true;
  }
  std::string temp;
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);
  }
  return true;
}

class ImageWriter::PruneClassesVisitor : public ClassVisitor {
 public:
  PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
      : image_writer_(image_writer),
        class_loader_(class_loader),
        classes_to_prune_(),
        defined_class_count_(0u) { }

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass.Ptr())) {
      classes_to_prune_.insert(klass.Ptr());
      if (klass->GetClassLoader() == class_loader_) {
        ++defined_class_count_;
      }
    }
    return true;
  }

  size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
    for (mirror::Class* klass : classes_to_prune_) {
      std::string storage;
      const char* descriptor = klass->GetDescriptor(&storage);
      bool result = class_table->Remove(descriptor);
      DCHECK(result);
      DCHECK(!class_table->Remove(descriptor)) << descriptor;
    }
    return defined_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  const ObjPtr<mirror::ClassLoader> class_loader_;
  std::unordered_set<mirror::Class*> classes_to_prune_;
  size_t defined_class_count_;
};

class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
      : image_writer_(image_writer), removed_class_count_(0) {}

  virtual void Visit(ObjPtr<mirror::ClassLoader> class_loader) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    PruneClassesVisitor classes_visitor(image_writer_, class_loader);
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
    class_table->Visit(classes_visitor);
    removed_class_count_ += classes_visitor.Prune();

    // Record the app image class loader. The fake boot class loader should not get registered,
    // so we should end up with only one class loader for an app and none for the boot image.
    if (class_loader != nullptr && class_table != nullptr) {
      DCHECK(class_loader_ == nullptr);
      class_loader_ = class_loader;
    }
  }

  size_t GetRemovedClassCount() const {
    return removed_class_count_;
  }

  ObjPtr<mirror::ClassLoader> GetClassLoader() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return class_loader_;
  }

 private:
  ImageWriter* const image_writer_;
  size_t removed_class_count_;
  ObjPtr<mirror::ClassLoader> class_loader_;
};

void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  visitor->Visit(nullptr);  // Visit boot class loader.
  Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}

void ImageWriter::PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                          ObjPtr<mirror::ClassLoader> class_loader) {
  // To ensure deterministic contents of the hash-based arrays, each slot shall contain
  // the candidate with the lowest index. As we're processing entries in increasing index
  // order, this means trying to look up the entry for the current index if the slot is
  // empty or if it contains a higher index.
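  // For example (hypothetical indexes): if field ids 3 and 41 hash to the same slot, the
  // field-pruning loop below makes sure the slot ends up describing field id 3, regardless of
  // which of the two happened to be resolved during compilation.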
1024
1025 Runtime* runtime = Runtime::Current();
1026 ClassLinker* class_linker = runtime->GetClassLinker();
1027 ArtMethod* resolution_method = runtime->GetResolutionMethod();
1028 const DexFile& dex_file = *dex_cache->GetDexFile();
1029 // Prune methods.
1030 ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
1031 for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
1032 ArtMethod* method =
1033 mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
1034 DCHECK(method != nullptr) << "Expected resolution method instead of null method";
Vladimir Marko3b155452017-07-05 11:41:33 +01001035 // Check if the referenced class is in the image. Note that we want to check the referenced
1036 // class rather than the declaring class to preserve the semantics, i.e. using a MethodId
1037 // results in resolving the referenced class and that can for example throw OOME.
1038 ObjPtr<mirror::Class> referencing_class = class_linker->LookupResolvedType(
1039 dex_file,
1040 dex_file.GetMethodId(i).class_idx_,
1041 dex_cache,
1042 class_loader);
Vladimir Markof25cc732017-03-16 16:18:15 +00001043    // Copied methods may be held live by a class which is not an image class but may have a
1044    // declaring class which is an image class. Set such entries to the resolution method to be
1045    // safe and prevent dangling pointers.
Vladimir Marko3b155452017-07-05 11:41:33 +01001046 if (method->IsCopied() || !KeepClass(referencing_class)) {
Vladimir Markof25cc732017-03-16 16:18:15 +00001047 mirror::DexCache::SetElementPtrSize(resolved_methods,
1048 i,
1049 resolution_method,
1050 target_ptr_size_);
1051 } else if (kIsDebugBuild) {
1052 // Check that the class is still in the classes table.
1053 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Vladimir Marko3b155452017-07-05 11:41:33 +01001054 CHECK(class_linker->ClassInClassTable(referencing_class)) << "Class "
1055 << Class::PrettyClass(referencing_class) << " not in class linker table";
Vladimir Markof25cc732017-03-16 16:18:15 +00001056 }
1057 }
1058 // Prune fields and make the contents of the field array deterministic.
1059 mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
1060 dex::TypeIndex last_class_idx; // Initialized to invalid index.
1061 ObjPtr<mirror::Class> last_class = nullptr;
1062 for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
1063 uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
1064 auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
1065 uint32_t stored_index = pair.index;
1066 ArtField* field = pair.object;
1067 if (field != nullptr && i > stored_index) {
1068 continue; // Already checked.
1069 }
1070 // Check if the referenced class is in the image. Note that we want to check the referenced
1071 // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
1072 // results in resolving the referenced class and that can for example throw OOME.
1073 const DexFile::FieldId& field_id = dex_file.GetFieldId(i);
1074 if (field_id.class_idx_ != last_class_idx) {
1075 last_class_idx = field_id.class_idx_;
1076 last_class = class_linker->LookupResolvedType(
1077 dex_file, last_class_idx, dex_cache, class_loader);
1078 if (last_class != nullptr && !KeepClass(last_class)) {
1079 last_class = nullptr;
1080 }
1081 }
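    // The slot is either empty or holds an entry with a higher index; if the referenced class
    // is kept, fill the slot with the entry for the current (lower) index.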
1082 if (field == nullptr || i < stored_index) {
1083 if (last_class != nullptr) {
1084 const char* name = dex_file.StringDataByIdx(field_id.name_idx_);
1085 const char* type = dex_file.StringByTypeIdx(field_id.type_idx_);
1086 field = mirror::Class::FindField(Thread::Current(), last_class, name, type);
1087 if (field != nullptr) {
1088 // If the referenced class is in the image, the defining class must also be there.
1089 DCHECK(KeepClass(field->GetDeclaringClass()));
1090 dex_cache->SetResolvedField(i, field, target_ptr_size_);
1091 }
1092 }
1093 } else {
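      // The slot already holds the entry for this index; clear it unless the referenced class
      // is kept in the image.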
1094 DCHECK_EQ(i, stored_index);
1095 if (last_class == nullptr) {
1096 dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
1097 }
1098 }
1099 }
1100 // Prune types and make the contents of the type array deterministic.
1101 // This is done after fields and methods as their lookup can touch the types array.
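  // A successful LookupResolvedType() stores the type back into the dex cache (see the DCHECK
  // below), so processing indexes in increasing order gives lower indexes priority for shared
  // slots.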
1102 for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
1103 dex::TypeIndex type_idx(i);
1104 uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
1105 mirror::TypeDexCachePair pair =
1106 dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
1107 uint32_t stored_index = pair.index;
1108 ObjPtr<mirror::Class> klass = pair.object.Read();
1109 if (klass == nullptr || i < stored_index) {
1110 klass = class_linker->LookupResolvedType(dex_file, type_idx, dex_cache, class_loader);
1111 if (klass != nullptr) {
1112 DCHECK_EQ(dex_cache->GetResolvedType(type_idx), klass);
1113 stored_index = i; // For correct clearing below if not keeping the `klass`.
1114 }
1115 } else if (i == stored_index && !KeepClass(klass)) {
1116 dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
1117 }
1118 }
1119 // Strings do not need pruning, but the contents of the string array must be deterministic.
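  // The lookup below stores an already interned string into its dex cache slot (verified by the
  // DCHECK), which is all that is needed to make the array contents deterministic.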
1120 for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
1121 dex::StringIndex string_idx(i);
1122 uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
1123 mirror::StringDexCachePair pair =
1124 dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
1125 uint32_t stored_index = pair.index;
1126 ObjPtr<mirror::String> string = pair.object.Read();
1127 if (string == nullptr || i < stored_index) {
1128 string = class_linker->LookupString(dex_file, string_idx, dex_cache);
1129 DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
1130 }
1131 }
1132}
1133
Brian Carlstrom7940e442013-07-12 13:46:57 -07001134void ImageWriter::PruneNonImageClasses() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001135 Runtime* runtime = Runtime::Current();
1136 ClassLinker* class_linker = runtime->GetClassLinker();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001137 Thread* self = Thread::Current();
Vladimir Markof25cc732017-03-16 16:18:15 +00001138 ScopedAssertNoThreadSuspension sa(__FUNCTION__);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001139
Mathieu Chartier696632e2016-06-03 17:47:32 -07001140 // Clear class table strong roots so that dex caches can get pruned. We require pruning the class
1141 // path dex caches.
1142 class_linker->ClearClassTableStrongRoots();
1143
Brian Carlstrom7940e442013-07-12 13:46:57 -07001144 // Remove the undesired classes from the class roots.
Vladimir Markof25cc732017-03-16 16:18:15 +00001145 ObjPtr<mirror::ClassLoader> class_loader;
Vladimir Markoc5798bf2016-12-09 10:20:54 +00001146 {
1147 PruneClassLoaderClassesVisitor class_loader_visitor(this);
1148 VisitClassLoaders(&class_loader_visitor);
1149 VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
Vladimir Markof25cc732017-03-16 16:18:15 +00001150 class_loader = class_loader_visitor.GetClassLoader();
1151 DCHECK_EQ(class_loader != nullptr, compile_app_image_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001152 }
1153
1154 // Clear references to removed classes from the DexCaches.
Vladimir Markof25cc732017-03-16 16:18:15 +00001155 std::vector<ObjPtr<mirror::DexCache>> dex_caches;
1156 {
1157 ReaderMutexLock mu2(self, *Locks::dex_lock_);
1158 dex_caches.reserve(class_linker->GetDexCachesData().size());
1159 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
1160 if (self->IsJWeakCleared(data.weak_root)) {
1161 continue;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001162 }
Vladimir Markof25cc732017-03-16 16:18:15 +00001163 dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001164 }
Vladimir Markof25cc732017-03-16 16:18:15 +00001165 }
1166 for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
1167 PruneAndPreloadDexCache(dex_cache, class_loader);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001168 }
Andreas Gampe8ac75952015-06-02 21:01:45 -07001169
1170 // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
1171 class_linker->DropFindArrayClassCache();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001172
1173 // Clear to save RAM.
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00001174 prune_class_memo_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001175}
1176
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001177void ImageWriter::CheckNonImageClassesRemoved() {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001178 if (compiler_driver_.GetImageClasses() != nullptr) {
1179 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001180 heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001181 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001182}
1183
1184void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
1185 ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001186 if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001187 Class* klass = obj->AsClass();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001188 if (!image_writer->KeepClass(klass)) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001189 image_writer->DumpImageClasses();
Ian Rogers1ff3c982014-08-12 02:30:58 -07001190 std::string temp;
Mathieu Chartier4f5e3cb2017-06-12 13:10:01 -07001191 CHECK(image_writer->KeepClass(klass))
1192 << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001193 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001194 }
1195}
1196
1197void ImageWriter::DumpImageClasses() {
Andreas Gampeb1fcead2015-04-20 18:53:51 -07001198 auto image_classes = compiler_driver_.GetImageClasses();
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001199 CHECK(image_classes != nullptr);
Mathieu Chartier02e25112013-08-14 16:14:24 -07001200 for (const std::string& image_class : *image_classes) {
1201 LOG(INFO) << " " << image_class;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001202 }
1203}
1204
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001205mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
1206 Thread* const self = Thread::Current();
Vladimir Marko944da602016-02-19 12:27:55 +00001207 for (const ImageInfo& image_info : image_infos_) {
Mathieu Chartier9e868092016-10-31 14:58:04 -07001208 ObjPtr<mirror::String> const found = image_info.intern_table_->LookupStrong(self, string);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001209 DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
1210 << string->ToModifiedUtf8();
1211 if (found != nullptr) {
Mathieu Chartier9e868092016-10-31 14:58:04 -07001212 return found.Ptr();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001213 }
1214 }
1215 if (compile_app_image_) {
1216 Runtime* const runtime = Runtime::Current();
Mathieu Chartier9e868092016-10-31 14:58:04 -07001217 ObjPtr<mirror::String> found = runtime->GetInternTable()->LookupStrong(self, string);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001218 // If we found it in the runtime intern table it could either be in the boot image or interned
1219 // during app image compilation. If it was in the boot image return that, otherwise return null
1220 // since it belongs to another image space.
Mathieu Chartier9e868092016-10-31 14:58:04 -07001221 if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found.Ptr())) {
1222 return found.Ptr();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001223 }
1224 DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
1225 << string->ToModifiedUtf8();
1226 }
1227 return nullptr;
1228}
1229
Brian Carlstrom7940e442013-07-12 13:46:57 -07001230
Vladimir Marko944da602016-02-19 12:27:55 +00001231ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001232 Runtime* runtime = Runtime::Current();
1233 ClassLinker* class_linker = runtime->GetClassLinker();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001234 Thread* self = Thread::Current();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001235 StackHandleScope<3> hs(self);
1236 Handle<Class> object_array_class(hs.NewHandle(
1237 class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001238
Jeff Haodcdc85b2015-12-04 14:06:18 -08001239 std::unordered_set<const DexFile*> image_dex_files;
Vladimir Marko944da602016-02-19 12:27:55 +00001240 for (auto& pair : dex_file_oat_index_map_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001241 const DexFile* image_dex_file = pair.first;
Vladimir Marko944da602016-02-19 12:27:55 +00001242 size_t image_oat_index = pair.second;
1243 if (oat_index == image_oat_index) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001244 image_dex_files.insert(image_dex_file);
1245 }
1246 }
1247
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001248  // Build an Object[] of all the DexCaches used in the source_space_.
1249  // Since we can't hold the dex lock when allocating the dex_caches
1250  // ObjectArray, we lock the dex lock twice, first to get the number
1251  // of dex caches and then lock it again to copy the dex
1252 // caches. We check that the number of dex caches does not change.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001253 size_t dex_cache_count = 0;
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001254 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001255 ReaderMutexLock mu(self, *Locks::dex_lock_);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001256 // Count number of dex caches not in the boot image.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08001257 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001258 ObjPtr<mirror::DexCache> dex_cache =
1259 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
Brian Carlstrom0c050a12016-04-29 10:28:34 -07001260 if (dex_cache == nullptr) {
1261 continue;
1262 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001263 const DexFile* dex_file = dex_cache->GetDexFile();
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001264 if (!IsInBootImage(dex_cache.Ptr())) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001265 dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
1266 }
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001267 }
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001268 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001269 Handle<ObjectArray<Object>> dex_caches(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001270 hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08001271 CHECK(dex_caches != nullptr) << "Failed to allocate a dex cache array.";
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001272 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001273 ReaderMutexLock mu(self, *Locks::dex_lock_);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001274 size_t non_image_dex_caches = 0;
1275 // Re-count number of non image dex caches.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08001276 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001277 ObjPtr<mirror::DexCache> dex_cache =
1278 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
Brian Carlstrom0c050a12016-04-29 10:28:34 -07001279 if (dex_cache == nullptr) {
1280 continue;
1281 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001282 const DexFile* dex_file = dex_cache->GetDexFile();
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001283 if (!IsInBootImage(dex_cache.Ptr())) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001284 non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
1285 }
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001286 }
1287 CHECK_EQ(dex_cache_count, non_image_dex_caches)
1288 << "The number of non-image dex caches changed.";
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07001289 size_t i = 0;
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08001290 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001291 ObjPtr<mirror::DexCache> dex_cache =
1292 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
Brian Carlstrom0c050a12016-04-29 10:28:34 -07001293 if (dex_cache == nullptr) {
1294 continue;
1295 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001296 const DexFile* dex_file = dex_cache->GetDexFile();
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001297 if (!IsInBootImage(dex_cache.Ptr()) &&
1298 image_dex_files.find(dex_file) != image_dex_files.end()) {
1299 dex_caches->Set<false>(i, dex_cache.Ptr());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001300 ++i;
1301 }
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001302 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001303 }
1304
1305  // Build an Object[] of the roots needed to restore the runtime.
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001306 int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compile_app_image_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001307 auto image_roots(hs.NewHandle(
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001308 ObjectArray<Object>::Alloc(self, object_array_class.Get(), image_roots_size)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001309 image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01001310 image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001311 // image_roots[ImageHeader::kClassLoader] will be set later for app image.
1312 static_assert(ImageHeader::kClassLoader + 1u == ImageHeader::kImageRootsMax,
1313 "Class loader should be the last image root.");
1314 for (int32_t i = 0; i < ImageHeader::kImageRootsMax - 1; ++i) {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001315 CHECK(image_roots->Get(i) != nullptr);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001316 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001317 return image_roots.Get();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001318}
1319
Mathieu Chartier496577f2016-09-20 15:33:31 -07001320mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
1321 mirror::Object* obj,
1322 size_t oat_index) {
1323 if (obj == nullptr || IsInBootImage(obj)) {
1324 // Object is null or already in the image, there is no work to do.
1325 return obj;
Mathieu Chartier590fee92013-09-13 13:46:47 -07001326 }
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001327 if (!IsImageBinSlotAssigned(obj)) {
Mathieu Chartier496577f2016-09-20 15:33:31 -07001328 // We want to intern all strings but also assign offsets for the source string. Since the
1329 // pruning phase has already happened, if we intern a string to one in the image we still
1330 // end up copying an unreachable string.
1331 if (obj->IsString()) {
1332 // Need to check if the string is already interned in another image info so that we don't have
1333 // the intern tables of two different images contain the same string.
1334 mirror::String* interned = FindInternedString(obj->AsString());
1335 if (interned == nullptr) {
1336 // Not in another image space, insert to our table.
Mathieu Chartier9e868092016-10-31 14:58:04 -07001337 interned =
1338 GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString()).Ptr();
Mathieu Chartier496577f2016-09-20 15:33:31 -07001339 DCHECK_EQ(interned, obj);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001340 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001341 } else if (obj->IsDexCache()) {
1342 oat_index = GetOatIndexForDexCache(obj->AsDexCache());
1343 } else if (obj->IsClass()) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001344 // Visit and assign offsets for fields and field arrays.
Mathieu Chartier496577f2016-09-20 15:33:31 -07001345 mirror::Class* as_klass = obj->AsClass();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001346 mirror::DexCache* dex_cache = as_klass->GetDexCache();
Vladimir Marko72ab6842017-01-20 19:32:50 +00001347 DCHECK(!as_klass->IsErroneous()) << as_klass->GetStatus();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001348 if (compile_app_image_) {
1349      // Extra sanity check: no boot class loader classes should be left!
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00001350 CHECK(!IsBootClassLoaderClass(as_klass)) << as_klass->PrettyClass();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001351 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001352 LengthPrefixedArray<ArtField>* fields[] = {
1353 as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
1354 };
Mathieu Chartier496577f2016-09-20 15:33:31 -07001355      // Overwrite the oat index value since the class' dex cache is a more accurate indication
1356      // of where it belongs.
1357 oat_index = GetOatIndexForDexCache(dex_cache);
Vladimir Marko944da602016-02-19 12:27:55 +00001358 ImageInfo& image_info = GetImageInfo(oat_index);
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00001359 if (!compile_app_image_) {
1360 // Note: Avoid locking to prevent lock order violations from root visiting;
1361 // image_info.class_table_ is only accessed from the image writer.
Mathieu Chartier496577f2016-09-20 15:33:31 -07001362 image_info.class_table_->InsertWithoutLocks(as_klass);
Mathieu Chartier1f47b672016-01-07 16:29:01 -08001363 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001364 for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
1365 // Total array length including header.
1366 if (cur_fields != nullptr) {
1367 const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
1368 // Forward the entire array at once.
1369 auto it = native_object_relocations_.find(cur_fields);
1370 CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
1371 << " already forwarded";
Jeff Haodcdc85b2015-12-04 14:06:18 -08001372 size_t& offset = image_info.bin_slot_sizes_[kBinArtField];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001373 DCHECK(!IsInBootImage(cur_fields));
Vladimir Marko944da602016-02-19 12:27:55 +00001374 native_object_relocations_.emplace(
1375 cur_fields,
1376 NativeObjectRelocation {
1377 oat_index, offset, kNativeObjectRelocationTypeArtFieldArray
1378 });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001379 offset += header_size;
1380 // Forward individual fields so that we can quickly find where they belong.
Vladimir Marko35831e82015-09-11 11:59:18 +01001381 for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001382 // Need to forward arrays separate of fields.
1383 ArtField* field = &cur_fields->At(i);
1384 auto it2 = native_object_relocations_.find(field);
1385 CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
David Sehr709b0702016-10-13 09:12:37 -07001386 << " already assigned " << field->PrettyField() << " static=" << field->IsStatic();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001387 DCHECK(!IsInBootImage(field));
Vladimir Marko944da602016-02-19 12:27:55 +00001388 native_object_relocations_.emplace(
1389 field,
1390 NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeArtField });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001391 offset += sizeof(ArtField);
1392 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001393 }
1394 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001395 // Visit and assign offsets for methods.
Alex Lighte64300b2015-12-15 15:02:47 -08001396 size_t num_methods = as_klass->NumMethods();
1397 if (num_methods != 0) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001398 bool any_dirty = false;
Alex Lighte64300b2015-12-15 15:02:47 -08001399 for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
1400 if (WillMethodBeDirty(&m)) {
1401 any_dirty = true;
1402 break;
1403 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001404 }
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001405 NativeObjectRelocationType type = any_dirty
1406 ? kNativeObjectRelocationTypeArtMethodDirty
1407 : kNativeObjectRelocationTypeArtMethodClean;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001408 Bin bin_type = BinTypeForNativeRelocationType(type);
1409 // Forward the entire array at once, but header first.
Alex Lighte64300b2015-12-15 15:02:47 -08001410 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1411 const size_t method_size = ArtMethod::Size(target_ptr_size_);
Vladimir Markocf36d492015-08-12 19:27:26 +01001412 const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
1413 method_size,
1414 method_alignment);
Alex Lighte64300b2015-12-15 15:02:47 -08001415 LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001416 auto it = native_object_relocations_.find(array);
Alex Lighte64300b2015-12-15 15:02:47 -08001417 CHECK(it == native_object_relocations_.end())
1418 << "Method array " << array << " already forwarded";
Jeff Haodcdc85b2015-12-04 14:06:18 -08001419 size_t& offset = image_info.bin_slot_sizes_[bin_type];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001420 DCHECK(!IsInBootImage(array));
Jeff Haodcdc85b2015-12-04 14:06:18 -08001421 native_object_relocations_.emplace(array,
1422 NativeObjectRelocation {
Vladimir Marko944da602016-02-19 12:27:55 +00001423 oat_index,
Jeff Haodcdc85b2015-12-04 14:06:18 -08001424 offset,
1425 any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty
1426 : kNativeObjectRelocationTypeArtMethodArrayClean });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001427 offset += header_size;
Alex Lighte64300b2015-12-15 15:02:47 -08001428 for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
Vladimir Marko944da602016-02-19 12:27:55 +00001429 AssignMethodOffset(&m, type, oat_index);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001430 }
Alex Lighte64300b2015-12-15 15:02:47 -08001431 (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
Mathieu Chartier97bad1b2016-05-16 14:58:01 -07001432 }
1433 // Assign offsets for all runtime methods in the IMT since these may hold conflict tables
1434 // live.
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001435 if (as_klass->ShouldHaveImt()) {
1436 ImTable* imt = as_klass->GetImt(target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001437 if (TryAssignImTableOffset(imt, oat_index)) {
1438        // Since ImTables can be shared, only do this the first time so that we do not double
1439        // count IMT method fixups.
1440 for (size_t i = 0; i < ImTable::kSize; ++i) {
1441 ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
1442 DCHECK(imt_method != nullptr);
1443 if (imt_method->IsRuntimeMethod() &&
1444 !IsInBootImage(imt_method) &&
1445 !NativeRelocationAssigned(imt_method)) {
1446 AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
1447 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001448 }
1449 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001450 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001451 } else if (obj->IsClassLoader()) {
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001452 // Register the class loader if it has a class table.
1453 // The fake boot class loader should not get registered and we should end up with only one
1454 // class loader.
Mathieu Chartier496577f2016-09-20 15:33:31 -07001455 mirror::ClassLoader* class_loader = obj->AsClassLoader();
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001456 if (class_loader->GetClassTable() != nullptr) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00001457 DCHECK(compile_app_image_);
1458 DCHECK(class_loaders_.empty());
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001459 class_loaders_.insert(class_loader);
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00001460 ImageInfo& image_info = GetImageInfo(oat_index);
1461 // Note: Avoid locking to prevent lock order violations from root visiting;
1462      // image_info.class_table_ is only accessed from the image writer
1463 // and class_loader->GetClassTable() is iterated but not modified.
1464 image_info.class_table_->CopyWithoutLocks(*class_loader->GetClassTable());
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001465 }
Mathieu Chartier590fee92013-09-13 13:46:47 -07001466 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001467 AssignImageBinSlot(obj, oat_index);
1468 work_stack.emplace(obj, oat_index);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001469 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001470 if (obj->IsString()) {
1471 // Always return the interned string if there exists one.
1472 mirror::String* interned = FindInternedString(obj->AsString());
1473 if (interned != nullptr) {
1474 return interned;
1475 }
1476 }
1477 return obj;
Mathieu Chartier590fee92013-09-13 13:46:47 -07001478}
1479
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001480bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
1481 return native_object_relocations_.find(ptr) != native_object_relocations_.end();
1482}
1483
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001484bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001485 // No offset, or already assigned.
1486 if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001487 return false;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001488 }
1489  // Reserve space for this ImTable in the kBinImTable bin of the given oat file's image.
1490 ImageInfo& image_info = GetImageInfo(oat_index);
1491 const size_t size = ImTable::SizeInBytes(target_ptr_size_);
1492 native_object_relocations_.emplace(
1493 imt,
1494 NativeObjectRelocation {
1495 oat_index,
1496 image_info.bin_slot_sizes_[kBinImTable],
1497 kNativeObjectRelocationTypeIMTable});
1498 image_info.bin_slot_sizes_[kBinImTable] += size;
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001499 return true;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001500}
1501
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001502void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
1503 // No offset, or already assigned.
1504 if (table == nullptr || NativeRelocationAssigned(table)) {
1505 return;
1506 }
1507 CHECK(!IsInBootImage(table));
1508 // If the method is a conflict method we also want to assign the conflict table offset.
1509 ImageInfo& image_info = GetImageInfo(oat_index);
1510 const size_t size = table->ComputeSize(target_ptr_size_);
1511 native_object_relocations_.emplace(
1512 table,
1513 NativeObjectRelocation {
1514 oat_index,
1515 image_info.bin_slot_sizes_[kBinIMTConflictTable],
1516 kNativeObjectRelocationTypeIMTConflictTable});
1517 image_info.bin_slot_sizes_[kBinIMTConflictTable] += size;
1518}
1519
Jeff Haodcdc85b2015-12-04 14:06:18 -08001520void ImageWriter::AssignMethodOffset(ArtMethod* method,
1521 NativeObjectRelocationType type,
Vladimir Marko944da602016-02-19 12:27:55 +00001522 size_t oat_index) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001523 DCHECK(!IsInBootImage(method));
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001524 CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
David Sehr709b0702016-10-13 09:12:37 -07001525 << ArtMethod::PrettyMethod(method);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001526 if (method->IsRuntimeMethod()) {
1527 TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
1528 }
Vladimir Marko944da602016-02-19 12:27:55 +00001529 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001530 size_t& offset = image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
Vladimir Marko944da602016-02-19 12:27:55 +00001531 native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
Vladimir Marko14632852015-08-17 12:07:23 +01001532 offset += ArtMethod::Size(target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001533}
1534
Mathieu Chartier496577f2016-09-20 15:33:31 -07001535void ImageWriter::EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001536 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1537 DCHECK(writer != nullptr);
Mathieu Chartier496577f2016-09-20 15:33:31 -07001538 if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
David Sehr709b0702016-10-13 09:12:37 -07001539 CHECK(writer->IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
Mathieu Chartier496577f2016-09-20 15:33:31 -07001540 }
1541}
1542
1543void ImageWriter::DeflateMonitorCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) {
1544 Monitor::Deflate(Thread::Current(), obj);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001545}
1546
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001547void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
1548 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1549 DCHECK(writer != nullptr);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001550 if (!writer->IsInBootImage(obj)) {
1551 writer->UnbinObjectsIntoOffset(obj);
1552 }
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001553}
1554
1555void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001556 DCHECK(!IsInBootImage(obj));
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001557 CHECK(obj != nullptr);
1558
1559 // We know the bin slot, and the total bin sizes for all objects by now,
1560 // so calculate the object's final image offset.
1561
1562 DCHECK(IsImageBinSlotAssigned(obj));
1563 BinSlot bin_slot = GetImageBinSlot(obj);
1564 // Change the lockword from a bin slot into an offset
1565 AssignImageOffset(obj, bin_slot);
1566}
1567
Mathieu Chartier496577f2016-09-20 15:33:31 -07001568class ImageWriter::VisitReferencesVisitor {
1569 public:
1570 VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
1571 : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}
1572
1573  // Visit GC roots (e.g. native roots in class data) and assign bin slots for them as well.
1574 ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
1575 REQUIRES_SHARED(Locks::mutator_lock_) {
1576 if (!root->IsNull()) {
1577 VisitRoot(root);
1578 }
1579 }
1580
1581 ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1582 REQUIRES_SHARED(Locks::mutator_lock_) {
1583 root->Assign(VisitReference(root->AsMirrorPtr()));
1584 }
1585
Mathieu Chartier31e88222016-10-14 18:43:19 -07001586 ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
Mathieu Chartier496577f2016-09-20 15:33:31 -07001587 MemberOffset offset,
1588 bool is_static ATTRIBUTE_UNUSED) const
1589 REQUIRES_SHARED(Locks::mutator_lock_) {
1590 mirror::Object* ref =
1591 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1592 obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
1593 }
1594
Mathieu Chartier31e88222016-10-14 18:43:19 -07001595 ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1596 ObjPtr<mirror::Reference> ref) const
Mathieu Chartier496577f2016-09-20 15:33:31 -07001597 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001598 operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
Mathieu Chartier496577f2016-09-20 15:33:31 -07001599 }
1600
1601 private:
1602 mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
1603 return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
1604 }
1605
1606 ImageWriter* const image_writer_;
1607 WorkStack* const work_stack_;
1608 const size_t oat_index_;
1609};
1610
1611class ImageWriter::GetRootsVisitor : public RootVisitor {
1612 public:
1613 explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}
1614
1615 void VisitRoots(mirror::Object*** roots,
1616 size_t count,
1617 const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1618 REQUIRES_SHARED(Locks::mutator_lock_) {
1619 for (size_t i = 0; i < count; ++i) {
1620 roots_->push_back(*roots[i]);
1621 }
1622 }
1623
1624 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
1625 size_t count,
1626 const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1627 REQUIRES_SHARED(Locks::mutator_lock_) {
1628 for (size_t i = 0; i < count; ++i) {
1629 roots_->push_back(roots[i]->AsMirrorPtr());
1630 }
1631 }
1632
1633 private:
1634 std::vector<mirror::Object*>* const roots_;
1635};
1636
1637void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
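  // Drain the stack iteratively: visiting an object's references may assign bin slots to new
  // objects, which are then pushed onto the stack with their oat index.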
1638 while (!work_stack->empty()) {
1639 std::pair<mirror::Object*, size_t> pair(work_stack->top());
1640 work_stack->pop();
1641 VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
1642 // Walk references and assign bin slots for them.
1643 pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1644 visitor,
1645 visitor);
1646 }
1647}
1648
Vladimir Markof4da6752014-08-01 19:04:18 +01001649void ImageWriter::CalculateNewObjectOffsets() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001650 Thread* const self = Thread::Current();
Mathieu Chartiere8a3c572016-10-11 16:52:17 -07001651 VariableSizedHandleScope handles(self);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001652 std::vector<Handle<ObjectArray<Object>>> image_roots;
Vladimir Marko944da602016-02-19 12:27:55 +00001653 for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
1654 image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));
Jeff Haodcdc85b2015-12-04 14:06:18 -08001655 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001656
Mathieu Chartier496577f2016-09-20 15:33:31 -07001657 Runtime* const runtime = Runtime::Current();
1658 gc::Heap* const heap = runtime->GetHeap();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001659
Mathieu Chartier31e89252013-08-28 11:29:12 -07001660  // Leave space for the header, but do not write it yet; we need to
Brian Carlstrom7940e442013-07-12 13:46:57 -07001661  // know where image_roots is going to end up.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001662 image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit-alignment
Brian Carlstrom7940e442013-07-12 13:46:57 -07001663
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001664 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001665 // Write the image runtime methods.
1666 image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
1667 image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
1668 image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001669 image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001670 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves);
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001671 image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001672 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly);
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001673 image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001674 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
Vladimir Marko952dbb12016-07-28 12:01:51 +01001675 image_methods_[ImageHeader::kSaveEverythingMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001676 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001677 // Visit image methods first to have the main runtime methods in the first image.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001678 for (auto* m : image_methods_) {
1679 CHECK(m != nullptr);
1680 CHECK(m->IsRuntimeMethod());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001681 DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
1682 if (!IsInBootImage(m)) {
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001683 AssignMethodOffset(m, kNativeObjectRelocationTypeRuntimeMethod, GetDefaultOatIndex());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001684 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001685 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001686
Mathieu Chartier496577f2016-09-20 15:33:31 -07001687 // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
1688 // this lock while holding other locks may cause lock order violations.
1689 heap->VisitObjects(DeflateMonitorCallback, this);
1690
1691 // Work list of <object, oat_index> for objects. Everything on the stack must already be
1692 // assigned a bin slot.
1693 WorkStack work_stack;
1694
1695 // Special case interned strings to put them in the image they are likely to be resolved from.
1696 for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
1697 auto it = dex_file_oat_index_map_.find(dex_file);
1698 DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
1699 const size_t oat_index = it->second;
1700 InternTable* const intern_table = runtime->GetInternTable();
1701 for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
1702 uint32_t utf16_length;
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001703 const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
1704 &utf16_length);
Mathieu Chartier9e868092016-10-31 14:58:04 -07001705 mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data).Ptr();
Mathieu Chartier496577f2016-09-20 15:33:31 -07001706 TryAssignBinSlot(work_stack, string, oat_index);
1707 }
1708 }
1709
1710 // Get the GC roots and then visit them separately to avoid lock violations since the root visitor
1711 // visits roots while holding various locks.
1712 {
1713 std::vector<mirror::Object*> roots;
1714 GetRootsVisitor root_visitor(&roots);
1715 runtime->VisitRoots(&root_visitor);
1716 for (mirror::Object* obj : roots) {
1717 TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
1718 }
1719 }
1720 ProcessWorkStack(&work_stack);
1721
1722 // For app images, there may be objects that are only held live by the by the boot image. One
1723 // example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
1724 // does not fail any checks. TODO: We should probably avoid copying these objects.
1725 if (compile_app_image_) {
1726 for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
1727 DCHECK(space->IsImageSpace());
1728 gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
1729 live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
1730 reinterpret_cast<uintptr_t>(space->Limit()),
1731 [this, &work_stack](mirror::Object* obj)
1732 REQUIRES_SHARED(Locks::mutator_lock_) {
1733 VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
1734 // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
1735 obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1736 visitor,
1737 visitor);
1738 });
1739 }
1740 // Process the work stack in case anything was added by TryAssignBinSlot.
1741 ProcessWorkStack(&work_stack);
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001742
1743 // Store the class loader in the class roots.
1744 CHECK_EQ(class_loaders_.size(), 1u);
1745 CHECK_EQ(image_roots.size(), 1u);
1746 CHECK(*class_loaders_.begin() != nullptr);
1747 image_roots[0]->Set<false>(ImageHeader::kClassLoader, *class_loaders_.begin());
Mathieu Chartier496577f2016-09-20 15:33:31 -07001748 }
1749
1750 // Verify that all objects have assigned image bin slots.
1751 heap->VisitObjects(EnsureBinSlotAssignedCallback, this);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001752
Vladimir Marko05792b92015-08-03 11:56:49 +01001753 // Calculate size of the dex cache arrays slot and prepare offsets.
1754 PrepareDexCacheArraySlots();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001755
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001756 // Calculate the sizes of the intern tables, class tables, and fixup tables.
Vladimir Marko944da602016-02-19 12:27:55 +00001757 for (ImageInfo& image_info : image_infos_) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -08001758 // Calculate how big the intern table will be after being serialized.
1759 InternTable* const intern_table = image_info.intern_table_.get();
1760 CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
Vladimir Marko1a1de672016-10-13 12:53:15 +01001761 if (intern_table->StrongSize() != 0u) {
1762 image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
1763 }
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001764
Mathieu Chartier1f47b672016-01-07 16:29:01 -08001765 // Calculate the size of the class table.
1766 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00001767 DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
1768 if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01001769 image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
1770 }
Mathieu Chartierea0831f2015-12-29 13:17:37 -08001771 }
1772
Vladimir Markocf36d492015-08-12 19:27:26 +01001773 // Calculate bin slot offsets.
Vladimir Marko944da602016-02-19 12:27:55 +00001774 for (ImageInfo& image_info : image_infos_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001775 size_t bin_offset = image_objects_offset_begin_;
1776 for (size_t i = 0; i != kBinSize; ++i) {
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001777 switch (i) {
1778 case kBinArtMethodClean:
1779 case kBinArtMethodDirty: {
1780 bin_offset = RoundUp(bin_offset, method_alignment);
1781 break;
1782 }
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07001783 case kBinDexCacheArray:
Vladimir Markof44d36c2017-03-14 14:18:46 +00001784 bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07001785 break;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001786 case kBinImTable:
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001787 case kBinIMTConflictTable: {
Andreas Gampe542451c2016-07-26 09:02:02 -07001788 bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001789 break;
1790 }
1791 default: {
1792 // Normal alignment.
1793 }
1794 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001795 image_info.bin_slot_offsets_[i] = bin_offset;
1796 bin_offset += image_info.bin_slot_sizes_[i];
Vladimir Markocf36d492015-08-12 19:27:26 +01001797 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001798 // NOTE: There may be additional padding between the bin slots and the intern table.
1799 DCHECK_EQ(image_info.image_end_,
1800 GetBinSizeSum(image_info, kBinMirrorCount) + image_objects_offset_begin_);
Vladimir Marko20f85592015-03-19 10:07:02 +00001801 }
Vladimir Markocf36d492015-08-12 19:27:26 +01001802
Jeff Haodcdc85b2015-12-04 14:06:18 -08001803 // Calculate image offsets.
1804 size_t image_offset = 0;
Vladimir Marko944da602016-02-19 12:27:55 +00001805 for (ImageInfo& image_info : image_infos_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001806 image_info.image_begin_ = global_image_begin_ + image_offset;
1807 image_info.image_offset_ = image_offset;
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001808 ImageSection unused_sections[ImageHeader::kSectionCount];
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001809 image_info.image_size_ = RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001810 // There should be no gaps until the next image.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001811 image_offset += image_info.image_size_;
1812 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001813
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -08001814 // Transform each object's bin slot into an offset which will be used to do the final copy.
1815 heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001816
Jeff Haodcdc85b2015-12-04 14:06:18 -08001817 size_t i = 0;
Vladimir Marko944da602016-02-19 12:27:55 +00001818 for (ImageInfo& image_info : image_infos_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001819 image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
1820 i++;
1821 }
Vladimir Markof4da6752014-08-01 19:04:18 +01001822
Mathieu Chartiere401d142015-04-22 13:56:20 -07001823 // Update the native relocations by adding their bin sums.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001824 for (auto& pair : native_object_relocations_) {
1825 NativeObjectRelocation& relocation = pair.second;
1826 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
Vladimir Marko944da602016-02-19 12:27:55 +00001827 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001828 relocation.offset += image_info.bin_slot_offsets_[bin_type];
Mathieu Chartiere401d142015-04-22 13:56:20 -07001829 }
Vladimir Markof4da6752014-08-01 19:04:18 +01001830}
1831
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001832size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001833 DCHECK(out_sections != nullptr);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001834
1835 // Do not round up any sections here that are represented by the bins since it will break
1836 // offsets.
1837
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001838 // Objects section
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001839 ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001840 *objects_section = ImageSection(0u, image_end_);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001841
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001842 // Add field section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001843 ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
1844 *field_section = ImageSection(bin_slot_offsets_[kBinArtField], bin_slot_sizes_[kBinArtField]);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001845 CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001846
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001847 // Add method section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001848 ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
1849 *methods_section = ImageSection(
1850 bin_slot_offsets_[kBinArtMethodClean],
1851 bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinArtMethodDirty]);
1852
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001853 // IMT section.
1854 ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
1855 *imt_section = ImageSection(bin_slot_offsets_[kBinImTable], bin_slot_sizes_[kBinImTable]);
1856
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001857 // Conflict tables section.
1858 ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
1859 *imt_conflict_tables_section = ImageSection(bin_slot_offsets_[kBinIMTConflictTable],
1860 bin_slot_sizes_[kBinIMTConflictTable]);
1861
1862 // Runtime methods section.
1863 ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
1864 *runtime_methods_section = ImageSection(bin_slot_offsets_[kBinRuntimeMethod],
1865 bin_slot_sizes_[kBinRuntimeMethod]);
1866
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001867 // Add dex cache arrays section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001868 ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
1869 *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
1870 bin_slot_sizes_[kBinDexCacheArray]);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001871 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001872 size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001873 // Calculate the size of the interned strings.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001874 ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001875 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1876 cur_pos = interned_strings_section->End();
1877 // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
1878 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
1879 // Calculate the size of the class table section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001880 ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
Mathieu Chartier1f47b672016-01-07 16:29:01 -08001881 *class_table_section = ImageSection(cur_pos, class_table_bytes_);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001882 cur_pos = class_table_section->End();
1883 // Image end goes right before the start of the image bitmap.
1884 return cur_pos;
1885}
1886
Vladimir Marko944da602016-02-19 12:27:55 +00001887void ImageWriter::CreateHeader(size_t oat_index) {
1888 ImageInfo& image_info = GetImageInfo(oat_index);
1889 const uint8_t* oat_file_begin = image_info.oat_file_begin_;
1890 const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
1891 const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001892
1893 // Create the image sections.
1894 ImageSection sections[ImageHeader::kSectionCount];
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001895 const size_t image_end = image_info.CreateImageSections(sections);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001896
Mathieu Chartiere401d142015-04-22 13:56:20 -07001897 // Finally bitmap section.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001898 const size_t bitmap_bytes = image_info.image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001899 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001900 *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
Jeff Haodcdc85b2015-12-04 14:06:18 -08001901 if (VLOG_IS_ON(compiler)) {
Vladimir Marko944da602016-02-19 12:27:55 +00001902 LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
Mathieu Chartiere401d142015-04-22 13:56:20 -07001903 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001904 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001905 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1906 ++idx;
1907 }
1908 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001909 LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
1910 LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
1911 << " Image offset=" << image_info.image_offset_ << std::dec;
1912 LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
1913 << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
1914 << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
1915 << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001916 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001917 // Store boot image info for app image so that we can relocate.
1918 uint32_t boot_image_begin = 0;
1919 uint32_t boot_image_end = 0;
1920 uint32_t boot_oat_begin = 0;
1921 uint32_t boot_oat_end = 0;
1922 gc::Heap* const heap = Runtime::Current()->GetHeap();
1923 heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001924
Mathieu Chartierceb07b32015-12-10 09:33:21 -08001925 // Create the header, leave 0 for data size since we will fill this in as we are writing the
1926 // image.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001927 new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
1928 image_end,
1929 sections,
1930 image_info.image_roots_address_,
Vladimir Marko944da602016-02-19 12:27:55 +00001931 image_info.oat_checksum_,
Jeff Haodcdc85b2015-12-04 14:06:18 -08001932 PointerToLowMemUInt32(oat_file_begin),
1933 PointerToLowMemUInt32(image_info.oat_data_begin_),
1934 PointerToLowMemUInt32(oat_data_end),
1935 PointerToLowMemUInt32(oat_file_end),
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001936 boot_image_begin,
1937 boot_image_end - boot_image_begin,
1938 boot_oat_begin,
1939 boot_oat_end - boot_oat_begin,
Andreas Gampe542451c2016-07-26 09:02:02 -07001940 static_cast<uint32_t>(target_ptr_size_),
Jeff Haodcdc85b2015-12-04 14:06:18 -08001941 compile_pic_,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001942 /*is_pic*/compile_app_image_,
Jeff Haodcdc85b2015-12-04 14:06:18 -08001943 image_storage_mode_,
1944 /*data_size*/0u);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001945}
1946
1947ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001948 auto it = native_object_relocations_.find(method);
David Sehr709b0702016-10-13 09:12:37 -07001949 CHECK(it != native_object_relocations_.end()) << ArtMethod::PrettyMethod(method) << " @ "
1950 << method;
Vladimir Marko944da602016-02-19 12:27:55 +00001951 size_t oat_index = GetOatIndex(method->GetDexCache());
1952 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001953 CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
1954 return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001955}
1956
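// Root visitor used while writing the intern table and class table into the image: every
// visited compressed reference is rewritten to the object's address in the final image.
// Plain Object*** roots are not supported here.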
Vladimir Markoad06b982016-11-17 16:38:59 +00001957class ImageWriter::FixupRootVisitor : public RootVisitor {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001958 public:
1959 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1960 }
1961
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001962 void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
1963 size_t count ATTRIBUTE_UNUSED,
1964 const RootInfo& info ATTRIBUTE_UNUSED)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001965 OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001966 LOG(FATAL) << "Unsupported";
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001967 }
1968
1969 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1970 const RootInfo& info ATTRIBUTE_UNUSED)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001971 OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001972 for (size_t i = 0; i < count; ++i) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001973 image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001974 }
1975 }
1976
1977 private:
1978 ImageWriter* const image_writer_;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001979};
1980
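// Fix up a copied ImTable (and, below, an ImtConflictTable): each method pointer is rewritten
// to the image location of the corresponding ArtMethod.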
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001981void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
1982 for (size_t i = 0; i < ImTable::kSize; ++i) {
1983 ArtMethod* method = orig->Get(i, target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001984 void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
1985 CopyAndFixupPointer(address, method);
1986 DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001987 }
1988}
1989
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001990void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
1991 const size_t count = orig->NumEntries(target_ptr_size_);
1992 for (size_t i = 0; i < count; ++i) {
1993 ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
1994 ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001995 CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
1996 CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
1997 implementation_method);
1998 DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
1999 NativeLocationInImage(interface_method));
2000 DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
2001 NativeLocationInImage(implementation_method));
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002002 }
2003}
2004
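// Copy the native data (ArtFields, ArtMethods, method/field arrays, IM tables and IMT conflict
// tables) that belongs to this oat file into the image buffer, fix up the image method roots,
// and serialize the intern table and class table into their sections.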
Vladimir Marko944da602016-02-19 12:27:55 +00002005void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002006 const ImageInfo& image_info = GetImageInfo(oat_index);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002007 // Copy ArtFields and methods to their locations and update the array for convenience.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002008 for (auto& pair : native_object_relocations_) {
2009 NativeObjectRelocation& relocation = pair.second;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002010 // Only work with fields and methods that are in the current oat file.
Vladimir Marko944da602016-02-19 12:27:55 +00002011 if (relocation.oat_index != oat_index) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08002012 continue;
2013 }
2014 auto* dest = image_info.image_->Begin() + relocation.offset;
2015 DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002016 DCHECK(!IsInBootImage(pair.first));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002017 switch (relocation.type) {
2018 case kNativeObjectRelocationTypeArtField: {
2019 memcpy(dest, pair.first, sizeof(ArtField));
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002020 CopyReference(
2021 reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
2022 reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002023 break;
2024 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002025 case kNativeObjectRelocationTypeRuntimeMethod:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002026 case kNativeObjectRelocationTypeArtMethodClean:
2027 case kNativeObjectRelocationTypeArtMethodDirty: {
2028 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
Jeff Haodcdc85b2015-12-04 14:06:18 -08002029 reinterpret_cast<ArtMethod*>(dest),
2030 image_info);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002031 break;
2032 }
2033 // For arrays, copy just the header since the elements will get copied by their corresponding
2034 // relocations.
2035 case kNativeObjectRelocationTypeArtFieldArray: {
2036 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
2037 break;
2038 }
2039 case kNativeObjectRelocationTypeArtMethodArrayClean:
2040 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
Vladimir Markod9813cb2016-03-15 12:41:27 +00002041 size_t size = ArtMethod::Size(target_ptr_size_);
2042 size_t alignment = ArtMethod::Alignment(target_ptr_size_);
2043 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
2044 // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
2045 reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002046 break;
Vladimir Markod9813cb2016-03-15 12:41:27 +00002047 }
Vladimir Marko05792b92015-08-03 11:56:49 +01002048 case kNativeObjectRelocationTypeDexCacheArray:
2049 // Nothing to copy here, everything is done in FixupDexCache().
2050 break;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002051 case kNativeObjectRelocationTypeIMTable: {
2052 ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
2053 ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
2054 CopyAndFixupImTable(orig_imt, dest_imt);
2055 break;
2056 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002057 case kNativeObjectRelocationTypeIMTConflictTable: {
2058 auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
2059 CopyAndFixupImtConflictTable(
2060 orig_table,
2061 new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
2062 break;
2063 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002064 }
2065 }
2066 // Fixup the image method roots.
Jeff Haodcdc85b2015-12-04 14:06:18 -08002067 auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
Mathieu Chartiere401d142015-04-22 13:56:20 -07002068 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002069 ArtMethod* method = image_methods_[i];
2070 CHECK(method != nullptr);
2071 if (!IsInBootImage(method)) {
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002072 method = NativeLocationInImage(method);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002073 }
2074 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002075 }
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08002076 FixupRootVisitor root_visitor(this);
2077
Mathieu Chartierd39645e2015-06-09 17:50:29 -07002078 // Write the intern table into the image.
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002079 if (image_info.intern_table_bytes_ > 0) {
2080 const ImageSection& intern_table_section = image_header->GetImageSection(
2081 ImageHeader::kSectionInternedStrings);
2082 InternTable* const intern_table = image_info.intern_table_.get();
2083 uint8_t* const intern_table_memory_ptr =
2084 image_info.image_->Begin() + intern_table_section.Offset();
2085 const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
2086 CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
2087 // Fixup the pointers in the newly written intern table to contain image addresses.
2088 InternTable temp_intern_table;
2089 // Note that we require that AddTableFromMemory does not make an internal copy of the elements
2090 // so that VisitRoots() will update the memory directly rather than the copies.
2091 // This also relies on visit roots not doing any verification which could fail after we update
2092 // the roots to be the image addresses.
2093 temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
2094 CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
2095 temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
2096 }
Mathieu Chartier67ad20e2015-12-09 15:41:09 -08002097 // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
2098 // class loaders. Writing multiple class tables into the image is currently unsupported.
Mathieu Chartier1f47b672016-01-07 16:29:01 -08002099 if (image_info.class_table_bytes_ > 0u) {
Mathieu Chartier67ad20e2015-12-09 15:41:09 -08002100 const ImageSection& class_table_section = image_header->GetImageSection(
2101 ImageHeader::kSectionClassTable);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002102 uint8_t* const class_table_memory_ptr =
2103 image_info.image_->Begin() + class_table_section.Offset();
Mathieu Chartier67ad20e2015-12-09 15:41:09 -08002104 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Mathieu Chartier1f47b672016-01-07 16:29:01 -08002105
2106 ClassTable* table = image_info.class_table_.get();
2107 CHECK(table != nullptr);
2108 const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
2109 CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
2110 // Fixup the pointers in the newly written class table to contain image addresses. See
2111 // above comment for intern tables.
2112 ClassTable temp_class_table;
2113 temp_class_table.ReadFromMemory(class_table_memory_ptr);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00002114 CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
2115 table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
Mathieu Chartier58c3f6a2016-12-01 14:21:11 -08002116 UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
2117 temp_class_table.VisitRoots(visitor);
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08002118 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002119}
2120
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08002121void ImageWriter::CopyAndFixupObjects() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07002122 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07002123 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
2124 // Fix up the objects that previously had hash codes.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07002125 for (const auto& hash_pair : saved_hashcode_map_) {
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08002126 Object* obj = hash_pair.first;
Andreas Gampe3b45ef22015-05-26 21:34:09 -07002127 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
2128 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
Mathieu Chartier590fee92013-09-13 13:46:47 -07002129 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07002130 saved_hashcode_map_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07002131}
2132
Mathieu Chartier590fee92013-09-13 13:46:47 -07002133void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
Mathieu Chartier4d7f61d2014-04-17 14:43:39 -07002134 DCHECK(obj != nullptr);
2135 DCHECK(arg != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002136 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
2137}
2138
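// Pointer arrays are int[]/long[] objects whose elements are native ArtMethod* or ArtField*
// values; rewrite each element to the relocated address of the method or field in the image.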
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002139void ImageWriter::FixupPointerArray(mirror::Object* dst,
2140 mirror::PointerArray* arr,
2141 mirror::Class* klass,
2142 Bin array_type) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002143 CHECK(klass->IsArrayClass());
David Sehr709b0702016-10-13 09:12:37 -07002144 CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
Mathieu Chartiere401d142015-04-22 13:56:20 -07002145 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
Mathieu Chartierc7853442015-03-27 14:35:38 -07002146 const size_t num_elements = arr->GetLength();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002147 dst->SetClass(GetImageAddress(arr->GetClass()));
2148 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002149 for (size_t i = 0, count = num_elements; i < count; ++i) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002150 void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002151 if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002152 auto it = native_object_relocations_.find(elem);
Vladimir Marko05792b92015-08-03 11:56:49 +01002153 if (UNLIKELY(it == native_object_relocations_.end())) {
// The relocation entry is missing, so 'it' must not be dereferenced; choose the diagnostic
// based on the bin type of the array instead.
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07002154 if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002155 auto* method = reinterpret_cast<ArtMethod*>(elem);
David Sehr709b0702016-10-13 09:12:37 -07002156 LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
2157 << method << " idx=" << i << "/" << num_elements << " with declaring class "
2158 << Class::PrettyClass(method->GetDeclaringClass());
Mathieu Chartiere401d142015-04-22 13:56:20 -07002159 } else {
2160 CHECK_EQ(array_type, kBinArtField);
2161 auto* field = reinterpret_cast<ArtField*>(elem);
David Sehr709b0702016-10-13 09:12:37 -07002162 LOG(FATAL) << "No relocation entry for ArtField " << field->PrettyField() << " @ "
Mathieu Chartiere401d142015-04-22 13:56:20 -07002163 << field << " idx=" << i << "/" << num_elements << " with declaring class "
David Sehr709b0702016-10-13 09:12:37 -07002164 << Class::PrettyClass(field->GetDeclaringClass());
Mathieu Chartiere401d142015-04-22 13:56:20 -07002165 }
Vladimir Marko05792b92015-08-03 11:56:49 +01002166 UNREACHABLE();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002167 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002168 }
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002169 CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002170 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002171}
2172
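// Copy a single managed object into its image buffer: mark it in the image bitmap, copy the
// object bytes, restore the saved hash code (or the default lock word), set the mark bit when
// using the Baker read barrier, and then fix up all of its references.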
2173void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002174 if (IsInBootImage(obj)) {
2175 return;
2176 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002177 size_t offset = GetImageOffset(obj);
Vladimir Marko944da602016-02-19 12:27:55 +00002178 size_t oat_index = GetOatIndex(obj);
2179 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002180 auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
2181 DCHECK_LT(offset, image_info.image_end_);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07002182 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002183
Jeff Haodcdc85b2015-12-04 14:06:18 -08002184 image_info.image_bitmap_->Set(dst); // Mark the obj as live.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07002185
2186 const size_t n = obj->SizeOf();
Jeff Haodcdc85b2015-12-04 14:06:18 -08002187 DCHECK_LE(offset + n, image_info.image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07002188 memcpy(dst, src, n);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002189
Mathieu Chartierad2541a2013-10-25 10:05:23 -07002190 // Write in a hash code for objects which had inflated monitors or a hash code in their monitor
2191 // word.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07002192 const auto it = saved_hashcode_map_.find(obj);
2193 dst->SetLockWord(it != saved_hashcode_map_.end() ?
2194 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
Mathieu Chartier36a270a2016-07-28 18:08:51 -07002195 if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
2196 // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
2197 // safe since we mark all of the objects that may reference non immune objects as gray.
2198 CHECK(dst->AtomicSetMarkBit(0, 1));
2199 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002200 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002201}
2202
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002203// Rewrite all the references in the copied object to point to their image address equivalent
Vladimir Markoad06b982016-11-17 16:38:59 +00002204class ImageWriter::FixupVisitor {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07002205 public:
2206 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
2207 }
2208
Mathieu Chartierda7c6502015-07-23 16:01:26 -07002209 // Ignore class roots since we don't have a way to map them to the destination. These are handled
2210 // with other logic.
2211 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
2212 const {}
2213 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
2214
2215
Mathieu Chartier31e88222016-10-14 18:43:19 -07002216 void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002217 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartier31e88222016-10-14 18:43:19 -07002218 ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002219 // Copy the reference and record the fixup if necessary.
2220 image_writer_->CopyReference(
2221 copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
2222 ref.Ptr());
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07002223 }
2224
2225 // java.lang.ref.Reference visitor.
Mathieu Chartier31e88222016-10-14 18:43:19 -07002226 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
2227 ObjPtr<mirror::Reference> ref) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002228 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002229 operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07002230 }
2231
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002232 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07002233 ImageWriter* const image_writer_;
2234 mirror::Object* const copy_;
2235};
2236
Vladimir Markoad06b982016-11-17 16:38:59 +00002237class ImageWriter::FixupClassVisitor FINAL : public FixupVisitor {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002238 public:
2239 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
2240 }
2241
Mathieu Chartier31e88222016-10-14 18:43:19 -07002242 void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07002243 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002244 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002245 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002246 }
2247
Mathieu Chartier31e88222016-10-14 18:43:19 -07002248 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
2249 ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002250 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002251 LOG(FATAL) << "Reference not expected here.";
2252 }
2253};
2254
Vladimir Marko05792b92015-08-03 11:56:49 +01002255uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
2256 DCHECK(obj != nullptr);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002257 DCHECK(!IsInBootImage(obj));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002258 auto it = native_object_relocations_.find(obj);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002259 CHECK(it != native_object_relocations_.end()) << obj << " spaces "
2260 << Runtime::Current()->GetHeap()->DumpSpaces();
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07002261 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko05792b92015-08-03 11:56:49 +01002262 return relocation.offset;
2263}
2264
2265template <typename T>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002266std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002267 std::ostringstream oss;
2268 oss << ptr;
2269 return oss.str();
2270}
2271
2272template <>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002273std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
David Sehr709b0702016-10-13 09:12:37 -07002274 return ArtMethod::PrettyMethod(method);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002275}
2276
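// NativeLocationInImage returns the address a native object will have once the image is mapped
// at image_begin_, whereas NativeCopyLocation (below) returns the address of the writer's local
// copy inside the image buffer that is being written.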
2277template <typename T>
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002278T* ImageWriter::NativeLocationInImage(T* obj) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08002279 if (obj == nullptr || IsInBootImage(obj)) {
2280 return obj;
2281 } else {
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002282 auto it = native_object_relocations_.find(obj);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002283 CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj)
2284 << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces();
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002285 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko944da602016-02-19 12:27:55 +00002286 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002287 return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002288 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002289}
2290
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002291template <typename T>
Jeff Haodcdc85b2015-12-04 14:06:18 -08002292T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
2293 if (obj == nullptr || IsInBootImage(obj)) {
2294 return obj;
2295 } else {
Vladimir Marko944da602016-02-19 12:27:55 +00002296 size_t oat_index = GetOatIndexForDexCache(dex_cache);
2297 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002298 return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
2299 }
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002300}
2301
Vladimir Markoad06b982016-11-17 16:38:59 +00002302class ImageWriter::NativeLocationVisitor {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002303 public:
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002304 explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002305
2306 template <typename T>
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002307 T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
2308 if (dest_addr != nullptr) {
2309 image_writer_->CopyAndFixupPointer(dest_addr, ptr);
2310 }
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002311 return image_writer_->NativeLocationInImage(ptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002312 }
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002313
2314 private:
2315 ImageWriter* const image_writer_;
2316};
2317
2318void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002319 orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
Mathieu Chartierc7853442015-03-27 14:35:38 -07002320 FixupClassVisitor visitor(this, copy);
Mathieu Chartier31e88222016-10-14 18:43:19 -07002321 ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);
Andreas Gampeace0dc12016-01-20 13:33:13 -08002322
2323 // Remove the clinitThreadId. This is required for image determinism.
2324 copy->SetClinitThreadId(static_cast<pid_t>(0));
Mathieu Chartierc7853442015-03-27 14:35:38 -07002325}
2326
Ian Rogersef7d42f2014-01-06 12:55:46 -08002327void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07002328 DCHECK(orig != nullptr);
2329 DCHECK(copy != nullptr);
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07002330 if (kUseBakerReadBarrier) {
2331 orig->AssertReadBarrierState();
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08002332 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002333 auto* klass = orig->GetClass();
2334 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
Vladimir Marko05792b92015-08-03 11:56:49 +01002335 // Is this a native pointer array?
Mathieu Chartiere401d142015-04-22 13:56:20 -07002336 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
2337 if (it != pointer_arrays_.end()) {
2338 // Should only need to fixup every pointer array exactly once.
2339 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
2340 pointer_arrays_.erase(it);
2341 return;
2342 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002343 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002344 if (orig->IsClass()) {
2345 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002346 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002347 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
2348 // Need to go update the ArtMethod.
Neil Fuller0e844392016-09-08 13:43:31 +01002349 auto* dest = down_cast<mirror::Executable*>(copy);
2350 auto* src = down_cast<mirror::Executable*>(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002351 ArtMethod* src_method = src->GetArtMethod();
Jing Ji96e640c2016-08-31 21:21:37 -05002352 dest->SetArtMethod(GetImageMethodAddress(src_method));
Vladimir Marko05792b92015-08-03 11:56:49 +01002353 } else if (!klass->IsArrayClass()) {
2354 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2355 if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
2356 FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08002357 } else if (klass->IsClassLoaderClass()) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002358 mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
Vladimir Marko05792b92015-08-03 11:56:49 +01002359 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
2360 // ClassLoader.
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002361 copy_loader->SetClassTable(nullptr);
Mathieu Chartier5550c562015-09-22 15:18:04 -07002362 // Also set allocator to null to be safe. The allocator is created when we create the class
2363 // table. We also never expect to unload things in the image since they are held live as
2364 // roots.
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002365 copy_loader->SetAllocator(nullptr);
Vladimir Marko05792b92015-08-03 11:56:49 +01002366 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002367 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002368 FixupVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07002369 orig->VisitReferences(visitor, visitor);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002370 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002371}
2372
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002373class ImageWriter::ImageAddressVisitorForDexCacheArray {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002374 public:
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002375 explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
2376 : image_writer_(image_writer) {}
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002377
2378 template <typename T>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002379 T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002380 return image_writer_->GetImageAddress(ptr);
2381 }
2382
2383 private:
2384 ImageWriter* const image_writer_;
2385};
2386
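// Fix up a copied DexCache: each native array field (strings, resolved types, methods, fields,
// method types and call sites) is stored as a 64-bit pointer to its image location, and the
// contents of the writer's copy of each array are rewritten to image addresses.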
Vladimir Marko05792b92015-08-03 11:56:49 +01002387void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
2388 mirror::DexCache* copy_dex_cache) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002389 ImageAddressVisitorForDexCacheArray fixup_visitor(this);
Vladimir Marko05792b92015-08-03 11:56:49 +01002390 // Though the DexCache array fields are usually treated as native pointers, we set the full
2391 // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
2392 // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
2393 // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset))).
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07002394 mirror::StringDexCacheType* orig_strings = orig_dex_cache->GetStrings();
Vladimir Marko05792b92015-08-03 11:56:49 +01002395 if (orig_strings != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002396 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002397 NativeLocationInImage(orig_strings),
Andreas Gampe542451c2016-07-26 09:02:02 -07002398 PointerSize::k64);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002399 orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
Vladimir Marko05792b92015-08-03 11:56:49 +01002400 }
Vladimir Marko8d6768d2017-03-14 10:13:21 +00002401 mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
Vladimir Marko05792b92015-08-03 11:56:49 +01002402 if (orig_types != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002403 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002404 NativeLocationInImage(orig_types),
Andreas Gampe542451c2016-07-26 09:02:02 -07002405 PointerSize::k64);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002406 orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002407 fixup_visitor);
Vladimir Marko05792b92015-08-03 11:56:49 +01002408 }
2409 ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
2410 if (orig_methods != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002411 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002412 NativeLocationInImage(orig_methods),
Andreas Gampe542451c2016-07-26 09:02:02 -07002413 PointerSize::k64);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002414 ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
Vladimir Marko05792b92015-08-03 11:56:49 +01002415 for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
2416 ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002417 // NativeLocationInImage also handles runtime methods since these have relocation info.
2418 ArtMethod* copy = NativeLocationInImage(orig);
Vladimir Marko05792b92015-08-03 11:56:49 +01002419 mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
2420 }
2421 }
Vladimir Markof44d36c2017-03-14 14:18:46 +00002422 mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
Vladimir Marko05792b92015-08-03 11:56:49 +01002423 if (orig_fields != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002424 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002425 NativeLocationInImage(orig_fields),
Andreas Gampe542451c2016-07-26 09:02:02 -07002426 PointerSize::k64);
Vladimir Markof44d36c2017-03-14 14:18:46 +00002427 mirror::FieldDexCacheType* copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
Vladimir Marko05792b92015-08-03 11:56:49 +01002428 for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
Vladimir Markof44d36c2017-03-14 14:18:46 +00002429 mirror::FieldDexCachePair orig =
2430 mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002431 mirror::FieldDexCachePair copy = orig;
2432 copy.object = NativeLocationInImage(orig.object);
Vladimir Markof44d36c2017-03-14 14:18:46 +00002433 mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
Vladimir Marko05792b92015-08-03 11:56:49 +01002434 }
2435 }
Narayan Kamath7fe56582016-10-14 18:49:12 +01002436 mirror::MethodTypeDexCacheType* orig_method_types = orig_dex_cache->GetResolvedMethodTypes();
2437 if (orig_method_types != nullptr) {
2438 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodTypesOffset(),
2439 NativeLocationInImage(orig_method_types),
2440 PointerSize::k64);
2441 orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002442 fixup_visitor);
Narayan Kamath7fe56582016-10-14 18:49:12 +01002443 }
Orion Hodsonc069a302017-01-18 09:23:12 +00002444 GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
2445 if (orig_call_sites != nullptr) {
2446 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedCallSitesOffset(),
2447 NativeLocationInImage(orig_call_sites),
2448 PointerSize::k64);
2449 orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002450 fixup_visitor);
Orion Hodsonc069a302017-01-18 09:23:12 +00002451 }
Andreas Gampeace0dc12016-01-20 13:33:13 -08002452
2453 // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
2454 // compiler pointers in here will make the output non-deterministic.
2455 copy_dex_cache->SetDexFile(nullptr);
Vladimir Marko05792b92015-08-03 11:56:49 +01002456}
2457
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002458const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
2459 DCHECK_LT(type, kOatAddressCount);
2460 // If we are compiling an app image, we need to use the stubs of the boot image.
2461 if (compile_app_image_) {
2462 // Use the current image pointers.
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002463 const std::vector<gc::space::ImageSpace*>& image_spaces =
Jeff Haodcdc85b2015-12-04 14:06:18 -08002464 Runtime::Current()->GetHeap()->GetBootImageSpaces();
2465 DCHECK(!image_spaces.empty());
2466 const OatFile* oat_file = image_spaces[0]->GetOatFile();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002467 CHECK(oat_file != nullptr);
2468 const OatHeader& header = oat_file->GetOatHeader();
2469 switch (type) {
2470 // TODO: We could maybe clean this up if we stored them in an array in the oat header.
2471 case kOatAddressQuickGenericJNITrampoline:
2472 return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
2473 case kOatAddressInterpreterToInterpreterBridge:
2474 return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
2475 case kOatAddressInterpreterToCompiledCodeBridge:
2476 return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
2477 case kOatAddressJNIDlsymLookup:
2478 return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
2479 case kOatAddressQuickIMTConflictTrampoline:
2480 return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
2481 case kOatAddressQuickResolutionTrampoline:
2482 return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
2483 case kOatAddressQuickToInterpreterBridge:
2484 return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
2485 default:
2486 UNREACHABLE();
2487 }
2488 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08002489 const ImageInfo& primary_image_info = GetImageInfo(0);
2490 return GetOatAddressForOffset(primary_image_info.oat_address_offsets_[type], primary_image_info);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002491}
2492
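// Select the quick entrypoint to store for a method in the image: compiled code when it exists
// and is directly usable, the generic JNI trampoline for native methods without code, the
// quick-to-interpreter bridge for non-native methods without code, and the resolution
// trampoline for static methods of classes that still need initialization.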
Jeff Haodcdc85b2015-12-04 14:06:18 -08002493const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
2494 const ImageInfo& image_info,
2495 bool* quick_is_interpreted) {
David Sehr709b0702016-10-13 09:12:37 -07002496 DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
2497 DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
2498 DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
2499 DCHECK(method->IsInvokable()) << method->PrettyMethod();
2500 DCHECK(!IsInBootImage(method)) << method->PrettyMethod();
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002501
2502 // Use original code if it exists. Otherwise, set the code pointer to the resolution
2503 // trampoline.
2504
2505 // Quick entrypoint:
Igor Murashkin0ccfe2c2016-02-19 16:41:44 -08002506 const void* quick_oat_entry_point =
2507 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
2508 const uint8_t* quick_code;
2509
2510 if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
2511 DCHECK(method->IsCopied());
2512 // If the code is not in the oat file corresponding to this image (e.g. default methods), use
// the entrypoint pointer directly instead of treating it as an offset into this oat file.
2513 quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
2514 } else {
2515 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
2516 quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
2517 }
2518
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002519 *quick_is_interpreted = false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07002520 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
2521 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002522 // We have code for a non-static or initialized method, just use the code.
2523 } else if (quick_code == nullptr && method->IsNative() &&
2524 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
2525 // Non-static or initialized native method missing compiled code, use generic JNI version.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002526 quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002527 } else if (quick_code == nullptr && !method->IsNative()) {
2528 // We don't have code at all for a non-native method, use the interpreter.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002529 quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002530 *quick_is_interpreted = true;
2531 } else {
2532 CHECK(!method->GetDeclaringClass()->IsInitialized());
2533 // We have code for a static method, but need to go through the resolution stub for class
2534 // initialization.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002535 quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
2536 }
2537 if (!IsInBootOatFile(quick_code)) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08002538 // DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002539 }
2540 return quick_code;
2541}
2542
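// Copy an ArtMethod into the image and fix up its fields: the declaring class reference, the
// resolved methods array pointer, and the entrypoints, which are chosen depending on whether
// the method is a runtime method, is invokable, or is native.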
Jeff Haodcdc85b2015-12-04 14:06:18 -08002543void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
2544 ArtMethod* copy,
2545 const ImageInfo& image_info) {
Mingyao Yange8fcd012017-01-20 10:43:30 -08002546 if (orig->IsAbstract()) {
2547 // Ignore the single-implementation info for abstract method.
2548 // Do this on orig instead of copy, otherwise there is a crash because methods
2549 // are copied before classes.
2550 // TODO: handle fixup of single-implementation method for abstract method.
2551 orig->SetHasSingleImplementation(false);
2552 orig->SetSingleImplementation(
2553 nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
2554 }
2555
Vladimir Marko14632852015-08-17 12:07:23 +01002556 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002557
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002558 CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
2559
Vladimir Marko05792b92015-08-03 11:56:49 +01002560 ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002561 copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002562
Ian Rogers848871b2013-08-05 10:56:33 -07002563 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
2564 // oat_begin_
Brian Carlstrom7940e442013-07-12 13:46:57 -07002565
Ian Rogers848871b2013-08-05 10:56:33 -07002566 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07002567 Runtime* runtime = Runtime::Current();
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002568 if (orig->IsRuntimeMethod()) {
2569 ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
2570 if (orig_table != nullptr) {
2571 // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
2572 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2573 GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
2574 copy->SetImtConflictTable(NativeLocationInImage(orig_table), target_ptr_size_);
2575 } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
2576 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2577 GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
2578 } else {
2579 bool found_one = false;
Andreas Gampe8228cdf2017-05-30 15:03:54 -07002580 for (size_t i = 0; i < static_cast<size_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
2581 auto idx = static_cast<CalleeSaveType>(i);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002582 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
2583 found_one = true;
2584 break;
2585 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002586 }
David Sehr709b0702016-10-13 09:12:37 -07002587 CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002588 CHECK(copy->IsRuntimeMethod());
Mathieu Chartiere401d142015-04-22 13:56:20 -07002589 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002590 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07002591 // We assume all methods have code. If they don't currently then we set them to use the
2592 // resolution trampoline. Abstract methods never have code and so we need to make sure their
2593 // use results in an AbstractMethodError. We use the interpreter to achieve this.
Alex Light9139e002015-10-09 15:59:48 -07002594 if (UNLIKELY(!orig->IsInvokable())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002595 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002596 GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07002597 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002598 bool quick_is_interpreted;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002599 const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002600 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02002601
Sebastien Hertze1d07812014-05-21 15:44:09 +02002602 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07002603 if (orig->IsNative()) {
2604 // The native method's pointer is set to a stub that performs the lookup via dlsym.
2605 // Note this is not the code_ pointer, that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002606 copy->SetEntryPointFromJniPtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002607 GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07002608 }
2609 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002610 }
2611}
2612
Jeff Haodcdc85b2015-12-04 14:06:18 -08002613size_t ImageWriter::GetBinSizeSum(ImageWriter::ImageInfo& image_info, ImageWriter::Bin up_to) const {
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002614 DCHECK_LE(up_to, kBinSize);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002615 return std::accumulate(&image_info.bin_slot_sizes_[0],
2616 &image_info.bin_slot_sizes_[up_to],
2617 /*init*/0);
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002618}
2619
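// A BinSlot encodes the destination bin and the index within that bin in a 32-bit lock word:
// the bin occupies kBinBits bits starting at kBinShift, and the remaining bits hold the index,
// which must be aligned to kObjectAlignment.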
2620ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
2621 // These values may need to get updated if more bins are added to the enum Bin
Mathieu Chartiere401d142015-04-22 13:56:20 -07002622 static_assert(kBinBits == 3, "wrong number of bin bits");
2623 static_assert(kBinShift == 27, "wrong number of shift");
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002624 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
2625
2626 DCHECK_LT(GetBin(), kBinSize);
2627 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
2628}
2629
2630ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
2631 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
2632 DCHECK_EQ(index, GetIndex());
2633}
2634
2635ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
2636 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
2637}
2638
2639uint32_t ImageWriter::BinSlot::GetIndex() const {
2640 return lockword_ & ~kBinMask;
2641}
2642
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002643ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
2644 switch (type) {
2645 case kNativeObjectRelocationTypeArtField:
2646 case kNativeObjectRelocationTypeArtFieldArray:
2647 return kBinArtField;
2648 case kNativeObjectRelocationTypeArtMethodClean:
2649 case kNativeObjectRelocationTypeArtMethodArrayClean:
2650 return kBinArtMethodClean;
2651 case kNativeObjectRelocationTypeArtMethodDirty:
2652 case kNativeObjectRelocationTypeArtMethodArrayDirty:
2653 return kBinArtMethodDirty;
Vladimir Marko05792b92015-08-03 11:56:49 +01002654 case kNativeObjectRelocationTypeDexCacheArray:
2655 return kBinDexCacheArray;
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002656 case kNativeObjectRelocationTypeRuntimeMethod:
2657 return kBinRuntimeMethod;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002658 case kNativeObjectRelocationTypeIMTable:
2659 return kBinImTable;
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002660 case kNativeObjectRelocationTypeIMTConflictTable:
2661 return kBinIMTConflictTable;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002662 }
2663 UNREACHABLE();
2664}
2665
Vladimir Marko944da602016-02-19 12:27:55 +00002666size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
Mathieu Chartier496577f2016-09-20 15:33:31 -07002667 if (!IsMultiImage()) {
Vladimir Marko944da602016-02-19 12:27:55 +00002668 return GetDefaultOatIndex();
Jeff Haodcdc85b2015-12-04 14:06:18 -08002669 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07002670 auto it = oat_index_map_.find(obj);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002671 DCHECK(it != oat_index_map_.end()) << obj;
Mathieu Chartier496577f2016-09-20 15:33:31 -07002672 return it->second;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002673}
2674
Vladimir Marko944da602016-02-19 12:27:55 +00002675size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
Mathieu Chartier496577f2016-09-20 15:33:31 -07002676 if (!IsMultiImage()) {
Vladimir Marko944da602016-02-19 12:27:55 +00002677 return GetDefaultOatIndex();
Jeff Haodcdc85b2015-12-04 14:06:18 -08002678 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07002679 auto it = dex_file_oat_index_map_.find(dex_file);
2680 DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
2681 return it->second;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002682}
2683
Mathieu Chartierc4f39252016-10-05 18:32:08 -07002684size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
2685 return (dex_cache == nullptr)
2686 ? GetDefaultOatIndex()
2687 : GetOatIndexForDexFile(dex_cache->GetDexFile());
Jeff Haodcdc85b2015-12-04 14:06:18 -08002688}
2689
Vladimir Marko944da602016-02-19 12:27:55 +00002690void ImageWriter::UpdateOatFileLayout(size_t oat_index,
2691 size_t oat_loaded_size,
2692 size_t oat_data_offset,
2693 size_t oat_data_size) {
2694 const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
2695 for (const ImageInfo& info : image_infos_) {
2696 DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
2697 }
2698 DCHECK(images_end != nullptr); // Image space must be ready.
Jeff Haodcdc85b2015-12-04 14:06:18 -08002699
Vladimir Marko944da602016-02-19 12:27:55 +00002700 ImageInfo& cur_image_info = GetImageInfo(oat_index);
2701 cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
2702 cur_image_info.oat_loaded_size_ = oat_loaded_size;
2703 cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
2704 cur_image_info.oat_size_ = oat_data_size;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002705
Mathieu Chartier14567fd2016-01-28 20:33:36 -08002706 if (compile_app_image_) {
2707 CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
2708 return;
2709 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08002710
2711 // Update the oat_offset of the next image info.
Vladimir Marko944da602016-02-19 12:27:55 +00002712 if (oat_index + 1u != oat_filenames_.size()) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08002713 // There is a following one.
Vladimir Marko944da602016-02-19 12:27:55 +00002714 ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002715 next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
2716 }
2717}
2718
Vladimir Marko944da602016-02-19 12:27:55 +00002719void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
2720 ImageInfo& cur_image_info = GetImageInfo(oat_index);
2721 cur_image_info.oat_checksum_ = oat_header.GetChecksum();
2722
2723 if (oat_index == GetDefaultOatIndex()) {
2724 // Primary oat file, read the trampolines.
2725 cur_image_info.oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
2726 oat_header.GetInterpreterToInterpreterBridgeOffset();
2727 cur_image_info.oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
2728 oat_header.GetInterpreterToCompiledCodeBridgeOffset();
2729 cur_image_info.oat_address_offsets_[kOatAddressJNIDlsymLookup] =
2730 oat_header.GetJniDlsymLookupOffset();
2731 cur_image_info.oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
2732 oat_header.GetQuickGenericJniTrampolineOffset();
2733 cur_image_info.oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
2734 oat_header.GetQuickImtConflictTrampolineOffset();
2735 cur_image_info.oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
2736 oat_header.GetQuickResolutionTrampolineOffset();
2737 cur_image_info.oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
2738 oat_header.GetQuickToInterpreterBridgeOffset();
2739 }
2740}
2741
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002742ImageWriter::ImageWriter(
2743 const CompilerDriver& compiler_driver,
2744 uintptr_t image_begin,
2745 bool compile_pic,
2746 bool compile_app_image,
2747 ImageHeader::StorageMode image_storage_mode,
Vladimir Marko944da602016-02-19 12:27:55 +00002748 const std::vector<const char*>& oat_filenames,
2749 const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map)
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002750 : compiler_driver_(compiler_driver),
2751 global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
2752 image_objects_offset_begin_(0),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002753 compile_pic_(compile_pic),
2754 compile_app_image_(compile_app_image),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002755 target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
Vladimir Marko944da602016-02-19 12:27:55 +00002756 image_infos_(oat_filenames.size()),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002757 dirty_methods_(0u),
2758 clean_methods_(0u),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002759 image_storage_mode_(image_storage_mode),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002760 oat_filenames_(oat_filenames),
Vladimir Marko944da602016-02-19 12:27:55 +00002761 dex_file_oat_index_map_(dex_file_oat_index_map) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002762 CHECK_NE(image_begin, 0U);
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002763 std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
Mathieu Chartier901e0702016-02-19 13:42:48 -08002764 CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
2765 << "Compiling a boot image should occur iff there are no boot image spaces loaded";
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002766}
2767
Mathieu Chartier1f47b672016-01-07 16:29:01 -08002768ImageWriter::ImageInfo::ImageInfo()
2769 : intern_table_(new InternTable),
2770 class_table_(new ClassTable) {}
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002771
Mathieu Chartier8c19d242017-03-06 12:35:10 -08002772void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
2773 ObjPtr<mirror::Object> src) {
2774 dest->Assign(GetImageAddress(src.Ptr()));
2775}
2776
2777void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
2778 ObjPtr<mirror::Object> src) {
2779 dest->Assign(GetImageAddress(src.Ptr()));
2780}
2781
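// Write a native pointer into the image copy: translate it to its relocated image address
// (unless it is in the boot image) and store it with the width of the target pointer size.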
2782void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
2783 void* new_value = value;
2784 if (value != nullptr && !IsInBootImage(value)) {
2785 auto it = native_object_relocations_.find(value);
2786 CHECK(it != native_object_relocations_.end()) << value;
2787 const NativeObjectRelocation& relocation = it->second;
2788 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
2789 new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
2790 }
2791 if (target_ptr_size_ == PointerSize::k32) {
2792 *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
2793 } else {
2794 *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
2795 }
2796}
2797
2798
Brian Carlstrom7940e442013-07-12 13:46:57 -07002799} // namespace art