/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>
#include <lz4.h>
#include <lz4hc.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "imt_conflict_table.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
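// When bin packing is enabled, AssignImageBinSlot() below tags each object with
// a (bin, intra-bin offset) pair; the per-bin byte totals accumulated along the
// way become contiguous per-bin regions once final offsets are assigned.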
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {
      return true;
    }
  }
  return false;
}

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
      return true;
    }
  }
  return false;
}

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    if (!compile_app_image_) {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information
    }
  }
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or
  // oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

  {
    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CreateHeader(i);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        }
      } else {
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      image_file->Erase();
      return false;
    }

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);
    size_t data_size;
    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();

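    // A sketch of the resulting file layout, as produced by the writes below
    // (the header is deliberately written last; see the comment further down):
    //   [ImageHeader][image data, possibly LZ4-compressed][padding to page size][image bitmap]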
    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4HC:  // Fall-through.
      case ImageHeader::kStorageModeLZ4: {
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compress(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);

        break;
      }
      /*
       * Disabled due to image_test64 flakiness. Both use same decompression. b/27560444
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is same as non HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);
        break;
      }
      */
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
        break;
      }
      default: {
        LOG(FATAL) << "Unsupported";
        UNREACHABLE();
      }
    }

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
      if (kIsDebugBuild) {
        std::unique_ptr<uint8_t[]> temp(new uint8_t[image_data_size]);
        const size_t decompressed_size = LZ4_decompress_safe(
            reinterpret_cast<char*>(&compressed_data[0]),
            reinterpret_cast<char*>(&temp[0]),
            data_size,
            image_data_size);
        CHECK_EQ(decompressed_size, image_data_size);
        CHECK_EQ(memcmp(image_data, &temp[0], image_data_size), 0) << image_storage_mode_;
      }
    }

    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;
      image_file->Erase();
      return false;
    }

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
    // convenience.
    const ImageSection& bitmap_section = image_header->GetImageSection(
        ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    }
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;
      image_file->Erase();
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      image_file->Erase();
      return false;
    }

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
                                 sizeof(ImageHeader),
                                 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      image_file->Erase();
      return false;
    }

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }
  }
  return true;
}

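// During layout the object lock word doubles as scratch space: it first holds a
// BinSlot (SetImageBinSlot) and later a forwarding address (SetImageOffset).
// A pre-existing hash code is stashed in saved_hashcode_map_ so it can be
// restored once the object has been copied into the image.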
void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

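  // A BinSlot packs the destination bin and the byte offset within that bin into
  // a single 32-bit value, so the whole slot fits in the lock word (see the
  // BinSlot class in image_writer.h).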
  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(dex_file, image_info.bin_slot_sizes_[kBinDexCacheArray]);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.bin_slot_sizes_[kBinDexCacheArray] += layout.Size();
  }
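  // At this point each dex file owns a fixed, contiguous chunk of the
  // kBinDexCacheArray bin; the offsets of the individual arrays within that
  // chunk come from DexCacheArraysLayout, as used below.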

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache)) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
        << "Dex cache should have been pruned " << dex_file->GetLocation()
        << "; possibly in class path";
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);
  }
}

void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset, DexCache* dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(array,
        NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //     becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->GetClass<kVerifyNone>() ==
        Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangObject)) {
      // Instance of java lang object, probably a lock object. This means it will be dirty when we
      // synchronize on it.
      bin = kBinMiscDirty;
    } else if (object->IsDexCache()) {
      // Dex file field becomes dirty when the image is loaded.
      bin = kBinMiscDirty;
    }
    // else bin = kBinRegular
  }

  // Assign the oat index too.
  DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
  oat_index_map_.emplace(object, oat_index);

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = image_info.bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++image_info.bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized classes are highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(unused_sections), kPageSize);

    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 nullptr,
                                                 length,
                                                 PROT_READ | PROT_WRITE,
                                                 false,
                                                 false,
                                                 &error_msg));
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool operator()(Class* c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

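// Entry point for the recursive check below: an app image class is pruned if it
// is (or transitively references, via superclass, interfaces, component type, or
// static fields) a boot class loader class, a class outside the compiler
// driver's image class set, or a class that failed verification.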
bool ImageWriter::PruneAppImageClass(mirror::Class* klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Class*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);
}

bool ImageWriter::PruneAppImageClassInternal(
    mirror::Class* klass,
    bool* early_exit,
    std::unordered_set<mirror::Class*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass)) {
    return false;
  }
  auto found = prune_class_memo_.find(klass);
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependencies, return false but do not store the result in the memoization table.
  if (visited->find(klass) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass);
  bool result = IsBootClassLoaderClass(klass);
  std::string temp;
  // Prune if not an image class; this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // app image.
  if (klass->GetStatus() == mirror::Class::kStatusError) {
    result = true;
  } else {
    CHECK(klass->GetVerifyError() == nullptr) << PrettyClass(klass);
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
                                                    &my_early_exit,
                                                    visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
                                                  &my_early_exit,
                                                  visited);
  }
  // Check static fields and their classes.
  size_t num_static_fields = klass->NumReferenceStaticFields();
  if (num_static_fields != 0 && klass->IsResolved()) {
    // Presumably GC can happen when we are cross compiling; it should not cause performance
    // problems to do pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
                                                        &my_early_exit,
                                                        visited);
        } else {
          result = result || PruneAppImageClassInternal(ref->GetClass(),
                                                        &my_early_exit,
                                                        visited);
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
                                                &my_early_exit,
                                                visited);
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass);
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in that case the cycle was in
  // a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}

bool ImageWriter::KeepClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in boot image, return true.
    return true;
  }
  std::string temp;
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);
  }
  return true;
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool operator()(Class* klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass)) {
      classes_to_prune_.insert(klass);
    }
    return true;
  }

  std::unordered_set<mirror::Class*> classes_to_prune_;
  ImageWriter* const image_writer_;
};

void ImageWriter::PruneNonImageClasses() {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Clear class table strong roots so that dex caches can get pruned. We require pruning the class
  // path dex caches.
  class_linker->ClearClassTableStrongRoots();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  VLOG(compiler) << "Pruning " << visitor.classes_to_prune_.size() << " classes";
  for (mirror::Class* klass : visitor.classes_to_prune_) {
    std::string temp;
    const char* name = klass->GetDescriptor(&temp);
    VLOG(compiler) << "Pruning class " << name;
    if (!compile_app_image_) {
      DCHECK(IsBootClassLoaderClass(klass));
    }
    bool result = class_linker->RemoveClass(name, klass->GetClassLoader());
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(__FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    if (self->IsJWeakCleared(data.weak_root)) {
      continue;
    }
    mirror::DexCache* dex_cache = self->DecodeJObject(data.weak_root)->AsDexCache();
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !KeepClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
    for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* method =
          mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
      DCHECK(method != nullptr) << "Expected resolution method instead of null method";
      mirror::Class* declaring_class = method->GetDeclaringClass();
      // Copied methods may be held live by a class which was not an image class but has a
      // declaring class which is an image class. Set the entry to the resolution method to be
      // safe and prevent dangling pointers.
      if (method->IsCopied() || !KeepClass(declaring_class)) {
        mirror::DexCache::SetElementPtrSize(resolved_methods,
                                            i,
                                            resolution_method,
                                            target_ptr_size_);
      } else {
        // Check that the class is still in the classes table.
        DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
            << PrettyClass(declaring_class) << " not in class linker table";
      }
    }
    ArtField** resolved_fields = dex_cache->GetResolvedFields();
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = mirror::DexCache::GetElementPtrSize(resolved_fields, i, target_ptr_size_);
      if (field != nullptr && !KeepClass(field->GetDeclaringClass().Decode())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();

  // Clear to save RAM.
  prune_class_memo_.clear();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
    Class* klass = obj->AsClass();
    if (!image_writer->KeepClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->KeepClass(klass)) << klass->GetDescriptor(&temp)
                                            << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
  Thread* const self = Thread::Current();
  for (const ImageInfo& image_info : image_infos_) {
    mirror::String* const found = image_info.intern_table_->LookupStrong(self, string);
    DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
    if (found != nullptr) {
      return found;
    }
  }
  if (compile_app_image_) {
    Runtime* const runtime = Runtime::Current();
    mirror::String* found = runtime->GetInternTable()->LookupStrong(self, string);
    // If we found it in the runtime intern table it could either be in the boot image or interned
    // during app image compilation. If it was in the boot image return that, otherwise return null
    // since it belongs to another image space.
    if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found)) {
      return found;
    }
    DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
  }
  return nullptr;
}

ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  std::unordered_set<const DexFile*> image_dex_files;
  for (auto& pair : dex_file_oat_index_map_) {
    const DexFile* image_dex_file = pair.first;
    size_t image_oat_index = pair.second;
    if (oat_index == image_oat_index) {
      image_dex_files.insert(image_dex_file);
    }
  }

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice: first to get the number
  // of dex caches, then again to copy them. We check that the number
  // of dex caches does not change.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001003 size_t dex_cache_count = 0;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    // Count the number of dex caches not in the boot image.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache)) {
        dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
      }
    }
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    size_t non_image_dex_caches = 0;
    // Re-count the number of non-image dex caches.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache)) {
        non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
      }
    }
    CHECK_EQ(dex_cache_count, non_image_dex_caches)
        << "The number of non-image dex caches changed.";
    size_t i = 0;
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache) && image_dex_files.find(dex_file) != image_dex_files.end()) {
        dex_caches->Set<false>(i, dex_cache);
        ++i;
      }
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}
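// Assigns a bin slot to |obj| in the image for |oat_index| if it does not have one yet, pushing
// the object onto |work_stack| so that its references are processed later. Returns the object
// that references to |obj| should be forwarded to; this differs from |obj| only for strings that
// are already interned elsewhere.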
mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
                                              mirror::Object* obj,
                                              size_t oat_index) {
  if (obj == nullptr || IsInBootImage(obj)) {
    // Object is null or already in the image, there is no work to do.
    return obj;
  }
  if (!IsImageBinSlotAssigned(obj)) {
    // We want to intern all strings, but we also need to assign an offset for the source string.
    // Since the pruning phase has already happened, if we intern a string to one in the image we
    // still end up copying an unreachable string.
    if (obj->IsString()) {
      // Check whether the string is already interned in another image info so that the intern
      // tables of two different images do not contain the same string.
      mirror::String* interned = FindInternedString(obj->AsString());
      if (interned == nullptr) {
        // Not in another image space, insert into our table.
        interned = GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString());
        DCHECK_EQ(interned, obj);
      }
    } else if (obj->IsDexCache()) {
      oat_index = GetOatIndexForDexCache(obj->AsDexCache());
    } else if (obj->IsClass()) {
      // Visit and assign offsets for fields and field arrays.
      mirror::Class* as_klass = obj->AsClass();
      mirror::DexCache* dex_cache = as_klass->GetDexCache();
      DCHECK_NE(as_klass->GetStatus(), mirror::Class::kStatusError);
      if (compile_app_image_) {
        // Extra sanity check: no boot class loader classes should be left!
        CHECK(!IsBootClassLoaderClass(as_klass)) << PrettyClass(as_klass);
      }
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      // Overwrite the oat index value since the class's dex cache is a more accurate indication
      // of where it belongs.
      oat_index = GetOatIndexForDexCache(dex_cache);
      ImageInfo& image_info = GetImageInfo(oat_index);
      {
        // Note: This table is only accessed from the image writer, so avoid locking to prevent
        // lock order violations from root visiting.
        image_info.class_table_->InsertWithoutLocks(as_klass);
      }
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
                                                        << " already forwarded";
          size_t& offset = image_info.bin_slot_sizes_[kBinArtField];
          DCHECK(!IsInBootImage(cur_fields));
          native_object_relocations_.emplace(
              cur_fields,
              NativeObjectRelocation {
                  oat_index, offset, kNativeObjectRelocationTypeArtFieldArray
              });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
            // Need to forward arrays separately from fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            DCHECK(!IsInBootImage(field));
            native_object_relocations_.emplace(
                field,
                NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      size_t num_methods = as_klass->NumMethods();
      if (num_methods != 0) {
        bool any_dirty = false;
        for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
          if (WillMethodBeDirty(&m)) {
            any_dirty = true;
            break;
          }
        }
        NativeObjectRelocationType type = any_dirty
            ? kNativeObjectRelocationTypeArtMethodDirty
            : kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
        const size_t method_size = ArtMethod::Size(target_ptr_size_);
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
                                                                               method_size,
                                                                               method_alignment);
        LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end())
            << "Method array " << array << " already forwarded";
        size_t& offset = image_info.bin_slot_sizes_[bin_type];
        DCHECK(!IsInBootImage(array));
        native_object_relocations_.emplace(array,
            NativeObjectRelocation {
                oat_index,
                offset,
                any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty
                          : kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
          AssignMethodOffset(&m, type, oat_index);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
      }
      // Assign offsets for all runtime methods in the IMT since these may keep conflict tables
      // live, then assign an offset for the IMT itself.
      if (as_klass->ShouldHaveImt()) {
        ImTable* imt = as_klass->GetImt(target_ptr_size_);
        for (size_t i = 0; i < ImTable::kSize; ++i) {
          ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
          DCHECK(imt_method != nullptr);
          if (imt_method->IsRuntimeMethod() &&
              !IsInBootImage(imt_method) &&
              !NativeRelocationAssigned(imt_method)) {
            AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
          }
        }
        TryAssignImTableOffset(imt, oat_index);
      }
    } else if (obj->IsClassLoader()) {
      // Register the class loader if it has a class table.
      // The fake boot class loader should not get registered, and we should end up with only one
      // class loader.
      mirror::ClassLoader* class_loader = obj->AsClassLoader();
      if (class_loader->GetClassTable() != nullptr) {
        class_loaders_.insert(class_loader);
      }
    }
    AssignImageBinSlot(obj, oat_index);
    work_stack.emplace(obj, oat_index);
  }
  if (obj->IsString()) {
    // Always return the interned string if one exists.
    mirror::String* interned = FindInternedString(obj->AsString());
    if (interned != nullptr) {
      return interned;
    }
  }
  return obj;
}

bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
  return native_object_relocations_.find(ptr) != native_object_relocations_.end();
}

void ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
  // No offset, or already assigned.
  if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
    return;
  }
  // Assign the IMT a slot in the kBinImTable bin of the image for oat_index.
  ImageInfo& image_info = GetImageInfo(oat_index);
  const size_t size = ImTable::SizeInBytes(target_ptr_size_);
  native_object_relocations_.emplace(
      imt,
      NativeObjectRelocation {
          oat_index,
          image_info.bin_slot_sizes_[kBinImTable],
          kNativeObjectRelocationTypeIMTable});
  image_info.bin_slot_sizes_[kBinImTable] += size;
}

void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
  // No offset, or already assigned.
  if (table == nullptr || NativeRelocationAssigned(table)) {
    return;
  }
  CHECK(!IsInBootImage(table));
  // Assign the conflict table a slot in the kBinIMTConflictTable bin of the image for oat_index.
  ImageInfo& image_info = GetImageInfo(oat_index);
  const size_t size = table->ComputeSize(target_ptr_size_);
  native_object_relocations_.emplace(
      table,
      NativeObjectRelocation {
          oat_index,
          image_info.bin_slot_sizes_[kBinIMTConflictTable],
          kNativeObjectRelocationTypeIMTConflictTable});
  image_info.bin_slot_sizes_[kBinIMTConflictTable] += size;
}

void ImageWriter::AssignMethodOffset(ArtMethod* method,
                                     NativeObjectRelocationType type,
                                     size_t oat_index) {
  DCHECK(!IsInBootImage(method));
  CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
                                           << PrettyMethod(method);
  if (method->IsRuntimeMethod()) {
    TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
  }
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t& offset = image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
  offset += ArtMethod::Size(target_ptr_size_);
}
1271
Mathieu Chartier496577f2016-09-20 15:33:31 -07001272void ImageWriter::EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001273 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1274 DCHECK(writer != nullptr);
Mathieu Chartier496577f2016-09-20 15:33:31 -07001275 if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
1276 CHECK(writer->IsImageBinSlotAssigned(obj)) << PrettyTypeOf(obj) << " " << obj;
1277 }
1278}
1279
1280void ImageWriter::DeflateMonitorCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) {
1281 Monitor::Deflate(Thread::Current(), obj);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001282}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  if (!writer->IsInBootImage(obj)) {
    writer->UnbinObjectsIntoOffset(obj);
  }
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  DCHECK(!IsInBootImage(obj));
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lock word from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
}
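// Visitor used while draining the work stack: every reference it visits is forwarded through
// TryAssignBinSlot() so that each reachable object receives a bin slot and interned strings are
// canonicalized.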
class ImageWriter::VisitReferencesVisitor {
 public:
  VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
      : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}

  // Visit native roots separately since they are not covered by the field visitors below.
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    root->Assign(VisitReference(root->AsMirrorPtr()));
  }

  ALWAYS_INLINE void operator() (mirror::Object* obj,
                                 MemberOffset offset,
                                 bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
  }

  ALWAYS_INLINE void operator() (mirror::Class* klass ATTRIBUTE_UNUSED,
                                 mirror::Reference* ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ref->SetReferent</*kTransactionActive*/false>(
        VisitReference(ref->GetReferent<kWithoutReadBarrier>()));
  }

 private:
  mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
    return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
  }

  ImageWriter* const image_writer_;
  WorkStack* const work_stack_;
  const size_t oat_index_;
};
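// Collects all runtime roots into a plain vector so that they can be assigned bin slots
// afterwards, outside of the locks held by Runtime::VisitRoots().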
class ImageWriter::GetRootsVisitor : public RootVisitor {
 public:
  explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}

  void VisitRoots(mirror::Object*** roots,
                  size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots_->push_back(*roots[i]);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                  size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots_->push_back(roots[i]->AsMirrorPtr());
    }
  }

 private:
  std::vector<mirror::Object*>* const roots_;
};
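// Drains the work stack, assigning bin slots to everything transitively reachable from the
// objects pushed so far.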
void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
  while (!work_stack->empty()) {
    std::pair<mirror::Object*, size_t> pair(work_stack->top());
    work_stack->pop();
    VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
    // Walk references and assign bin slots for them.
    pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
        visitor,
        visitor);
  }
}
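// Computes the final layout of every image: assigns bin slots to all objects and native data,
// sizes the intern table and class table sections, turns bin slots into image offsets, and
// records the image begin and image roots addresses consumed later by CreateHeader().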
void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  StackHandleScopeCollection handles(self);
  std::vector<Handle<ObjectArray<Object>>> image_roots;
  for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
    image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));
  }

  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  // Leave space for the header, but do not write it yet; we need to
  // know where image_roots is going to end up.
  image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit alignment

  const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kSaveAllCalleeSaves);
  image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kSaveRefsOnly);
  image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kSaveRefsAndArgs);
  image_methods_[ImageHeader::kSaveEverythingMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kSaveEverything);
  // Visit image methods first to have the main runtime methods in the first image.
  for (auto* m : image_methods_) {
    CHECK(m != nullptr);
    CHECK(m->IsRuntimeMethod());
    DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
    if (!IsInBootImage(m)) {
      AssignMethodOffset(m, kNativeObjectRelocationTypeRuntimeMethod, GetDefaultOatIndex());
    }
  }

  // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
  // this lock while holding other locks may cause lock order violations.
  heap->VisitObjects(DeflateMonitorCallback, this);

  // Work list of <object, oat_index> for objects. Everything on the stack must already be
  // assigned a bin slot.
  WorkStack work_stack;

  // Special case interned strings to put them in the image they are likely to be resolved from.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    const size_t oat_index = it->second;
    InternTable* const intern_table = runtime->GetInternTable();
    for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
      uint32_t utf16_length;
      const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(i, &utf16_length);
      mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data);
      TryAssignBinSlot(work_stack, string, oat_index);
    }
  }

  // Collect the GC roots first and visit them afterwards, to avoid lock order violations: the
  // root visitor is invoked while various locks are held.
  {
    std::vector<mirror::Object*> roots;
    GetRootsVisitor root_visitor(&roots);
    runtime->VisitRoots(&root_visitor);
    for (mirror::Object* obj : roots) {
      TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
    }
  }
  ProcessWorkStack(&work_stack);

  // For app images, there may be objects that are only held live by the boot image. One
  // example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
  // does not fail any checks. TODO: We should probably avoid copying these objects.
  if (compile_app_image_) {
    for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
      DCHECK(space->IsImageSpace());
      gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
      live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                    reinterpret_cast<uintptr_t>(space->Limit()),
                                    [this, &work_stack](mirror::Object* obj)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
        // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
        obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
            visitor,
            visitor);
      });
    }
    // Process the work stack in case anything was added by TryAssignBinSlot.
    ProcessWorkStack(&work_stack);
  }

  // Verify that all objects have assigned image bin slots.
  heap->VisitObjects(EnsureBinSlotAssignedCallback, this);

  // Calculate size of the dex cache arrays slot and prepare offsets.
  PrepareDexCacheArraySlots();

  // Calculate the sizes of the intern tables and class tables.
  for (ImageInfo& image_info : image_infos_) {
    // Calculate how big the intern table will be after being serialized.
    InternTable* const intern_table = image_info.intern_table_.get();
    CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
    image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
    // Calculate the size of the class table.
    ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
    image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
  }

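  // The bins are laid out back to back after the image header. For example, with
  // image_objects_offset_begin_ = 0x60 and two bins of sizes 0x100 and 0x80, the bin offsets
  // would be 0x60 and 0x160 (modulo the per-bin alignment applied below).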
  // Calculate bin slot offsets.
  for (ImageInfo& image_info : image_infos_) {
    size_t bin_offset = image_objects_offset_begin_;
    for (size_t i = 0; i != kBinSize; ++i) {
      switch (i) {
        case kBinArtMethodClean:
        case kBinArtMethodDirty: {
          bin_offset = RoundUp(bin_offset, method_alignment);
          break;
        }
        case kBinDexCacheArray:
          bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment());
          break;
        case kBinImTable:
        case kBinIMTConflictTable: {
          bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
          break;
        }
        default: {
          // Normal alignment.
        }
      }
      image_info.bin_slot_offsets_[i] = bin_offset;
      bin_offset += image_info.bin_slot_sizes_[i];
    }
    // NOTE: There may be additional padding between the bin slots and the intern table.
    DCHECK_EQ(image_info.image_end_,
              GetBinSizeSum(image_info, kBinMirrorCount) + image_objects_offset_begin_);
  }

  // Calculate image offsets.
  size_t image_offset = 0;
  for (ImageInfo& image_info : image_infos_) {
    image_info.image_begin_ = global_image_begin_ + image_offset;
    image_info.image_offset_ = image_offset;
    ImageSection unused_sections[ImageHeader::kSectionCount];
    image_info.image_size_ = RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
    // There should be no gaps until the next image.
    image_offset += image_info.image_size_;
  }

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);

  // DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  size_t i = 0;
  for (ImageInfo& image_info : image_infos_) {
    image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
    i++;
  }

  // Update the native relocations by adding their bin sums.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    relocation.offset += image_info.bin_slot_offsets_[bin_type];
  }

  // Note that image_info.image_end_ is left at the end of the used mirror object section.
}
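// Fills |out_sections| with the boundaries of each image section (objects, fields, methods,
// IM tables, conflict tables, runtime methods, dex cache arrays, interned strings, class table)
// and returns the end offset of the last section, which is where the image bitmap begins.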
size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
  DCHECK(out_sections != nullptr);

  // Do not round up any sections here that are represented by the bins since it will break
  // offsets.

  // Objects section.
  ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
  *objects_section = ImageSection(0u, image_end_);

  // Add field section.
  ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
  *field_section = ImageSection(bin_slot_offsets_[kBinArtField], bin_slot_sizes_[kBinArtField]);
  CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());

  // Add method section.
  ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
  *methods_section = ImageSection(
      bin_slot_offsets_[kBinArtMethodClean],
      bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinArtMethodDirty]);

  // IMT section.
  ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
  *imt_section = ImageSection(bin_slot_offsets_[kBinImTable], bin_slot_sizes_[kBinImTable]);

  // Conflict tables section.
  ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
  *imt_conflict_tables_section = ImageSection(bin_slot_offsets_[kBinIMTConflictTable],
                                              bin_slot_sizes_[kBinIMTConflictTable]);

  // Runtime methods section.
  ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
  *runtime_methods_section = ImageSection(bin_slot_offsets_[kBinRuntimeMethod],
                                          bin_slot_sizes_[kBinRuntimeMethod]);

  // Add dex cache arrays section.
  ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
  *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
                                           bin_slot_sizes_[kBinDexCacheArray]);

  // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
  size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
  // Calculate the size of the interned strings.
  ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
  *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
  cur_pos = interned_strings_section->End();
  // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
  cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
  // Calculate the size of the class table section.
  ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
  *class_table_section = ImageSection(cur_pos, class_table_bytes_);
  cur_pos = class_table_section->End();
  // Image end goes right before the start of the image bitmap.
  return cur_pos;
}
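// Writes the ImageHeader for the image corresponding to |oat_index| at the start of that image's
// buffer, recording the section boundaries, the oat file addresses, and the boot image extents.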
void ImageWriter::CreateHeader(size_t oat_index) {
  ImageInfo& image_info = GetImageInfo(oat_index);
  const uint8_t* oat_file_begin = image_info.oat_file_begin_;
  const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
  const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;

  // Create the image sections.
  ImageSection sections[ImageHeader::kSectionCount];
  const size_t image_end = image_info.CreateImageSections(sections);

  // Finally, the bitmap section.
  const size_t bitmap_bytes = image_info.image_bitmap_->Size();
  auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
  *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
  if (VLOG_IS_ON(compiler)) {
    LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
    size_t idx = 0;
    for (const ImageSection& section : sections) {
      LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
      ++idx;
    }
    LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
    LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
    LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
              << " Image offset=" << image_info.image_offset_ << std::dec;
    LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
              << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
              << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
              << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
  }
  // Store boot image info for app image so that we can relocate.
  uint32_t boot_image_begin = 0;
  uint32_t boot_image_end = 0;
  uint32_t boot_oat_begin = 0;
  uint32_t boot_oat_end = 0;
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);

  // Create the header; leave the data size as 0 since we will fill it in as we write the image.
  new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
                                               image_end,
                                               sections,
                                               image_info.image_roots_address_,
                                               image_info.oat_checksum_,
                                               PointerToLowMemUInt32(oat_file_begin),
                                               PointerToLowMemUInt32(image_info.oat_data_begin_),
                                               PointerToLowMemUInt32(oat_data_end),
                                               PointerToLowMemUInt32(oat_file_end),
                                               boot_image_begin,
                                               boot_image_end - boot_image_begin,
                                               boot_oat_begin,
                                               boot_oat_end - boot_oat_begin,
                                               static_cast<uint32_t>(target_ptr_size_),
                                               compile_pic_,
                                               /*is_pic*/compile_app_image_,
                                               image_storage_mode_,
                                               /*data_size*/0u);
}

ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
  auto it = native_object_relocations_.find(method);
  CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
  size_t oat_index = GetOatIndex(method->GetDexCache());
  ImageInfo& image_info = GetImageInfo(oat_index);
  CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
  return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);
}
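// Rewrites the GC roots of a serialized intern table or class table so that they point at the
// objects' final image addresses.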
class FixupRootVisitor : public RootVisitor {
 public:
  explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
  }

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      *roots[i] = image_writer_->GetImageAddress(*roots[i]);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots[i]->Assign(image_writer_->GetImageAddress(roots[i]->AsMirrorPtr()));
    }
  }

 private:
  ImageWriter* const image_writer_;
};
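// Copies the entries of an ImTable, forwarding each method to its location in the image.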
void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
  for (size_t i = 0; i < ImTable::kSize; ++i) {
    ArtMethod* method = orig->Get(i, target_ptr_size_);
    copy->Set(i, NativeLocationInImage(method), target_ptr_size_);
  }
}

void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
  const size_t count = orig->NumEntries(target_ptr_size_);
  for (size_t i = 0; i < count; ++i) {
    ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
    ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
    copy->SetInterfaceMethod(i, target_ptr_size_, NativeLocationInImage(interface_method));
    copy->SetImplementationMethod(i,
                                  target_ptr_size_,
                                  NativeLocationInImage(implementation_method));
  }
}
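// Copies all native data (ArtFields, ArtMethods, IM tables, conflict tables) recorded in
// native_object_relocations_ into the image for |oat_index|, then writes out the image method
// roots, the intern table, and the class table with their pointers fixed up.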
void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
  const ImageInfo& image_info = GetImageInfo(oat_index);
  // Copy ArtFields and methods to their locations and update the array for convenience.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    // Only work with native objects that belong to the current oat file.
    if (relocation.oat_index != oat_index) {
      continue;
    }
    auto* dest = image_info.image_->Begin() + relocation.offset;
    DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
    DCHECK(!IsInBootImage(pair.first));
    switch (relocation.type) {
      case kNativeObjectRelocationTypeArtField: {
        memcpy(dest, pair.first, sizeof(ArtField));
        reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
            GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Decode()));
        break;
      }
      case kNativeObjectRelocationTypeRuntimeMethod:
      case kNativeObjectRelocationTypeArtMethodClean:
      case kNativeObjectRelocationTypeArtMethodDirty: {
        CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
                           reinterpret_cast<ArtMethod*>(dest),
                           image_info);
        break;
      }
      // For arrays, copy just the header since the elements will get copied by their corresponding
      // relocations.
      case kNativeObjectRelocationTypeArtFieldArray: {
        memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
        break;
      }
      case kNativeObjectRelocationTypeArtMethodArrayClean:
      case kNativeObjectRelocationTypeArtMethodArrayDirty: {
        size_t size = ArtMethod::Size(target_ptr_size_);
        size_t alignment = ArtMethod::Alignment(target_ptr_size_);
        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
        // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
        reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
        break;
      }
      case kNativeObjectRelocationTypeDexCacheArray:
        // Nothing to copy here, everything is done in FixupDexCache().
        break;
      case kNativeObjectRelocationTypeIMTable: {
        ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
        ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
        CopyAndFixupImTable(orig_imt, dest_imt);
        break;
      }
      case kNativeObjectRelocationTypeIMTConflictTable: {
        auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
        CopyAndFixupImtConflictTable(
            orig_table,
            new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
        break;
      }
    }
  }
  // Fixup the image method roots.
  auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
  for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
    ArtMethod* method = image_methods_[i];
    CHECK(method != nullptr);
    if (!IsInBootImage(method)) {
      method = NativeLocationInImage(method);
    }
    image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
  }
  FixupRootVisitor root_visitor(this);

  // Write the intern table into the image.
  if (image_info.intern_table_bytes_ > 0) {
    const ImageSection& intern_table_section = image_header->GetImageSection(
        ImageHeader::kSectionInternedStrings);
    InternTable* const intern_table = image_info.intern_table_.get();
    uint8_t* const intern_table_memory_ptr =
        image_info.image_->Begin() + intern_table_section.Offset();
    const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
    CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
    // Fixup the pointers in the newly written intern table to contain image addresses.
    InternTable temp_intern_table;
    // Note that we require that ReadFromMemory does not make an internal copy of the elements so
    // that VisitRoots() updates the memory directly rather than copies.
    // This also relies on root visiting not doing any verification, which could fail after we
    // update the roots to be the image addresses.
    temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
    CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
    temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
  }
  // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
  // class loaders. Writing multiple class tables into the image is currently unsupported.
  if (image_info.class_table_bytes_ > 0u) {
    const ImageSection& class_table_section = image_header->GetImageSection(
        ImageHeader::kSectionClassTable);
    uint8_t* const class_table_memory_ptr =
        image_info.image_->Begin() + class_table_section.Offset();
    ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);

    ClassTable* table = image_info.class_table_.get();
    CHECK(table != nullptr);
    const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
    CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
    // Fixup the pointers in the newly written class table to contain image addresses. See
    // above comment for intern tables.
    ClassTable temp_class_table;
    temp_class_table.ReadFromMemory(class_table_memory_ptr);
    CHECK_EQ(temp_class_table.NumZygoteClasses(), table->NumNonZygoteClasses() +
             table->NumZygoteClasses());
    BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(&root_visitor,
                                                                    RootInfo(kRootUnknown));
    temp_class_table.VisitRoots(buffered_visitor);
  }
}
void ImageWriter::CopyAndFixupObjects() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->VisitObjects(CopyAndFixupObjectsCallback, this);
  // Fix up the objects that previously had hash codes.
  for (const auto& hash_pair : saved_hashcode_map_) {
    Object* obj = hash_pair.first;
    DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
    obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
  }
  saved_hashcode_map_.clear();
}
void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
  DCHECK(obj != nullptr);
  DCHECK(arg != nullptr);
  reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
}
void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
                                    mirror::Class* klass, Bin array_type) {
  CHECK(klass->IsArrayClass());
  CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
  // Fixup int and long pointers for the ArtMethod or ArtField arrays.
  const size_t num_elements = arr->GetLength();
  dst->SetClass(GetImageAddress(arr->GetClass()));
  auto* dest_array = down_cast<mirror::PointerArray*>(dst);
  for (size_t i = 0, count = num_elements; i < count; ++i) {
    void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
    if (elem != nullptr && !IsInBootImage(elem)) {
      auto it = native_object_relocations_.find(elem);
      if (UNLIKELY(it == native_object_relocations_.end())) {
        // No relocation was recorded for this element. Decide how to report the failure from the
        // bin type of the array; the end() iterator must not be dereferenced.
        if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
          auto* method = reinterpret_cast<ArtMethod*>(elem);
          LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
                     << method << " idx=" << i << "/" << num_elements << " with declaring class "
                     << PrettyClass(method->GetDeclaringClass());
        } else {
          CHECK_EQ(array_type, kBinArtField);
          auto* field = reinterpret_cast<ArtField*>(elem);
          LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
                     << field << " idx=" << i << "/" << num_elements << " with declaring class "
                     << PrettyClass(field->GetDeclaringClass());
        }
        UNREACHABLE();
      } else {
        ImageInfo& image_info = GetImageInfo(it->second.oat_index);
        elem = image_info.image_begin_ + it->second.offset;
      }
    }
    dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
  }
}
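// Copies |obj| into its assigned slot in the destination image, restores its lock word (the bin
// slot was stashed there during layout), and fixes up all of its references.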
1898void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001899 if (IsInBootImage(obj)) {
1900 return;
1901 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001902 size_t offset = GetImageOffset(obj);
Vladimir Marko944da602016-02-19 12:27:55 +00001903 size_t oat_index = GetOatIndex(obj);
1904 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001905 auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
1906 DCHECK_LT(offset, image_info.image_end_);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001907 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001908
Jeff Haodcdc85b2015-12-04 14:06:18 -08001909 image_info.image_bitmap_->Set(dst); // Mark the obj as live.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001910
1911 const size_t n = obj->SizeOf();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001912 DCHECK_LE(offset + n, image_info.image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001913 memcpy(dst, src, n);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001914
Mathieu Chartierad2541a2013-10-25 10:05:23 -07001915 // Write in a hash code of objects which have inflated monitors or a hash code in their monitor
1916 // word.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001917 const auto it = saved_hashcode_map_.find(obj);
1918 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1919 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
Mathieu Chartier36a270a2016-07-28 18:08:51 -07001920 if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
1921 // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
 1922 // safe since we mark all of the objects that may reference non-immune objects as gray.
1923 CHECK(dst->AtomicSetMarkBit(0, 1));
1924 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001925 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001926}
1927
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001928 // Rewrite all the references in the copied object to point to their image address equivalents.
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001929class FixupVisitor {
1930 public:
1931 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1932 }
1933
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001934 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1935 // with other logic.
1936 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1937 const {}
1938 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
 1939
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001941 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001942 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Hiroshi Yamauchi6e83c172014-05-01 21:25:41 -07001943 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001944 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1945 // image.
1946 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001947 offset,
1948 image_writer_->GetImageAddress(ref));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001949 }
1950
1951 // java.lang.ref.Reference visitor.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001952 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001953 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001954 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001955 mirror::Reference::ReferentOffset(),
1956 image_writer_->GetImageAddress(ref->GetReferent()));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001957 }
1958
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001959 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001960 ImageWriter* const image_writer_;
1961 mirror::Object* const copy_;
1962};
1963
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001964class FixupClassVisitor FINAL : public FixupVisitor {
1965 public:
1966 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1967 }
1968
Mathieu Chartierc7853442015-03-27 14:35:38 -07001969 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001970 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001971 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001972 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001973 }
1974
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001975 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1976 mirror::Reference* ref ATTRIBUTE_UNUSED) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001977 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001978 LOG(FATAL) << "Reference not expected here.";
1979 }
1980};
1981
Vladimir Marko05792b92015-08-03 11:56:49 +01001982uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
1983 DCHECK(obj != nullptr);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001984 DCHECK(!IsInBootImage(obj));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001985 auto it = native_object_relocations_.find(obj);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001986 CHECK(it != native_object_relocations_.end()) << obj << " spaces "
1987 << Runtime::Current()->GetHeap()->DumpSpaces();
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001988 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko05792b92015-08-03 11:56:49 +01001989 return relocation.offset;
1990}
1991
1992template <typename T>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001993std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001994 std::ostringstream oss;
1995 oss << ptr;
1996 return oss.str();
1997}
1998
1999template <>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002000std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002001 return PrettyMethod(method);
2002}
2003
2004template <typename T>
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002005T* ImageWriter::NativeLocationInImage(T* obj) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08002006 if (obj == nullptr || IsInBootImage(obj)) {
2007 return obj;
2008 } else {
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002009 auto it = native_object_relocations_.find(obj);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002010 CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj)
2011 << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces();
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002012 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko944da602016-02-19 12:27:55 +00002013 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002014 return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002015 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002016}
2017
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002018template <typename T>
Jeff Haodcdc85b2015-12-04 14:06:18 -08002019T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
2020 if (obj == nullptr || IsInBootImage(obj)) {
2021 return obj;
2022 } else {
Vladimir Marko944da602016-02-19 12:27:55 +00002023 size_t oat_index = GetOatIndexForDexCache(dex_cache);
2024 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002025 return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
2026 }
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002027}
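// Editor's note, not part of the original source: the two helpers above answer
// different questions and are easy to confuse.
//
//   NativeLocationInImage(p)          -> the address p will have at *runtime*,
//                                        image_begin_ + relocation offset.
//   NativeCopyLocation(p, dex_cache)  -> the address of p's bytes in the
//                                        *writer's* buffer, image_->Begin() +
//                                        relocation offset, writable now.
//
// Pointers stored into image objects must use the former; in-place fixups of
// the copied data (e.g. FixupStrings below) must use the latter.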
2028
2029class NativeLocationVisitor {
2030 public:
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002031 explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002032
2033 template <typename T>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002034 T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002035 return image_writer_->NativeLocationInImage(ptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002036 }
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002037
2038 private:
2039 ImageWriter* const image_writer_;
2040};
2041
2042void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002043 orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
Mathieu Chartierc7853442015-03-27 14:35:38 -07002044 FixupClassVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07002045 static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
Andreas Gampeace0dc12016-01-20 13:33:13 -08002046
2047 // Remove the clinitThreadId. This is required for image determinism.
2048 copy->SetClinitThreadId(static_cast<pid_t>(0));
Mathieu Chartierc7853442015-03-27 14:35:38 -07002049}
2050
Ian Rogersef7d42f2014-01-06 12:55:46 -08002051void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07002052 DCHECK(orig != nullptr);
2053 DCHECK(copy != nullptr);
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07002054 if (kUseBakerOrBrooksReadBarrier) {
2055 orig->AssertReadBarrierPointer();
2056 if (kUseBrooksReadBarrier) {
2057 // Note the address 'copy' isn't the same as the image address of 'orig'.
2058 copy->SetReadBarrierPointer(GetImageAddress(orig));
2059 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
2060 }
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08002061 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002062 auto* klass = orig->GetClass();
2063 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
Vladimir Marko05792b92015-08-03 11:56:49 +01002064 // Is this a native pointer array?
Mathieu Chartiere401d142015-04-22 13:56:20 -07002065 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
2066 if (it != pointer_arrays_.end()) {
2067 // Should only need to fixup every pointer array exactly once.
2068 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
2069 pointer_arrays_.erase(it);
2070 return;
2071 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002072 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07002073 if (orig->IsClass()) {
2074 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002075 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002076 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
2077 // Need to go update the ArtMethod.
Neil Fuller0e844392016-09-08 13:43:31 +01002078 auto* dest = down_cast<mirror::Executable*>(copy);
2079 auto* src = down_cast<mirror::Executable*>(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002080 ArtMethod* src_method = src->GetArtMethod();
Jing Ji96e640c2016-08-31 21:21:37 -05002081 dest->SetArtMethod(GetImageMethodAddress(src_method));
Vladimir Marko05792b92015-08-03 11:56:49 +01002082 } else if (!klass->IsArrayClass()) {
2083 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2084 if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
2085 FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08002086 } else if (klass->IsClassLoaderClass()) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002087 mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
Vladimir Marko05792b92015-08-03 11:56:49 +01002088 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
2089 // ClassLoader.
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002090 copy_loader->SetClassTable(nullptr);
Mathieu Chartier5550c562015-09-22 15:18:04 -07002091 // Also set allocator to null to be safe. The allocator is created when we create the class
2092 // table. We also never expect to unload things in the image since they are held live as
2093 // roots.
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002094 copy_loader->SetAllocator(nullptr);
Vladimir Marko05792b92015-08-03 11:56:49 +01002095 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002096 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002097 FixupVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07002098 orig->VisitReferences(visitor, visitor);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002099 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002100}
 2101
2103class ImageAddressVisitor {
2104 public:
2105 explicit ImageAddressVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
2106
2107 template <typename T>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002108 T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002109 return image_writer_->GetImageAddress(ptr);
2110 }
2111
2112 private:
2113 ImageWriter* const image_writer_;
2114};
 2115
Vladimir Marko05792b92015-08-03 11:56:49 +01002117void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
2118 mirror::DexCache* copy_dex_cache) {
2119 // Though the DexCache array fields are usually treated as native pointers, we set the full
2120 // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
2121 // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
 2122 // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07002123 mirror::StringDexCacheType* orig_strings = orig_dex_cache->GetStrings();
Vladimir Marko05792b92015-08-03 11:56:49 +01002124 if (orig_strings != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002125 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002126 NativeLocationInImage(orig_strings),
Andreas Gampe542451c2016-07-26 09:02:02 -07002127 PointerSize::k64);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002128 orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache),
2129 ImageAddressVisitor(this));
Vladimir Marko05792b92015-08-03 11:56:49 +01002130 }
2131 GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes();
2132 if (orig_types != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002133 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002134 NativeLocationInImage(orig_types),
Andreas Gampe542451c2016-07-26 09:02:02 -07002135 PointerSize::k64);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002136 orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
2137 ImageAddressVisitor(this));
Vladimir Marko05792b92015-08-03 11:56:49 +01002138 }
2139 ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
2140 if (orig_methods != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002141 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002142 NativeLocationInImage(orig_methods),
Andreas Gampe542451c2016-07-26 09:02:02 -07002143 PointerSize::k64);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002144 ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
Vladimir Marko05792b92015-08-03 11:56:49 +01002145 for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
2146 ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002147 // NativeLocationInImage also handles runtime methods since these have relocation info.
2148 ArtMethod* copy = NativeLocationInImage(orig);
Vladimir Marko05792b92015-08-03 11:56:49 +01002149 mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
2150 }
2151 }
2152 ArtField** orig_fields = orig_dex_cache->GetResolvedFields();
2153 if (orig_fields != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08002154 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002155 NativeLocationInImage(orig_fields),
Andreas Gampe542451c2016-07-26 09:02:02 -07002156 PointerSize::k64);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002157 ArtField** copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
Vladimir Marko05792b92015-08-03 11:56:49 +01002158 for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
2159 ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002160 ArtField* copy = NativeLocationInImage(orig);
Vladimir Marko05792b92015-08-03 11:56:49 +01002161 mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_);
2162 }
2163 }
Andreas Gampeace0dc12016-01-20 13:33:13 -08002164
2165 // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
2166 // compiler pointers in here will make the output non-deterministic.
2167 copy_dex_cache->SetDexFile(nullptr);
Vladimir Marko05792b92015-08-03 11:56:49 +01002168}
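// Editor's sketch, not part of the original source: the zero-extension
// described at the top of FixupDexCache, in isolation. For a hypothetical
// 32-bit target pointer 0x80001234:
//
//   uint32_t target = 0x80001234u;
//   int64_t stored = static_cast<int64_t>(static_cast<uintptr_t>(target));
//   // stored == 0x0000000080001234: zero-extended, not sign-extended, which
//   // is why the fields above are written with PointerSize::k64.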
2169
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002170const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
2171 DCHECK_LT(type, kOatAddressCount);
2172 // If we are compiling an app image, we need to use the stubs of the boot image.
2173 if (compile_app_image_) {
2174 // Use the current image pointers.
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002175 const std::vector<gc::space::ImageSpace*>& image_spaces =
Jeff Haodcdc85b2015-12-04 14:06:18 -08002176 Runtime::Current()->GetHeap()->GetBootImageSpaces();
2177 DCHECK(!image_spaces.empty());
2178 const OatFile* oat_file = image_spaces[0]->GetOatFile();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002179 CHECK(oat_file != nullptr);
2180 const OatHeader& header = oat_file->GetOatHeader();
2181 switch (type) {
2182 // TODO: We could maybe clean this up if we stored them in an array in the oat header.
2183 case kOatAddressQuickGenericJNITrampoline:
2184 return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
2185 case kOatAddressInterpreterToInterpreterBridge:
2186 return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
2187 case kOatAddressInterpreterToCompiledCodeBridge:
2188 return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
2189 case kOatAddressJNIDlsymLookup:
2190 return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
2191 case kOatAddressQuickIMTConflictTrampoline:
2192 return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
2193 case kOatAddressQuickResolutionTrampoline:
2194 return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
2195 case kOatAddressQuickToInterpreterBridge:
2196 return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
2197 default:
2198 UNREACHABLE();
2199 }
2200 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08002201 const ImageInfo& primary_image_info = GetImageInfo(0);
2202 return GetOatAddressForOffset(primary_image_info.oat_address_offsets_[type], primary_image_info);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002203}
2204
Jeff Haodcdc85b2015-12-04 14:06:18 -08002205const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
2206 const ImageInfo& image_info,
2207 bool* quick_is_interpreted) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002208 DCHECK(!method->IsResolutionMethod()) << PrettyMethod(method);
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002209 DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << PrettyMethod(method);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002210 DCHECK(!method->IsImtUnimplementedMethod()) << PrettyMethod(method);
Alex Light9139e002015-10-09 15:59:48 -07002211 DCHECK(method->IsInvokable()) << PrettyMethod(method);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002212 DCHECK(!IsInBootImage(method)) << PrettyMethod(method);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002213
2214 // Use original code if it exists. Otherwise, set the code pointer to the resolution
2215 // trampoline.
2216
2217 // Quick entrypoint:
Igor Murashkin0ccfe2c2016-02-19 16:41:44 -08002218 const void* quick_oat_entry_point =
2219 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
2220 const uint8_t* quick_code;
2221
2222 if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
2223 DCHECK(method->IsCopied());
 2224 // If the code is not in the oat file corresponding to this image (e.g. default methods), use the entry point pointer directly.
2225 quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
2226 } else {
2227 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
2228 quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
2229 }
2230
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002231 *quick_is_interpreted = false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07002232 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
2233 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002234 // We have code for a non-static or initialized method, just use the code.
2235 } else if (quick_code == nullptr && method->IsNative() &&
2236 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
2237 // Non-static or initialized native method missing compiled code, use generic JNI version.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002238 quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002239 } else if (quick_code == nullptr && !method->IsNative()) {
2240 // We don't have code at all for a non-native method, use the interpreter.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002241 quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002242 *quick_is_interpreted = true;
2243 } else {
2244 CHECK(!method->GetDeclaringClass()->IsInitialized());
2245 // We have code for a static method, but need to go through the resolution stub for class
2246 // initialization.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002247 quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
2248 }
 2249 // Sanity check, currently disabled pending per-image oat bounds tracking here:
Jeff Haodcdc85b2015-12-04 14:06:18 -08002250 // DCHECK(IsInBootOatFile(quick_code) || quick_code >= image_info.oat_data_begin_);
2252 return quick_code;
2253}
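// Editor's summary, not part of the original source, of the entry point
// selection in GetQuickCode above:
//
//   compiled code present, and the method is non-static, a constructor, or
//     its class is initialized                   -> use the compiled code
//   no code, native, non-static or initialized   -> generic JNI trampoline
//   no code, non-native                          -> quick-to-interpreter bridge
//   code present, static, class uninitialized    -> resolution trampoline
//
// Only the interpreter-bridge case sets *quick_is_interpreted.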
2254
Jeff Haodcdc85b2015-12-04 14:06:18 -08002255void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
2256 ArtMethod* copy,
2257 const ImageInfo& image_info) {
Vladimir Marko14632852015-08-17 12:07:23 +01002258 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002259
2260 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
Vladimir Marko05792b92015-08-03 11:56:49 +01002261 ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002262 copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
Vladimir Marko05792b92015-08-03 11:56:49 +01002263 GcRoot<mirror::Class>* orig_resolved_types = orig->GetDexCacheResolvedTypes(target_ptr_size_);
Mathieu Chartiere8bf1342016-02-17 18:02:40 -08002264 copy->SetDexCacheResolvedTypes(NativeLocationInImage(orig_resolved_types), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002265
Ian Rogers848871b2013-08-05 10:56:33 -07002266 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
2267 // oat_begin_
Brian Carlstrom7940e442013-07-12 13:46:57 -07002268
Ian Rogers848871b2013-08-05 10:56:33 -07002269 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07002270 Runtime* runtime = Runtime::Current();
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002271 if (orig->IsRuntimeMethod()) {
2272 ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
2273 if (orig_table != nullptr) {
2274 // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
2275 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2276 GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
2277 copy->SetImtConflictTable(NativeLocationInImage(orig_table), target_ptr_size_);
2278 } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
2279 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2280 GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
2281 } else {
2282 bool found_one = false;
2283 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
2284 auto idx = static_cast<Runtime::CalleeSaveType>(i);
2285 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
2286 found_one = true;
2287 break;
2288 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07002289 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002290 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
2291 CHECK(copy->IsRuntimeMethod());
Mathieu Chartiere401d142015-04-22 13:56:20 -07002292 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002293 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07002294 // We assume all methods have code. If they don't currently then we set them to the use the
2295 // resolution trampoline. Abstract methods never have code and so we need to make sure their
2296 // use results in an AbstractMethodError. We use the interpreter to achieve this.
Alex Light9139e002015-10-09 15:59:48 -07002297 if (UNLIKELY(!orig->IsInvokable())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002298 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002299 GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07002300 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002301 bool quick_is_interpreted;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002302 const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002303 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02002304
Sebastien Hertze1d07812014-05-21 15:44:09 +02002305 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07002306 if (orig->IsNative()) {
2307 // The native method's pointer is set to a stub to lookup via dlsym.
2308 // Note this is not the code_ pointer, that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002309 copy->SetEntryPointFromJniPtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08002310 GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07002311 }
2312 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002313 }
2314}
2315
Jeff Haodcdc85b2015-12-04 14:06:18 -08002316size_t ImageWriter::GetBinSizeSum(ImageWriter::ImageInfo& image_info, ImageWriter::Bin up_to) const {
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002317 DCHECK_LE(up_to, kBinSize);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002318 return std::accumulate(&image_info.bin_slot_sizes_[0],
2319 &image_info.bin_slot_sizes_[up_to],
 2320 /*init*/ static_cast<size_t>(0));  // Accumulate in size_t, not int, to avoid truncation.
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002321}
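// Editor's sketch, not part of the original source: what the accumulate above
// computes, with hypothetical sizes. If the first bins held {64, 128, 32, ...}
// bytes, then GetBinSizeSum(info, static_cast<Bin>(2)) == 64 + 128 == 192,
// i.e. the byte offset at which bin 2 begins within the image.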
2322
2323ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
2324 // These values may need to get updated if more bins are added to the enum Bin
Mathieu Chartiere401d142015-04-22 13:56:20 -07002325 static_assert(kBinBits == 3, "wrong number of bin bits");
 2326 static_assert(kBinShift == 27, "wrong bin shift");
Igor Murashkinf5b4c502014-11-14 15:01:59 -08002327 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
2328
2329 DCHECK_LT(GetBin(), kBinSize);
2330 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
2331}
2332
2333ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
2334 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
2335 DCHECK_EQ(index, GetIndex());
2336}
2337
2338ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
2339 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
2340}
2341
2342uint32_t ImageWriter::BinSlot::GetIndex() const {
2343 return lockword_ & ~kBinMask;
2344}
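// Editor's sketch, not part of the original source: a worked round trip of the
// packing above, assuming kBinShift == 27 and a 3-bit kBinMask == 0x38000000
// as the static_asserts in BinSlot(uint32_t) imply. For bin value 5 and byte
// index 0x120 (kObjectAlignment-aligned):
//
//   lockword = 0x120u | (5u << 27)            // == 0x28000120
//   bin      = (lockword & kBinMask) >> 27    // == 5
//   index    = lockword & ~kBinMask           // == 0x120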
2345
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002346ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
2347 switch (type) {
2348 case kNativeObjectRelocationTypeArtField:
2349 case kNativeObjectRelocationTypeArtFieldArray:
2350 return kBinArtField;
2351 case kNativeObjectRelocationTypeArtMethodClean:
2352 case kNativeObjectRelocationTypeArtMethodArrayClean:
2353 return kBinArtMethodClean;
2354 case kNativeObjectRelocationTypeArtMethodDirty:
2355 case kNativeObjectRelocationTypeArtMethodArrayDirty:
2356 return kBinArtMethodDirty;
Vladimir Marko05792b92015-08-03 11:56:49 +01002357 case kNativeObjectRelocationTypeDexCacheArray:
2358 return kBinDexCacheArray;
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002359 case kNativeObjectRelocationTypeRuntimeMethod:
2360 return kBinRuntimeMethod;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002361 case kNativeObjectRelocationTypeIMTable:
2362 return kBinImTable;
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002363 case kNativeObjectRelocationTypeIMTConflictTable:
2364 return kBinIMTConflictTable;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002365 }
2366 UNREACHABLE();
2367}
2368
Vladimir Marko944da602016-02-19 12:27:55 +00002369size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
Mathieu Chartier496577f2016-09-20 15:33:31 -07002370 if (!IsMultiImage()) {
Vladimir Marko944da602016-02-19 12:27:55 +00002371 return GetDefaultOatIndex();
Jeff Haodcdc85b2015-12-04 14:06:18 -08002372 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07002373 auto it = oat_index_map_.find(obj);
2374 DCHECK(it != oat_index_map_.end());
2375 return it->second;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002376}
2377
Vladimir Marko944da602016-02-19 12:27:55 +00002378size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
Mathieu Chartier496577f2016-09-20 15:33:31 -07002379 if (!IsMultiImage()) {
Vladimir Marko944da602016-02-19 12:27:55 +00002380 return GetDefaultOatIndex();
Jeff Haodcdc85b2015-12-04 14:06:18 -08002381 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07002382 auto it = dex_file_oat_index_map_.find(dex_file);
2383 DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
2384 return it->second;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002385}
2386
Vladimir Marko944da602016-02-19 12:27:55 +00002387size_t ImageWriter::GetOatIndexForDexCache(mirror::DexCache* dex_cache) const {
2388 if (dex_cache == nullptr) {
2389 return GetDefaultOatIndex();
2390 } else {
2391 return GetOatIndexForDexFile(dex_cache->GetDexFile());
2392 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08002393}
2394
Vladimir Marko944da602016-02-19 12:27:55 +00002395void ImageWriter::UpdateOatFileLayout(size_t oat_index,
2396 size_t oat_loaded_size,
2397 size_t oat_data_offset,
2398 size_t oat_data_size) {
2399 const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
2400 for (const ImageInfo& info : image_infos_) {
2401 DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
2402 }
2403 DCHECK(images_end != nullptr); // Image space must be ready.
Jeff Haodcdc85b2015-12-04 14:06:18 -08002404
Vladimir Marko944da602016-02-19 12:27:55 +00002405 ImageInfo& cur_image_info = GetImageInfo(oat_index);
2406 cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
2407 cur_image_info.oat_loaded_size_ = oat_loaded_size;
2408 cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
2409 cur_image_info.oat_size_ = oat_data_size;
Jeff Haodcdc85b2015-12-04 14:06:18 -08002410
Mathieu Chartier14567fd2016-01-28 20:33:36 -08002411 if (compile_app_image_) {
2412 CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
2413 return;
2414 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08002415
2416 // Update the oat_offset of the next image info.
Vladimir Marko944da602016-02-19 12:27:55 +00002417 if (oat_index + 1u != oat_filenames_.size()) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08002418 // There is a following one.
Vladimir Marko944da602016-02-19 12:27:55 +00002419 ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
Jeff Haodcdc85b2015-12-04 14:06:18 -08002420 next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
2421 }
2422}
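// Editor's sketch, not part of the original source: the layout arithmetic
// above with hypothetical numbers. With the images ending at 0x71000000 and
// two oat files of loaded sizes 0x40000 and 0x30000:
//
//   oat[0].oat_file_begin_ = 0x71000000 + 0x0      (first oat_offset_ is 0)
//   oat[1].oat_offset_     = 0x0 + 0x40000
//   oat[1].oat_file_begin_ = 0x71000000 + 0x40000
//
// i.e. the oat files are placed back to back immediately after the image(s).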
2423
Vladimir Marko944da602016-02-19 12:27:55 +00002424void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
2425 ImageInfo& cur_image_info = GetImageInfo(oat_index);
2426 cur_image_info.oat_checksum_ = oat_header.GetChecksum();
2427
2428 if (oat_index == GetDefaultOatIndex()) {
2429 // Primary oat file, read the trampolines.
2430 cur_image_info.oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
2431 oat_header.GetInterpreterToInterpreterBridgeOffset();
2432 cur_image_info.oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
2433 oat_header.GetInterpreterToCompiledCodeBridgeOffset();
2434 cur_image_info.oat_address_offsets_[kOatAddressJNIDlsymLookup] =
2435 oat_header.GetJniDlsymLookupOffset();
2436 cur_image_info.oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
2437 oat_header.GetQuickGenericJniTrampolineOffset();
2438 cur_image_info.oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
2439 oat_header.GetQuickImtConflictTrampolineOffset();
2440 cur_image_info.oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
2441 oat_header.GetQuickResolutionTrampolineOffset();
2442 cur_image_info.oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
2443 oat_header.GetQuickToInterpreterBridgeOffset();
2444 }
2445}
2446
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002447ImageWriter::ImageWriter(
2448 const CompilerDriver& compiler_driver,
2449 uintptr_t image_begin,
2450 bool compile_pic,
2451 bool compile_app_image,
2452 ImageHeader::StorageMode image_storage_mode,
Vladimir Marko944da602016-02-19 12:27:55 +00002453 const std::vector<const char*>& oat_filenames,
2454 const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map)
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002455 : compiler_driver_(compiler_driver),
2456 global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
2457 image_objects_offset_begin_(0),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002458 compile_pic_(compile_pic),
2459 compile_app_image_(compile_app_image),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002460 target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
Vladimir Marko944da602016-02-19 12:27:55 +00002461 image_infos_(oat_filenames.size()),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002462 dirty_methods_(0u),
2463 clean_methods_(0u),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002464 image_storage_mode_(image_storage_mode),
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002465 oat_filenames_(oat_filenames),
Vladimir Marko944da602016-02-19 12:27:55 +00002466 dex_file_oat_index_map_(dex_file_oat_index_map) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002467 CHECK_NE(image_begin, 0U);
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002468 std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
Mathieu Chartier901e0702016-02-19 13:42:48 -08002469 CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
2470 << "Compiling a boot image should occur iff there are no boot image spaces loaded";
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002471}
2472
Mathieu Chartier1f47b672016-01-07 16:29:01 -08002473ImageWriter::ImageInfo::ImageInfo()
2474 : intern_table_(new InternTable),
2475 class_table_(new ClassTable) {}
Mathieu Chartierea0831f2015-12-29 13:17:37 -08002476
Brian Carlstrom7940e442013-07-12 13:46:57 -07002477} // namespace art