/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>
#include <lz4.h>
#include <lz4hc.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "dex_file_types.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "gc/verification.h"
#include "globals.h"
#include "handle_scope-inl.h"
#include "image.h"
#include "imt_conflict_table.h"
#include "jni_internal.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {
      return true;
    }
  }
  return false;
}

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
      return true;
    }
  }
  return false;
}

static void ClearDexFileCookieCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(obj != nullptr);
  Class* klass = obj->GetClass();
  if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
    ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
    // Null out the cookie to enable determinism. b/34090128
    field->SetObject</*kTransactionActive*/false>(obj, nullptr);
  }
}

static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime::Current()->GetHeap()->VisitObjects(ClearDexFileCookieCallback, nullptr);
}

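// Prepares the image address space, in order: prune classes that must not be written
// (for app images, also clear DexFile cookies so the output is deterministic), run a
// collection so pruned objects disappear, compute the final object offsets, and only
// then allocate the backing memory sized from those offsets.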
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    if (compile_app_image_) {
      // Clear dex file cookies for app images to enable app image determinism. This is required
      // since the cookie field contains long pointers to DexFiles which are not deterministic.
      // b/34090128
      ClearDexFileCookies();
    } else {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information
    }
  }
  heap->CollectGarbage(false);  // Remove garbage.

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_
  // and bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames
  // or oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

  {
    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CreateHeader(i);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        }
      } else {
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      image_file->Erase();
      return false;
    }

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);
    size_t data_size;
    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();

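    // Note on the compression path below: LZ4_compressBound() returns the worst-case
    // compressed size for the given input size, so the output buffer allocated for the
    // compressor is always large enough. Debug builds additionally round-trip the
    // compressed data through LZ4_decompress_safe() to verify it matches the input.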
    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4HC:  // Fall-through.
      case ImageHeader::kStorageModeLZ4: {
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compress_default(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size,
            compressed_max_size);

        break;
      }
      /*
       * Disabled due to image_test64 flakiness. Both use same decompression. b/27560444
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is same as non HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);
        break;
      }
      */
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
        break;
      }
      default: {
        LOG(FATAL) << "Unsupported";
        UNREACHABLE();
      }
    }

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
      if (kIsDebugBuild) {
        std::unique_ptr<uint8_t[]> temp(new uint8_t[image_data_size]);
        const size_t decompressed_size = LZ4_decompress_safe(
            reinterpret_cast<char*>(&compressed_data[0]),
            reinterpret_cast<char*>(&temp[0]),
            data_size,
            image_data_size);
        CHECK_EQ(decompressed_size, image_data_size);
        CHECK_EQ(memcmp(image_data, &temp[0], image_data_size), 0) << image_storage_mode_;
      }
    }

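    // Layout of the image file being written (file offsets):
    //   [0, sizeof(ImageHeader))                      header, uncompressed, written last
    //   [sizeof(ImageHeader), + data_size)            image data, possibly LZ4-compressed
    //   [RoundUp(sizeof(ImageHeader) + data_size, kPageSize), + bitmap_section.Size())
    //                                                 image bitmap, uncompressed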
    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;
      image_file->Erase();
      return false;
    }

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed
    // for convenience.
    const ImageSection& bitmap_section = image_header->GetImageSection(
        ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    }
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;
      image_file->Erase();
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      image_file->Erase();
      return false;
    }

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
                                 sizeof(ImageHeader),
                                 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      image_file->Erase();
      return false;
    }

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

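// Converts a previously assigned bin slot into the final image offset: the start of the
// object's bin within its image plus the intra-bin index recorded in the slot.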
void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

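// Stashes the bin slot in the object's lock word, encoded as a fake forwarding address.
// Any hash code currently stored in the lock word is saved in saved_hashcode_map_ so it
// is not lost and can be restored when the object is copied later. AssignImageOffset()
// subsequently overwrites the stashed bin slot with the final image offset.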
void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(dex_file, image_info.bin_slot_sizes_[kBinDexCacheArray]);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.bin_slot_sizes_[kBinDexCacheArray] += layout.Size();
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::dex_lock_);
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    ObjPtr<mirror::DexCache> dex_cache =
        ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache.Ptr())) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
        << "Dex cache should have been pruned " << dex_file->GetLocation()
        << "; possibly in class path";
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);

    if (dex_cache->GetResolvedMethodTypes() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
                                 start + layout.MethodTypesOffset(),
                                 dex_cache);
    }
    if (dex_cache->GetResolvedCallSites() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedCallSites(),
                                 start + layout.CallSitesOffset(),
                                 dex_cache);
    }
  }
}

void ImageWriter::AddDexCacheArrayRelocation(void* array,
                                             size_t offset,
                                             ObjPtr<mirror::DexCache> dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(array,
        NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << Class::PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    //  * Strings (they are immutable)
    //  * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->GetClass<kVerifyNone>() ==
        Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangObject)) {
      // Instance of java lang object, probably a lock object. This means it will be dirty when we
      // synchronize on it.
      bin = kBinMiscDirty;
    } else if (object->IsDexCache()) {
      // Dex file field becomes dirty when the image is loaded.
      bin = kBinMiscDirty;
    }
    // else bin = kBinRegular
  }

  // Assign the oat index too.
  DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
  oat_index_map_.emplace(object, oat_index);

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = image_info.bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++image_info.bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(unused_sections), kPageSize);

    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 nullptr,
                                                 length,
                                                 PROT_READ | PROT_WRITE,
                                                 false,
                                                 false,
                                                 &error_msg));
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

class ImageWriter::ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool operator()(ObjPtr<Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Class*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);
}

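// Recursive helper for PruneAppImageClass(): returns true if `klass`, or anything it
// references transitively (interfaces, component type, reference static fields,
// superclass, or its dex cache's dex file), forces the class to be pruned from the
// app image. `visited` provides cycle detection and prune_class_memo_ memoizes
// results; the memo entry is skipped when a cycle caused an early exit, since the
// computed result may then be incomplete.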
bool ImageWriter::PruneAppImageClassInternal(
    ObjPtr<mirror::Class> klass,
    bool* early_exit,
    std::unordered_set<mirror::Class*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
    return false;
  }
  auto found = prune_class_memo_.find(klass.Ptr());
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependencies, return false but do not store the result in the memoization table.
  if (visited->find(klass.Ptr()) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass.Ptr());
  bool result = IsBootClassLoaderClass(klass);
  std::string temp;
  // Prune if not an image class, this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // app image.
  if (klass->IsErroneous()) {
    result = true;
  } else {
    ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
    CHECK(ext.IsNull() || ext->GetVerifyError() == nullptr) << klass->PrettyClass();
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
                                                    &my_early_exit,
                                                    visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
                                                  &my_early_exit,
                                                  visited);
  }
  // Check static fields and their classes.
  if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
    size_t num_static_fields = klass->NumReferenceStaticFields();
    // Presumably GC can happen when we are cross compiling, it should not cause performance
    // problems to do pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
                                                        &my_early_exit,
                                                        visited);
        } else {
          result = result || PruneAppImageClassInternal(ref->GetClass(),
                                                        &my_early_exit,
                                                        visited);
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
                                                &my_early_exit,
                                                visited);
  // Remove the class if the dex file is not in the set of dex files. This happens for classes
  // that come from a uses-library when there is no profile. b/30688277
  mirror::DexCache* dex_cache = klass->GetDexCache();
  if (dex_cache != nullptr) {
    result = result ||
        dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
  }
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass.Ptr());
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller, in this case the cycle was in
  // a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass.Ptr()] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}

bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in boot image, return true.
    return true;
  }
  std::string temp;
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);
  }
  return true;
}

class ImageWriter::PruneClassesVisitor : public ClassVisitor {
 public:
  PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
      : image_writer_(image_writer),
        class_loader_(class_loader),
        classes_to_prune_(),
        defined_class_count_(0u) { }

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass.Ptr())) {
      classes_to_prune_.insert(klass.Ptr());
      if (klass->GetClassLoader() == class_loader_) {
        ++defined_class_count_;
      }
    }
    return true;
  }

  size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
    for (mirror::Class* klass : classes_to_prune_) {
      std::string storage;
      const char* descriptor = klass->GetDescriptor(&storage);
      bool result = class_table->Remove(descriptor);
      DCHECK(result);
      DCHECK(!class_table->Remove(descriptor)) << descriptor;
    }
    return defined_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  const ObjPtr<mirror::ClassLoader> class_loader_;
  std::unordered_set<mirror::Class*> classes_to_prune_;
  size_t defined_class_count_;
};

class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
      : image_writer_(image_writer), removed_class_count_(0) {}

  virtual void Visit(ObjPtr<mirror::ClassLoader> class_loader) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    PruneClassesVisitor classes_visitor(image_writer_, class_loader);
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
    class_table->Visit(classes_visitor);
    removed_class_count_ += classes_visitor.Prune();

    // Record app image class loader. The fake boot class loader should not get registered
    // and we should end up with only one class loader for an app and none for boot image.
    if (class_loader != nullptr && class_table != nullptr) {
      DCHECK(class_loader_ == nullptr);
      class_loader_ = class_loader;
    }
  }

  size_t GetRemovedClassCount() const {
    return removed_class_count_;
  }

  ObjPtr<mirror::ClassLoader> GetClassLoader() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return class_loader_;
  }

 private:
  ImageWriter* const image_writer_;
  size_t removed_class_count_;
  ObjPtr<mirror::ClassLoader> class_loader_;
};

void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  visitor->Visit(nullptr);  // Visit boot class loader.
  Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}

void ImageWriter::PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                          ObjPtr<mirror::ClassLoader> class_loader) {
  // To ensure deterministic contents of the hash-based arrays, each slot shall contain
  // the candidate with the lowest index. As we're processing entries in increasing index
  // order, this means trying to look up the entry for the current index if the slot is
  // empty or if it contains a higher index.
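  //
  // For example, if field ids 5 and 13 hash to the same slot, the slot should end up
  // describing index 5 whenever entry 5 can be resolved, regardless of which of the
  // two entries happened to be resolved first during compilation.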
943
944 Runtime* runtime = Runtime::Current();
945 ClassLinker* class_linker = runtime->GetClassLinker();
946 ArtMethod* resolution_method = runtime->GetResolutionMethod();
947 const DexFile& dex_file = *dex_cache->GetDexFile();
948 // Prune methods.
949 ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
950 for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
951 ArtMethod* method =
952 mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
953 DCHECK(method != nullptr) << "Expected resolution method instead of null method";
954 mirror::Class* declaring_class = method->GetDeclaringClass();
955 // Copied methods may be held live by a class which was not an image class but have a
956 // declaring class which is an image class. Set it to the resolution method to be safe and
957 // prevent dangling pointers.
958 if (method->IsCopied() || !KeepClass(declaring_class)) {
959 mirror::DexCache::SetElementPtrSize(resolved_methods,
960 i,
961 resolution_method,
962 target_ptr_size_);
963 } else if (kIsDebugBuild) {
964 // Check that the class is still in the classes table.
965 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
966 CHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
967 << Class::PrettyClass(declaring_class) << " not in class linker table";
968 }
969 }
970 // Prune fields and make the contents of the field array deterministic.
971 mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
972 dex::TypeIndex last_class_idx; // Initialized to invalid index.
973 ObjPtr<mirror::Class> last_class = nullptr;
974 for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
975 uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
976 auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
977 uint32_t stored_index = pair.index;
978 ArtField* field = pair.object;
979 if (field != nullptr && i > stored_index) {
980 continue; // Already checked.
981 }
982 // Check if the referenced class is in the image. Note that we want to check the referenced
983 // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
984 // results in resolving the referenced class and that can for example throw OOME.
985 const DexFile::FieldId& field_id = dex_file.GetFieldId(i);
986 if (field_id.class_idx_ != last_class_idx) {
987 last_class_idx = field_id.class_idx_;
988 last_class = class_linker->LookupResolvedType(
989 dex_file, last_class_idx, dex_cache, class_loader);
990 if (last_class != nullptr && !KeepClass(last_class)) {
991 last_class = nullptr;
992 }
993 }
994 if (field == nullptr || i < stored_index) {
995 if (last_class != nullptr) {
996 const char* name = dex_file.StringDataByIdx(field_id.name_idx_);
997 const char* type = dex_file.StringByTypeIdx(field_id.type_idx_);
998 field = mirror::Class::FindField(Thread::Current(), last_class, name, type);
999 if (field != nullptr) {
1000 // If the referenced class is in the image, the defining class must also be there.
1001 DCHECK(KeepClass(field->GetDeclaringClass()));
1002 dex_cache->SetResolvedField(i, field, target_ptr_size_);
1003 }
1004 }
1005 } else {
1006 DCHECK_EQ(i, stored_index);
1007 if (last_class == nullptr) {
1008 dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
1009 }
1010 }
1011 }
1012 // Prune types and make the contents of the type array deterministic.
1013 // This is done after fields and methods as their lookup can touch the types array.
1014 for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
1015 dex::TypeIndex type_idx(i);
1016 uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
1017 mirror::TypeDexCachePair pair =
1018 dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
1019 uint32_t stored_index = pair.index;
1020 ObjPtr<mirror::Class> klass = pair.object.Read();
1021 if (klass == nullptr || i < stored_index) {
1022 klass = class_linker->LookupResolvedType(dex_file, type_idx, dex_cache, class_loader);
1023 if (klass != nullptr) {
1024 DCHECK_EQ(dex_cache->GetResolvedType(type_idx), klass);
1025 stored_index = i; // For correct clearing below if not keeping the `klass`.
1026 }
1027 } else if (i == stored_index && !KeepClass(klass)) {
1028 dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
1029 }
1030 }
1031 // Strings do not need pruning, but the contents of the string array must be deterministic.
1032 for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
1033 dex::StringIndex string_idx(i);
1034 uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
1035 mirror::StringDexCachePair pair =
1036 dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
1037 uint32_t stored_index = pair.index;
1038 ObjPtr<mirror::String> string = pair.object.Read();
1039 if (string == nullptr || i < stored_index) {
1040 string = class_linker->LookupString(dex_file, string_idx, dex_cache);
1041 DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
1042 }
1043 }
1044}
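// The three pruning loops above all walk the same kind of hash-based dex cache array: each dex
// file index maps to a slot, several indices can share one slot, and the pair stored in a slot
// records which index currently owns it. A minimal sketch of that shape (the names and the
// modulo policy below are illustrative assumptions, not the exact ART implementation):
//
//   template <typename T>
//   struct DexCachePairSketch {
//     T* object;       // Resolved entry, or nullptr.
//     uint32_t index;  // Dex file index that owns this slot.
//   };
//   uint32_t SlotIndexSketch(uint32_t dex_index, uint32_t num_slots) {
//     return dex_index % num_slots;  // Assumed policy; the real hashing may differ.
//   }
//
// Because the loops visit indices in ascending order, a slot owned by a smaller index is skipped
// ("already checked"), while an empty slot or one owned by a larger index may be (re)filled for
// the current index. The effect is that the smallest resolvable index deterministically wins each
// slot, which is what makes the serialized array contents reproducible.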
1045
Brian Carlstrom7940e442013-07-12 13:46:57 -07001046void ImageWriter::PruneNonImageClasses() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001047 Runtime* runtime = Runtime::Current();
1048 ClassLinker* class_linker = runtime->GetClassLinker();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001049 Thread* self = Thread::Current();
Vladimir Markof25cc732017-03-16 16:18:15 +00001050 ScopedAssertNoThreadSuspension sa(__FUNCTION__);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001051
Mathieu Chartier696632e2016-06-03 17:47:32 -07001052  // Clear the class table strong roots so that dex caches can get pruned; pruning the class
1053  // path dex caches is required.
1054 class_linker->ClearClassTableStrongRoots();
1055
Brian Carlstrom7940e442013-07-12 13:46:57 -07001056 // Remove the undesired classes from the class roots.
Vladimir Markof25cc732017-03-16 16:18:15 +00001057 ObjPtr<mirror::ClassLoader> class_loader;
Vladimir Markoc5798bf2016-12-09 10:20:54 +00001058 {
1059 PruneClassLoaderClassesVisitor class_loader_visitor(this);
1060 VisitClassLoaders(&class_loader_visitor);
1061 VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
Vladimir Markof25cc732017-03-16 16:18:15 +00001062 class_loader = class_loader_visitor.GetClassLoader();
1063 DCHECK_EQ(class_loader != nullptr, compile_app_image_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001064 }
1065
1066 // Clear references to removed classes from the DexCaches.
Vladimir Markof25cc732017-03-16 16:18:15 +00001067 std::vector<ObjPtr<mirror::DexCache>> dex_caches;
1068 {
1069 ReaderMutexLock mu2(self, *Locks::dex_lock_);
1070 dex_caches.reserve(class_linker->GetDexCachesData().size());
1071 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
1072 if (self->IsJWeakCleared(data.weak_root)) {
1073 continue;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001074 }
Vladimir Markof25cc732017-03-16 16:18:15 +00001075 dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001076 }
Vladimir Markof25cc732017-03-16 16:18:15 +00001077 }
1078 for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
1079 PruneAndPreloadDexCache(dex_cache, class_loader);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001080 }
Andreas Gampe8ac75952015-06-02 21:01:45 -07001081
1082 // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
1083 class_linker->DropFindArrayClassCache();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001084
1085 // Clear to save RAM.
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00001086 prune_class_memo_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001087}
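// Rough order of operations in PruneNonImageClasses(): (1) drop the strong class table roots so
// unreachable classes and dex caches can actually be pruned, (2) prune classes per class loader,
// (3) rewrite every live DexCache so it only references kept classes, fields and methods, and
// (4) drop the ClassLinker's find-array-class cache, which would otherwise keep pruned array
// classes alive. prune_class_memo_ appears to be a memoization of the pruning decisions, so it
// can be cleared afterwards to save RAM, as the comment above notes.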
1088
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001089void ImageWriter::CheckNonImageClassesRemoved() {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001090 if (compiler_driver_.GetImageClasses() != nullptr) {
1091 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001092 heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001093 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001094}
1095
1096void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
1097 ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001098 if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001099 Class* klass = obj->AsClass();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001100 if (!image_writer->KeepClass(klass)) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001101 image_writer->DumpImageClasses();
Ian Rogers1ff3c982014-08-12 02:30:58 -07001102 std::string temp;
Mathieu Chartier4f5e3cb2017-06-12 13:10:01 -07001103 CHECK(image_writer->KeepClass(klass))
1104 << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001105 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001106 }
1107}
1108
1109void ImageWriter::DumpImageClasses() {
Andreas Gampeb1fcead2015-04-20 18:53:51 -07001110 auto image_classes = compiler_driver_.GetImageClasses();
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001111 CHECK(image_classes != nullptr);
Mathieu Chartier02e25112013-08-14 16:14:24 -07001112 for (const std::string& image_class : *image_classes) {
1113 LOG(INFO) << " " << image_class;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001114 }
1115}
1116
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001117mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
1118 Thread* const self = Thread::Current();
Vladimir Marko944da602016-02-19 12:27:55 +00001119 for (const ImageInfo& image_info : image_infos_) {
Mathieu Chartier9e868092016-10-31 14:58:04 -07001120 ObjPtr<mirror::String> const found = image_info.intern_table_->LookupStrong(self, string);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001121 DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
1122 << string->ToModifiedUtf8();
1123 if (found != nullptr) {
Mathieu Chartier9e868092016-10-31 14:58:04 -07001124 return found.Ptr();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001125 }
1126 }
1127 if (compile_app_image_) {
1128 Runtime* const runtime = Runtime::Current();
Mathieu Chartier9e868092016-10-31 14:58:04 -07001129 ObjPtr<mirror::String> found = runtime->GetInternTable()->LookupStrong(self, string);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001130 // If we found it in the runtime intern table it could either be in the boot image or interned
1131 // during app image compilation. If it was in the boot image return that, otherwise return null
1132 // since it belongs to another image space.
Mathieu Chartier9e868092016-10-31 14:58:04 -07001133 if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found.Ptr())) {
1134 return found.Ptr();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001135 }
1136 DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
1137 << string->ToModifiedUtf8();
1138 }
1139 return nullptr;
1140}
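// A minimal usage sketch for the lookup above (the caller, `writer` and `str` are hypothetical
// and only for illustration):
//
//   mirror::String* canonical = writer->FindInternedString(str);
//   mirror::String* to_reference = (canonical != nullptr) ? canonical : str;
//
// i.e. callers prefer the copy that is already interned in one of the image intern tables (or in
// the boot image) and fall back to the original string only when no such copy exists. This is
// exactly how TryAssignBinSlot() uses it below.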
1141
Brian Carlstrom7940e442013-07-12 13:46:57 -07001142
Vladimir Marko944da602016-02-19 12:27:55 +00001143ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001144 Runtime* runtime = Runtime::Current();
1145 ClassLinker* class_linker = runtime->GetClassLinker();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001146 Thread* self = Thread::Current();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001147 StackHandleScope<3> hs(self);
1148 Handle<Class> object_array_class(hs.NewHandle(
1149 class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001150
Jeff Haodcdc85b2015-12-04 14:06:18 -08001151 std::unordered_set<const DexFile*> image_dex_files;
Vladimir Marko944da602016-02-19 12:27:55 +00001152 for (auto& pair : dex_file_oat_index_map_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001153 const DexFile* image_dex_file = pair.first;
Vladimir Marko944da602016-02-19 12:27:55 +00001154 size_t image_oat_index = pair.second;
1155 if (oat_index == image_oat_index) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001156 image_dex_files.insert(image_dex_file);
1157 }
1158 }
1159
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001160 // build an Object[] of all the DexCaches used in the source_space_.
1161 // Since we can't hold the dex lock when allocating the dex_caches
1162  // ObjectArray, we lock the dex lock twice: first to get the number
1163  // of dex caches and then again to copy the dex
1164 // caches. We check that the number of dex caches does not change.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001165 size_t dex_cache_count = 0;
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001166 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001167 ReaderMutexLock mu(self, *Locks::dex_lock_);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001168 // Count number of dex caches not in the boot image.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08001169 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001170 ObjPtr<mirror::DexCache> dex_cache =
1171 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
Brian Carlstrom0c050a12016-04-29 10:28:34 -07001172 if (dex_cache == nullptr) {
1173 continue;
1174 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001175 const DexFile* dex_file = dex_cache->GetDexFile();
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001176 if (!IsInBootImage(dex_cache.Ptr())) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001177 dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
1178 }
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001179 }
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001180 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001181 Handle<ObjectArray<Object>> dex_caches(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001182 hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08001183 CHECK(dex_caches != nullptr) << "Failed to allocate a dex cache array.";
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001184 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001185 ReaderMutexLock mu(self, *Locks::dex_lock_);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001186 size_t non_image_dex_caches = 0;
1187 // Re-count number of non image dex caches.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08001188 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001189 ObjPtr<mirror::DexCache> dex_cache =
1190 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
Brian Carlstrom0c050a12016-04-29 10:28:34 -07001191 if (dex_cache == nullptr) {
1192 continue;
1193 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001194 const DexFile* dex_file = dex_cache->GetDexFile();
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001195 if (!IsInBootImage(dex_cache.Ptr())) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001196 non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
1197 }
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001198 }
1199 CHECK_EQ(dex_cache_count, non_image_dex_caches)
1200 << "The number of non-image dex caches changed.";
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07001201 size_t i = 0;
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08001202 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001203 ObjPtr<mirror::DexCache> dex_cache =
1204 ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
Brian Carlstrom0c050a12016-04-29 10:28:34 -07001205 if (dex_cache == nullptr) {
1206 continue;
1207 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001208 const DexFile* dex_file = dex_cache->GetDexFile();
Mathieu Chartierc4f39252016-10-05 18:32:08 -07001209 if (!IsInBootImage(dex_cache.Ptr()) &&
1210 image_dex_files.find(dex_file) != image_dex_files.end()) {
1211 dex_caches->Set<false>(i, dex_cache.Ptr());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001212 ++i;
1213 }
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -07001214 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001215 }
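  // The dex caches are counted and copied under two separate acquisitions of dex_lock_,
  // presumably because the ObjectArray allocation in between may block (e.g. for GC) and so
  // cannot happen with the lock held; the CHECK_EQ above guards against the set of dex caches
  // changing while the lock was released.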
1216
1217 // build an Object[] of the roots needed to restore the runtime
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001218 int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compile_app_image_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001219 auto image_roots(hs.NewHandle(
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001220 ObjectArray<Object>::Alloc(self, object_array_class.Get(), image_roots_size)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001221 image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01001222 image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001223 // image_roots[ImageHeader::kClassLoader] will be set later for app image.
1224 static_assert(ImageHeader::kClassLoader + 1u == ImageHeader::kImageRootsMax,
1225 "Class loader should be the last image root.");
1226 for (int32_t i = 0; i < ImageHeader::kImageRootsMax - 1; ++i) {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001227 CHECK(image_roots->Get(i) != nullptr);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001228 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001229 return image_roots.Get();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001230}
1231
Mathieu Chartier496577f2016-09-20 15:33:31 -07001232mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
1233 mirror::Object* obj,
1234 size_t oat_index) {
1235 if (obj == nullptr || IsInBootImage(obj)) {
1236 // Object is null or already in the image, there is no work to do.
1237 return obj;
Mathieu Chartier590fee92013-09-13 13:46:47 -07001238 }
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001239 if (!IsImageBinSlotAssigned(obj)) {
Mathieu Chartier496577f2016-09-20 15:33:31 -07001240    // We want to intern all strings, but we also assign a bin slot to the source string. Since
1241    // the pruning phase has already happened, if we intern a string to one already in the image
1242    // we still end up copying the now-unreachable source string.
1243 if (obj->IsString()) {
1244      // Need to check if the string is already interned in another image info so that we don't
1245      // end up with the intern tables of two different images containing the same string.
1246 mirror::String* interned = FindInternedString(obj->AsString());
1247 if (interned == nullptr) {
1248 // Not in another image space, insert to our table.
Mathieu Chartier9e868092016-10-31 14:58:04 -07001249 interned =
1250 GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString()).Ptr();
Mathieu Chartier496577f2016-09-20 15:33:31 -07001251 DCHECK_EQ(interned, obj);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001252 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001253 } else if (obj->IsDexCache()) {
1254 oat_index = GetOatIndexForDexCache(obj->AsDexCache());
1255 } else if (obj->IsClass()) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001256 // Visit and assign offsets for fields and field arrays.
Mathieu Chartier496577f2016-09-20 15:33:31 -07001257 mirror::Class* as_klass = obj->AsClass();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001258 mirror::DexCache* dex_cache = as_klass->GetDexCache();
Vladimir Marko72ab6842017-01-20 19:32:50 +00001259 DCHECK(!as_klass->IsErroneous()) << as_klass->GetStatus();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001260 if (compile_app_image_) {
1261 // Extra sanity, no boot loader classes should be left!
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00001262 CHECK(!IsBootClassLoaderClass(as_klass)) << as_klass->PrettyClass();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001263 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001264 LengthPrefixedArray<ArtField>* fields[] = {
1265 as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
1266 };
Mathieu Chartier496577f2016-09-20 15:33:31 -07001267      // Overwrite the oat index value since the class' dex cache is a more accurate indication of
1268      // where it belongs.
1269 oat_index = GetOatIndexForDexCache(dex_cache);
Vladimir Marko944da602016-02-19 12:27:55 +00001270 ImageInfo& image_info = GetImageInfo(oat_index);
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00001271 if (!compile_app_image_) {
1272 // Note: Avoid locking to prevent lock order violations from root visiting;
1273 // image_info.class_table_ is only accessed from the image writer.
Mathieu Chartier496577f2016-09-20 15:33:31 -07001274 image_info.class_table_->InsertWithoutLocks(as_klass);
Mathieu Chartier1f47b672016-01-07 16:29:01 -08001275 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001276 for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
1277 // Total array length including header.
1278 if (cur_fields != nullptr) {
1279 const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
1280 // Forward the entire array at once.
1281 auto it = native_object_relocations_.find(cur_fields);
1282 CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
1283 << " already forwarded";
Jeff Haodcdc85b2015-12-04 14:06:18 -08001284 size_t& offset = image_info.bin_slot_sizes_[kBinArtField];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001285 DCHECK(!IsInBootImage(cur_fields));
Vladimir Marko944da602016-02-19 12:27:55 +00001286 native_object_relocations_.emplace(
1287 cur_fields,
1288 NativeObjectRelocation {
1289 oat_index, offset, kNativeObjectRelocationTypeArtFieldArray
1290 });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001291 offset += header_size;
1292 // Forward individual fields so that we can quickly find where they belong.
Vladimir Marko35831e82015-09-11 11:59:18 +01001293 for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001294 // Need to forward arrays separate of fields.
1295 ArtField* field = &cur_fields->At(i);
1296 auto it2 = native_object_relocations_.find(field);
1297 CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
David Sehr709b0702016-10-13 09:12:37 -07001298 << " already assigned " << field->PrettyField() << " static=" << field->IsStatic();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001299 DCHECK(!IsInBootImage(field));
Vladimir Marko944da602016-02-19 12:27:55 +00001300 native_object_relocations_.emplace(
1301 field,
1302 NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeArtField });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001303 offset += sizeof(ArtField);
1304 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001305 }
1306 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001307 // Visit and assign offsets for methods.
Alex Lighte64300b2015-12-15 15:02:47 -08001308 size_t num_methods = as_klass->NumMethods();
1309 if (num_methods != 0) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001310 bool any_dirty = false;
Alex Lighte64300b2015-12-15 15:02:47 -08001311 for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
1312 if (WillMethodBeDirty(&m)) {
1313 any_dirty = true;
1314 break;
1315 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001316 }
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001317 NativeObjectRelocationType type = any_dirty
1318 ? kNativeObjectRelocationTypeArtMethodDirty
1319 : kNativeObjectRelocationTypeArtMethodClean;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001320 Bin bin_type = BinTypeForNativeRelocationType(type);
1321 // Forward the entire array at once, but header first.
Alex Lighte64300b2015-12-15 15:02:47 -08001322 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1323 const size_t method_size = ArtMethod::Size(target_ptr_size_);
Vladimir Markocf36d492015-08-12 19:27:26 +01001324 const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
1325 method_size,
1326 method_alignment);
Alex Lighte64300b2015-12-15 15:02:47 -08001327 LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001328 auto it = native_object_relocations_.find(array);
Alex Lighte64300b2015-12-15 15:02:47 -08001329 CHECK(it == native_object_relocations_.end())
1330 << "Method array " << array << " already forwarded";
Jeff Haodcdc85b2015-12-04 14:06:18 -08001331 size_t& offset = image_info.bin_slot_sizes_[bin_type];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001332 DCHECK(!IsInBootImage(array));
Jeff Haodcdc85b2015-12-04 14:06:18 -08001333 native_object_relocations_.emplace(array,
1334 NativeObjectRelocation {
Vladimir Marko944da602016-02-19 12:27:55 +00001335 oat_index,
Jeff Haodcdc85b2015-12-04 14:06:18 -08001336 offset,
1337 any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty
1338 : kNativeObjectRelocationTypeArtMethodArrayClean });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001339 offset += header_size;
Alex Lighte64300b2015-12-15 15:02:47 -08001340 for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
Vladimir Marko944da602016-02-19 12:27:55 +00001341 AssignMethodOffset(&m, type, oat_index);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001342 }
Alex Lighte64300b2015-12-15 15:02:47 -08001343 (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
Mathieu Chartier97bad1b2016-05-16 14:58:01 -07001344 }
1345 // Assign offsets for all runtime methods in the IMT since these may hold conflict tables
1346 // live.
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001347 if (as_klass->ShouldHaveImt()) {
1348 ImTable* imt = as_klass->GetImt(target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001349 if (TryAssignImTableOffset(imt, oat_index)) {
1350 // Since imt's can be shared only do this the first time to not double count imt method
1351 // fixups.
1352 for (size_t i = 0; i < ImTable::kSize; ++i) {
1353 ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
1354 DCHECK(imt_method != nullptr);
1355 if (imt_method->IsRuntimeMethod() &&
1356 !IsInBootImage(imt_method) &&
1357 !NativeRelocationAssigned(imt_method)) {
1358 AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
1359 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001360 }
1361 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001362 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001363 } else if (obj->IsClassLoader()) {
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001364 // Register the class loader if it has a class table.
1365 // The fake boot class loader should not get registered and we should end up with only one
1366 // class loader.
Mathieu Chartier496577f2016-09-20 15:33:31 -07001367 mirror::ClassLoader* class_loader = obj->AsClassLoader();
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001368 if (class_loader->GetClassTable() != nullptr) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00001369 DCHECK(compile_app_image_);
1370 DCHECK(class_loaders_.empty());
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001371 class_loaders_.insert(class_loader);
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00001372 ImageInfo& image_info = GetImageInfo(oat_index);
1373 // Note: Avoid locking to prevent lock order violations from root visiting;
1374      // image_info.class_table_ is only accessed from the image writer
1375 // and class_loader->GetClassTable() is iterated but not modified.
1376 image_info.class_table_->CopyWithoutLocks(*class_loader->GetClassTable());
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001377 }
Mathieu Chartier590fee92013-09-13 13:46:47 -07001378 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001379 AssignImageBinSlot(obj, oat_index);
1380 work_stack.emplace(obj, oat_index);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001381 }
Mathieu Chartier496577f2016-09-20 15:33:31 -07001382 if (obj->IsString()) {
1383 // Always return the interned string if there exists one.
1384 mirror::String* interned = FindInternedString(obj->AsString());
1385 if (interned != nullptr) {
1386 return interned;
1387 }
1388 }
1389 return obj;
Mathieu Chartier590fee92013-09-13 13:46:47 -07001390}
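// Note that TryAssignBinSlot() returns the object that should actually be referenced from the
// image: for strings this is the interned copy when one exists, otherwise the input object
// itself. VisitReferencesVisitor below relies on this to rewrite references while it walks the
// object graph.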
1391
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001392bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
1393 return native_object_relocations_.find(ptr) != native_object_relocations_.end();
1394}
1395
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001396bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001397 // No offset, or already assigned.
1398 if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001399 return false;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001400 }
1401  // Assign the ImTable an offset in the kBinImTable bin.
1402 ImageInfo& image_info = GetImageInfo(oat_index);
1403 const size_t size = ImTable::SizeInBytes(target_ptr_size_);
1404 native_object_relocations_.emplace(
1405 imt,
1406 NativeObjectRelocation {
1407 oat_index,
1408 image_info.bin_slot_sizes_[kBinImTable],
1409 kNativeObjectRelocationTypeIMTable});
1410 image_info.bin_slot_sizes_[kBinImTable] += size;
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001411 return true;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001412}
1413
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001414void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
1415 // No offset, or already assigned.
1416 if (table == nullptr || NativeRelocationAssigned(table)) {
1417 return;
1418 }
1419 CHECK(!IsInBootImage(table));
1420 // If the method is a conflict method we also want to assign the conflict table offset.
1421 ImageInfo& image_info = GetImageInfo(oat_index);
1422 const size_t size = table->ComputeSize(target_ptr_size_);
1423 native_object_relocations_.emplace(
1424 table,
1425 NativeObjectRelocation {
1426 oat_index,
1427 image_info.bin_slot_sizes_[kBinIMTConflictTable],
1428 kNativeObjectRelocationTypeIMTConflictTable});
1429 image_info.bin_slot_sizes_[kBinIMTConflictTable] += size;
1430}
1431
Jeff Haodcdc85b2015-12-04 14:06:18 -08001432void ImageWriter::AssignMethodOffset(ArtMethod* method,
1433 NativeObjectRelocationType type,
Vladimir Marko944da602016-02-19 12:27:55 +00001434 size_t oat_index) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001435 DCHECK(!IsInBootImage(method));
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001436 CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
David Sehr709b0702016-10-13 09:12:37 -07001437 << ArtMethod::PrettyMethod(method);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001438 if (method->IsRuntimeMethod()) {
1439 TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
1440 }
Vladimir Marko944da602016-02-19 12:27:55 +00001441 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001442 size_t& offset = image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
Vladimir Marko944da602016-02-19 12:27:55 +00001443 native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
Vladimir Marko14632852015-08-17 12:07:23 +01001444 offset += ArtMethod::Size(target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001445}
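// Bin slot bookkeeping sketch: AssignMethodOffset() and the TryAssign*Offset() helpers above all
// append a native object to its bin by recording the bin's current size as the object's offset
// and then growing the bin. Conceptually:
//
//   size_t& bin_size = image_info.bin_slot_sizes_[bin];
//   relocation.offset = bin_size;   // Offset relative to the start of this bin.
//   bin_size += object_size;        // Reserve space for the object.
//
// These bin-relative offsets only become image-relative at the end of CalculateNewObjectOffsets(),
// where each relocation gets its bin's start offset added.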
1446
Mathieu Chartier496577f2016-09-20 15:33:31 -07001447void ImageWriter::EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001448 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1449 DCHECK(writer != nullptr);
Mathieu Chartier496577f2016-09-20 15:33:31 -07001450 if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
David Sehr709b0702016-10-13 09:12:37 -07001451 CHECK(writer->IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
Mathieu Chartier496577f2016-09-20 15:33:31 -07001452 }
1453}
1454
1455void ImageWriter::DeflateMonitorCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) {
1456 Monitor::Deflate(Thread::Current(), obj);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001457}
1458
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001459void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
1460 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1461 DCHECK(writer != nullptr);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001462 if (!writer->IsInBootImage(obj)) {
1463 writer->UnbinObjectsIntoOffset(obj);
1464 }
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001465}
1466
1467void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001468 DCHECK(!IsInBootImage(obj));
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001469 CHECK(obj != nullptr);
1470
1471 // We know the bin slot, and the total bin sizes for all objects by now,
1472 // so calculate the object's final image offset.
1473
1474 DCHECK(IsImageBinSlotAssigned(obj));
1475 BinSlot bin_slot = GetImageBinSlot(obj);
1476 // Change the lockword from a bin slot into an offset
1477 AssignImageOffset(obj, bin_slot);
1478}
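// During layout an object's lock word temporarily encodes its BinSlot; once all bin sizes are
// known, AssignImageOffset() (called above) replaces that encoding with the object's final image
// offset. This is presumably part of why monitors are deflated first (see
// DeflateMonitorCallback): a fat lock word could not double as a bin slot.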
1479
Mathieu Chartier496577f2016-09-20 15:33:31 -07001480class ImageWriter::VisitReferencesVisitor {
1481 public:
1482 VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
1483 : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}
1484
1485 // Fix up separately since we also need to fix up method entrypoints.
1486 ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
1487 REQUIRES_SHARED(Locks::mutator_lock_) {
1488 if (!root->IsNull()) {
1489 VisitRoot(root);
1490 }
1491 }
1492
1493 ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1494 REQUIRES_SHARED(Locks::mutator_lock_) {
1495 root->Assign(VisitReference(root->AsMirrorPtr()));
1496 }
1497
Mathieu Chartier31e88222016-10-14 18:43:19 -07001498 ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
Mathieu Chartier496577f2016-09-20 15:33:31 -07001499 MemberOffset offset,
1500 bool is_static ATTRIBUTE_UNUSED) const
1501 REQUIRES_SHARED(Locks::mutator_lock_) {
1502 mirror::Object* ref =
1503 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1504 obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
1505 }
1506
Mathieu Chartier31e88222016-10-14 18:43:19 -07001507 ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1508 ObjPtr<mirror::Reference> ref) const
Mathieu Chartier496577f2016-09-20 15:33:31 -07001509 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001510 operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
Mathieu Chartier496577f2016-09-20 15:33:31 -07001511 }
1512
1513 private:
1514 mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
1515 return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
1516 }
1517
1518 ImageWriter* const image_writer_;
1519 WorkStack* const work_stack_;
1520 const size_t oat_index_;
1521};
1522
1523class ImageWriter::GetRootsVisitor : public RootVisitor {
1524 public:
1525 explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}
1526
1527 void VisitRoots(mirror::Object*** roots,
1528 size_t count,
1529 const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1530 REQUIRES_SHARED(Locks::mutator_lock_) {
1531 for (size_t i = 0; i < count; ++i) {
1532 roots_->push_back(*roots[i]);
1533 }
1534 }
1535
1536 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
1537 size_t count,
1538 const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1539 REQUIRES_SHARED(Locks::mutator_lock_) {
1540 for (size_t i = 0; i < count; ++i) {
1541 roots_->push_back(roots[i]->AsMirrorPtr());
1542 }
1543 }
1544
1545 private:
1546 std::vector<mirror::Object*>* const roots_;
1547};
1548
1549void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
1550 while (!work_stack->empty()) {
1551 std::pair<mirror::Object*, size_t> pair(work_stack->top());
1552 work_stack->pop();
1553 VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
1554 // Walk references and assign bin slots for them.
1555 pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1556 visitor,
1557 visitor);
1558 }
1559}
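// ProcessWorkStack() drives an explicit, iterative traversal of the object graph: each popped
// <object, oat_index> pair has its references visited, and VisitReferencesVisitor pushes any
// newly bin-slotted object back onto the stack via TryAssignBinSlot(). A condensed sketch of the
// same idea (names are illustrative only):
//
//   std::stack<std::pair<mirror::Object*, size_t>> work;
//   work.emplace(root, oat_index);
//   while (!work.empty()) {
//     std::pair<mirror::Object*, size_t> top = work.top();
//     work.pop();
//     // Visit each reference of top.first: assign it a bin slot if needed and push it with its
//     // oat index so its own references get processed too.
//   }
//
// Using an explicit work stack avoids unbounded recursion over long reference chains.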
1560
Vladimir Markof4da6752014-08-01 19:04:18 +01001561void ImageWriter::CalculateNewObjectOffsets() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001562 Thread* const self = Thread::Current();
Mathieu Chartiere8a3c572016-10-11 16:52:17 -07001563 VariableSizedHandleScope handles(self);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001564 std::vector<Handle<ObjectArray<Object>>> image_roots;
Vladimir Marko944da602016-02-19 12:27:55 +00001565 for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
1566 image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));
Jeff Haodcdc85b2015-12-04 14:06:18 -08001567 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001568
Mathieu Chartier496577f2016-09-20 15:33:31 -07001569 Runtime* const runtime = Runtime::Current();
1570 gc::Heap* const heap = runtime->GetHeap();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001571
Mathieu Chartier31e89252013-08-28 11:29:12 -07001572  // Leave space for the header, but do not write it yet; we need to
Brian Carlstrom7940e442013-07-12 13:46:57 -07001573  // know where image_roots is going to end up.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001574 image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit-alignment
Brian Carlstrom7940e442013-07-12 13:46:57 -07001575
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001576 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001577 // Write the image runtime methods.
1578 image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
1579 image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
1580 image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001581 image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001582 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves);
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001583 image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001584 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly);
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001585 image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001586 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
Vladimir Marko952dbb12016-07-28 12:01:51 +01001587 image_methods_[ImageHeader::kSaveEverythingMethod] =
Andreas Gampe8228cdf2017-05-30 15:03:54 -07001588 runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001589 // Visit image methods first to have the main runtime methods in the first image.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001590 for (auto* m : image_methods_) {
1591 CHECK(m != nullptr);
1592 CHECK(m->IsRuntimeMethod());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001593 DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
1594 if (!IsInBootImage(m)) {
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001595 AssignMethodOffset(m, kNativeObjectRelocationTypeRuntimeMethod, GetDefaultOatIndex());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001596 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001597 }
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001598
Mathieu Chartier496577f2016-09-20 15:33:31 -07001599 // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
1600 // this lock while holding other locks may cause lock order violations.
1601 heap->VisitObjects(DeflateMonitorCallback, this);
1602
1603 // Work list of <object, oat_index> for objects. Everything on the stack must already be
1604 // assigned a bin slot.
1605 WorkStack work_stack;
1606
1607 // Special case interned strings to put them in the image they are likely to be resolved from.
1608 for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
1609 auto it = dex_file_oat_index_map_.find(dex_file);
1610 DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
1611 const size_t oat_index = it->second;
1612 InternTable* const intern_table = runtime->GetInternTable();
1613 for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
1614 uint32_t utf16_length;
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001615 const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
1616 &utf16_length);
Mathieu Chartier9e868092016-10-31 14:58:04 -07001617 mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data).Ptr();
Mathieu Chartier496577f2016-09-20 15:33:31 -07001618 TryAssignBinSlot(work_stack, string, oat_index);
1619 }
1620 }
1621
1622 // Get the GC roots and then visit them separately to avoid lock violations since the root visitor
1623 // visits roots while holding various locks.
1624 {
1625 std::vector<mirror::Object*> roots;
1626 GetRootsVisitor root_visitor(&roots);
1627 runtime->VisitRoots(&root_visitor);
1628 for (mirror::Object* obj : roots) {
1629 TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
1630 }
1631 }
1632 ProcessWorkStack(&work_stack);
1633
1634  // For app images, there may be objects that are only held live by the boot image. One
1635 // example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
1636 // does not fail any checks. TODO: We should probably avoid copying these objects.
1637 if (compile_app_image_) {
1638 for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
1639 DCHECK(space->IsImageSpace());
1640 gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
1641 live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
1642 reinterpret_cast<uintptr_t>(space->Limit()),
1643 [this, &work_stack](mirror::Object* obj)
1644 REQUIRES_SHARED(Locks::mutator_lock_) {
1645 VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
1646 // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
1647 obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1648 visitor,
1649 visitor);
1650 });
1651 }
1652 // Process the work stack in case anything was added by TryAssignBinSlot.
1653 ProcessWorkStack(&work_stack);
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001654
1655 // Store the class loader in the class roots.
1656 CHECK_EQ(class_loaders_.size(), 1u);
1657 CHECK_EQ(image_roots.size(), 1u);
1658 CHECK(*class_loaders_.begin() != nullptr);
1659 image_roots[0]->Set<false>(ImageHeader::kClassLoader, *class_loaders_.begin());
Mathieu Chartier496577f2016-09-20 15:33:31 -07001660 }
1661
1662 // Verify that all objects have assigned image bin slots.
1663 heap->VisitObjects(EnsureBinSlotAssignedCallback, this);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001664
Vladimir Marko05792b92015-08-03 11:56:49 +01001665 // Calculate size of the dex cache arrays slot and prepare offsets.
1666 PrepareDexCacheArraySlots();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001667
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001668 // Calculate the sizes of the intern tables, class tables, and fixup tables.
Vladimir Marko944da602016-02-19 12:27:55 +00001669 for (ImageInfo& image_info : image_infos_) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -08001670 // Calculate how big the intern table will be after being serialized.
1671 InternTable* const intern_table = image_info.intern_table_.get();
1672 CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
Vladimir Marko1a1de672016-10-13 12:53:15 +01001673 if (intern_table->StrongSize() != 0u) {
1674 image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
1675 }
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001676
Mathieu Chartier1f47b672016-01-07 16:29:01 -08001677 // Calculate the size of the class table.
1678 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00001679 DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
1680 if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01001681 image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
1682 }
Mathieu Chartierea0831f2015-12-29 13:17:37 -08001683 }
1684
Vladimir Markocf36d492015-08-12 19:27:26 +01001685 // Calculate bin slot offsets.
Vladimir Marko944da602016-02-19 12:27:55 +00001686 for (ImageInfo& image_info : image_infos_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001687 size_t bin_offset = image_objects_offset_begin_;
1688 for (size_t i = 0; i != kBinSize; ++i) {
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001689 switch (i) {
1690 case kBinArtMethodClean:
1691 case kBinArtMethodDirty: {
1692 bin_offset = RoundUp(bin_offset, method_alignment);
1693 break;
1694 }
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07001695 case kBinDexCacheArray:
Vladimir Markof44d36c2017-03-14 14:18:46 +00001696 bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07001697 break;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001698 case kBinImTable:
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001699 case kBinIMTConflictTable: {
Andreas Gampe542451c2016-07-26 09:02:02 -07001700 bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001701 break;
1702 }
1703 default: {
1704 // Normal alignment.
1705 }
1706 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001707 image_info.bin_slot_offsets_[i] = bin_offset;
1708 bin_offset += image_info.bin_slot_sizes_[i];
Vladimir Markocf36d492015-08-12 19:27:26 +01001709 }
Jeff Haodcdc85b2015-12-04 14:06:18 -08001710 // NOTE: There may be additional padding between the bin slots and the intern table.
1711 DCHECK_EQ(image_info.image_end_,
1712 GetBinSizeSum(image_info, kBinMirrorCount) + image_objects_offset_begin_);
Vladimir Marko20f85592015-03-19 10:07:02 +00001713 }
Vladimir Markocf36d492015-08-12 19:27:26 +01001714
Jeff Haodcdc85b2015-12-04 14:06:18 -08001715 // Calculate image offsets.
1716 size_t image_offset = 0;
Vladimir Marko944da602016-02-19 12:27:55 +00001717 for (ImageInfo& image_info : image_infos_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001718 image_info.image_begin_ = global_image_begin_ + image_offset;
1719 image_info.image_offset_ = image_offset;
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001720 ImageSection unused_sections[ImageHeader::kSectionCount];
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001721 image_info.image_size_ = RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001722 // There should be no gaps until the next image.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001723 image_offset += image_info.image_size_;
1724 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001725
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -08001726 // Transform each object's bin slot into an offset which will be used to do the final copy.
1727 heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001728
Jeff Haodcdc85b2015-12-04 14:06:18 -08001729 size_t i = 0;
Vladimir Marko944da602016-02-19 12:27:55 +00001730 for (ImageInfo& image_info : image_infos_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001731 image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
1732 i++;
1733 }
Vladimir Markof4da6752014-08-01 19:04:18 +01001734
Mathieu Chartiere401d142015-04-22 13:56:20 -07001735  // Update the native relocations by adding the start offset of their bin.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001736 for (auto& pair : native_object_relocations_) {
1737 NativeObjectRelocation& relocation = pair.second;
1738 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
Vladimir Marko944da602016-02-19 12:27:55 +00001739 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001740 relocation.offset += image_info.bin_slot_offsets_[bin_type];
Mathieu Chartiere401d142015-04-22 13:56:20 -07001741 }
Vladimir Markof4da6752014-08-01 19:04:18 +01001742}
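// Summary of the phases in CalculateNewObjectOffsets(): create per-oat-file image roots, record
// the runtime image methods, deflate monitors, assign bin slots by walking interned strings, GC
// roots and (for app images) objects held live only by the boot image, size the intern and class
// tables, turn per-bin sizes into per-bin start offsets, convert every object's bin slot into a
// final image offset, and finally add each native relocation's bin start offset.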
1743
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001744size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001745 DCHECK(out_sections != nullptr);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001746
1747 // Do not round up any sections here that are represented by the bins since it will break
1748 // offsets.
1749
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001750 // Objects section
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001751 ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001752 *objects_section = ImageSection(0u, image_end_);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001753
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001754 // Add field section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001755 ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
1756 *field_section = ImageSection(bin_slot_offsets_[kBinArtField], bin_slot_sizes_[kBinArtField]);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001757 CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001758
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001759 // Add method section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001760 ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
1761 *methods_section = ImageSection(
1762 bin_slot_offsets_[kBinArtMethodClean],
1763 bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinArtMethodDirty]);
1764
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001765 // IMT section.
1766 ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
1767 *imt_section = ImageSection(bin_slot_offsets_[kBinImTable], bin_slot_sizes_[kBinImTable]);
1768
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001769 // Conflict tables section.
1770 ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
1771 *imt_conflict_tables_section = ImageSection(bin_slot_offsets_[kBinIMTConflictTable],
1772 bin_slot_sizes_[kBinIMTConflictTable]);
1773
1774 // Runtime methods section.
1775 ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
1776 *runtime_methods_section = ImageSection(bin_slot_offsets_[kBinRuntimeMethod],
1777 bin_slot_sizes_[kBinRuntimeMethod]);
1778
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001779 // Add dex cache arrays section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001780 ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
1781 *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
1782 bin_slot_sizes_[kBinDexCacheArray]);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001783 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001784 size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001785 // Calculate the size of the interned strings.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001786 ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001787 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1788 cur_pos = interned_strings_section->End();
1789 // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
1790 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
1791 // Calculate the size of the class table section.
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001792 ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
Mathieu Chartier1f47b672016-01-07 16:29:01 -08001793 *class_table_section = ImageSection(cur_pos, class_table_bytes_);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001794 cur_pos = class_table_section->End();
1795 // Image end goes right before the start of the image bitmap.
1796 return cur_pos;
1797}
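// The mirror-object and native bins are laid out contiguously in bin order (see the bin offset
// calculation in CalculateNewObjectOffsets()), and the interned string and class table sections
// are appended after the dex cache arrays section, each rounded up to 8 bytes as
// HashSet::WriteToMemory expects. The returned end position is where CreateHeader() later places
// the page-aligned image bitmap.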
1798
Vladimir Marko944da602016-02-19 12:27:55 +00001799void ImageWriter::CreateHeader(size_t oat_index) {
1800 ImageInfo& image_info = GetImageInfo(oat_index);
1801 const uint8_t* oat_file_begin = image_info.oat_file_begin_;
1802 const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
1803 const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001804
1805 // Create the image sections.
1806 ImageSection sections[ImageHeader::kSectionCount];
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001807 const size_t image_end = image_info.CreateImageSections(sections);
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001808
Mathieu Chartiere401d142015-04-22 13:56:20 -07001809 // Finally bitmap section.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001810 const size_t bitmap_bytes = image_info.image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001811 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
Mathieu Chartiera06ba052016-01-06 13:51:52 -08001812 *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
Jeff Haodcdc85b2015-12-04 14:06:18 -08001813 if (VLOG_IS_ON(compiler)) {
Vladimir Marko944da602016-02-19 12:27:55 +00001814 LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
Mathieu Chartiere401d142015-04-22 13:56:20 -07001815 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001816 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001817 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1818 ++idx;
1819 }
1820 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001821 LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
1822 LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
1823 << " Image offset=" << image_info.image_offset_ << std::dec;
1824 LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
1825 << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
1826 << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
1827 << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001828 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001829 // Store boot image info for app image so that we can relocate.
1830 uint32_t boot_image_begin = 0;
1831 uint32_t boot_image_end = 0;
1832 uint32_t boot_oat_begin = 0;
1833 uint32_t boot_oat_end = 0;
1834 gc::Heap* const heap = Runtime::Current()->GetHeap();
1835 heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001836
Mathieu Chartierceb07b32015-12-10 09:33:21 -08001837  // Create the header; leave 0 for the data size since we will fill this in as we are writing the
1838 // image.
Jeff Haodcdc85b2015-12-04 14:06:18 -08001839 new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
1840 image_end,
1841 sections,
1842 image_info.image_roots_address_,
Vladimir Marko944da602016-02-19 12:27:55 +00001843 image_info.oat_checksum_,
Jeff Haodcdc85b2015-12-04 14:06:18 -08001844 PointerToLowMemUInt32(oat_file_begin),
1845 PointerToLowMemUInt32(image_info.oat_data_begin_),
1846 PointerToLowMemUInt32(oat_data_end),
1847 PointerToLowMemUInt32(oat_file_end),
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001848 boot_image_begin,
1849 boot_image_end - boot_image_begin,
1850 boot_oat_begin,
1851 boot_oat_end - boot_oat_begin,
Andreas Gampe542451c2016-07-26 09:02:02 -07001852 static_cast<uint32_t>(target_ptr_size_),
Jeff Haodcdc85b2015-12-04 14:06:18 -08001853 compile_pic_,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001854 /*is_pic*/compile_app_image_,
Jeff Haodcdc85b2015-12-04 14:06:18 -08001855 image_storage_mode_,
1856 /*data_size*/0u);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001857}
1858
1859ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001860 auto it = native_object_relocations_.find(method);
David Sehr709b0702016-10-13 09:12:37 -07001861 CHECK(it != native_object_relocations_.end()) << ArtMethod::PrettyMethod(method) << " @ "
1862 << method;
Vladimir Marko944da602016-02-19 12:27:55 +00001863 size_t oat_index = GetOatIndex(method->GetDexCache());
1864 ImageInfo& image_info = GetImageInfo(oat_index);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001865 CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
1866 return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001867}
1868
Vladimir Markoad06b982016-11-17 16:38:59 +00001869class ImageWriter::FixupRootVisitor : public RootVisitor {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001870 public:
1871 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1872 }
1873
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001874 void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
1875 size_t count ATTRIBUTE_UNUSED,
1876 const RootInfo& info ATTRIBUTE_UNUSED)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001877 OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001878 LOG(FATAL) << "Unsupported";
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001879 }
1880
1881 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1882 const RootInfo& info ATTRIBUTE_UNUSED)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001883 OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001884 for (size_t i = 0; i < count; ++i) {
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001885 image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001886 }
1887 }
1888
1889 private:
1890 ImageWriter* const image_writer_;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001891};
1892
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001893void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
1894 for (size_t i = 0; i < ImTable::kSize; ++i) {
1895 ArtMethod* method = orig->Get(i, target_ptr_size_);
Mathieu Chartier8c19d242017-03-06 12:35:10 -08001896 void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
1897 CopyAndFixupPointer(address, method);
1898 DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00001899 }
1900}
1901
void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
  const size_t count = orig->NumEntries(target_ptr_size_);
  for (size_t i = 0; i < count; ++i) {
    ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
    ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
    CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
    CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
                        implementation_method);
    DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
              NativeLocationInImage(interface_method));
    DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
              NativeLocationInImage(implementation_method));
  }
}

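// Write all native data (ArtFields, ArtMethods, IM tables, IMT conflict tables) that belongs to
// the oat file at oat_index into its image, then fix up the image method roots and emit the
// intern table and class table sections.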
void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
  const ImageInfo& image_info = GetImageInfo(oat_index);
  // Copy ArtFields and methods to their locations and update the array for convenience.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    // Only work with fields and methods that are in the current oat file.
    if (relocation.oat_index != oat_index) {
      continue;
    }
    auto* dest = image_info.image_->Begin() + relocation.offset;
    DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
    DCHECK(!IsInBootImage(pair.first));
    switch (relocation.type) {
      case kNativeObjectRelocationTypeArtField: {
        memcpy(dest, pair.first, sizeof(ArtField));
        CopyReference(
            reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
            reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
        break;
      }
      case kNativeObjectRelocationTypeRuntimeMethod:
      case kNativeObjectRelocationTypeArtMethodClean:
      case kNativeObjectRelocationTypeArtMethodDirty: {
        CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
                           reinterpret_cast<ArtMethod*>(dest),
                           image_info);
        break;
      }
      // For arrays, copy just the header since the elements will get copied by their corresponding
      // relocations.
      case kNativeObjectRelocationTypeArtFieldArray: {
        memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
        break;
      }
      case kNativeObjectRelocationTypeArtMethodArrayClean:
      case kNativeObjectRelocationTypeArtMethodArrayDirty: {
        size_t size = ArtMethod::Size(target_ptr_size_);
        size_t alignment = ArtMethod::Alignment(target_ptr_size_);
        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
        // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
        reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
        break;
      }
      case kNativeObjectRelocationTypeDexCacheArray:
        // Nothing to copy here, everything is done in FixupDexCache().
        break;
      case kNativeObjectRelocationTypeIMTable: {
        ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
        ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
        CopyAndFixupImTable(orig_imt, dest_imt);
        break;
      }
      case kNativeObjectRelocationTypeIMTConflictTable: {
        auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
        CopyAndFixupImtConflictTable(
            orig_table,
            new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
        break;
      }
    }
  }
  // Fixup the image method roots.
  auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
  for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
    ArtMethod* method = image_methods_[i];
    CHECK(method != nullptr);
    if (!IsInBootImage(method)) {
      method = NativeLocationInImage(method);
    }
    image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
  }
  FixupRootVisitor root_visitor(this);

  // Write the intern table into the image.
  if (image_info.intern_table_bytes_ > 0) {
    const ImageSection& intern_table_section = image_header->GetImageSection(
        ImageHeader::kSectionInternedStrings);
    InternTable* const intern_table = image_info.intern_table_.get();
    uint8_t* const intern_table_memory_ptr =
        image_info.image_->Begin() + intern_table_section.Offset();
    const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
    CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
    // Fixup the pointers in the newly written intern table to contain image addresses.
    InternTable temp_intern_table;
    // Note that we require that AddTableFromMemory does not make an internal copy of the elements
    // so that the VisitRoots() will update the memory directly rather than the copies.
    // This also relies on visit roots not doing any verification which could fail after we update
    // the roots to be the image addresses.
    temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
    CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
    temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
  }
  // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
  // class loaders. Writing multiple class tables into the image is currently unsupported.
  if (image_info.class_table_bytes_ > 0u) {
    const ImageSection& class_table_section = image_header->GetImageSection(
        ImageHeader::kSectionClassTable);
    uint8_t* const class_table_memory_ptr =
        image_info.image_->Begin() + class_table_section.Offset();
    ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);

    ClassTable* table = image_info.class_table_.get();
    CHECK(table != nullptr);
    const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
    CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
    // Fixup the pointers in the newly written class table to contain image addresses. See
    // above comment for intern tables.
    ClassTable temp_class_table;
    temp_class_table.ReadFromMemory(class_table_memory_ptr);
    CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
             table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
    UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
    temp_class_table.VisitRoots(visitor);
  }
}

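// Copy every live heap object into its image, then restore the hash codes that were stashed in
// saved_hashcode_map_ back into the source objects' lock words.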
void ImageWriter::CopyAndFixupObjects() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->VisitObjects(CopyAndFixupObjectsCallback, this);
  // Fix up the objects that previously had hash codes.
  for (const auto& hash_pair : saved_hashcode_map_) {
    Object* obj = hash_pair.first;
    DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
    obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
  }
  saved_hashcode_map_.clear();
}

void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
  DCHECK(obj != nullptr);
  DCHECK(arg != nullptr);
  reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
}

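// int[] and long[] objects can back arrays of native pointers (ArtField* or ArtMethod*).
// Rewrite each element of such an array to the image location of the native object it points to.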
void ImageWriter::FixupPointerArray(mirror::Object* dst,
                                    mirror::PointerArray* arr,
                                    mirror::Class* klass,
                                    Bin array_type) {
  CHECK(klass->IsArrayClass());
  CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
  // Fixup int and long pointers for the ArtMethod or ArtField arrays.
  const size_t num_elements = arr->GetLength();
  dst->SetClass(GetImageAddress(arr->GetClass()));
  auto* dest_array = down_cast<mirror::PointerArray*>(dst);
  for (size_t i = 0, count = num_elements; i < count; ++i) {
    void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
    if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
      auto it = native_object_relocations_.find(elem);
      if (UNLIKELY(it == native_object_relocations_.end())) {
        // The relocation entry is missing, so the iterator cannot be dereferenced here; decide
        // from the bin type whether this was a method array or a field array.
        if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
          auto* method = reinterpret_cast<ArtMethod*>(elem);
          LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
                     << method << " idx=" << i << "/" << num_elements << " with declaring class "
                     << Class::PrettyClass(method->GetDeclaringClass());
        } else {
          CHECK_EQ(array_type, kBinArtField);
          auto* field = reinterpret_cast<ArtField*>(elem);
          LOG(FATAL) << "No relocation entry for ArtField " << field->PrettyField() << " @ "
                     << field << " idx=" << i << "/" << num_elements << " with declaring class "
                     << Class::PrettyClass(field->GetDeclaringClass());
        }
        UNREACHABLE();
      }
    }
    CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
  }
}

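// Copy one object into its image: mark it in the image bitmap, memcpy the contents, install the
// saved hash code (or a default lock word), optionally set the mark bit for the Baker read
// barrier, and then fix up its references.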
void ImageWriter::CopyAndFixupObject(Object* obj) {
  if (IsInBootImage(obj)) {
    return;
  }
  size_t offset = GetImageOffset(obj);
  size_t oat_index = GetOatIndex(obj);
  ImageInfo& image_info = GetImageInfo(oat_index);
  auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
  DCHECK_LT(offset, image_info.image_end_);
  const auto* src = reinterpret_cast<const uint8_t*>(obj);

  image_info.image_bitmap_->Set(dst);  // Mark the obj as live.

  const size_t n = obj->SizeOf();
  DCHECK_LE(offset + n, image_info.image_->Size());
  memcpy(dst, src, n);

  // Write in a hash code of objects which have inflated monitors or a hash code in their monitor
  // word.
  const auto it = saved_hashcode_map_.find(obj);
  dst->SetLockWord(it != saved_hashcode_map_.end() ?
      LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
  if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
    // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
    // safe since we mark all of the objects that may reference non immune objects as gray.
    CHECK(dst->AtomicSetMarkBit(0, 1));
  }
  FixupObject(obj, dst);
}

// Rewrite all the references in the copied object to point to their image address equivalent
class ImageWriter::FixupVisitor {
 public:
  FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
  }

  // Ignore class roots since we don't have a way to map them to the destination. These are handled
  // with other logic.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

  void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
    // Copy the reference and record the fixup if necessary.
    image_writer_->CopyReference(
        copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
        ref.Ptr());
  }

  // java.lang.ref.Reference visitor.
  void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                  ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

 protected:
  ImageWriter* const image_writer_;
  mirror::Object* const copy_;
};

class ImageWriter::FixupClassVisitor FINAL : public FixupVisitor {
 public:
  FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
  }

  void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK(obj->IsClass());
    FixupVisitor::operator()(obj, offset, /*is_static*/false);
  }

  void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                  ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    LOG(FATAL) << "Reference not expected here.";
  }
};

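// Returns the offset within its image at which the given native object will be written; the
// object must have a relocation entry.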
uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
  DCHECK(obj != nullptr);
  DCHECK(!IsInBootImage(obj));
  auto it = native_object_relocations_.find(obj);
  CHECK(it != native_object_relocations_.end()) << obj << " spaces "
      << Runtime::Current()->GetHeap()->DumpSpaces();
  const NativeObjectRelocation& relocation = it->second;
  return relocation.offset;
}

template <typename T>
std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
  std::ostringstream oss;
  oss << ptr;
  return oss.str();
}

template <>
std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  return ArtMethod::PrettyMethod(method);
}

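// Translates a pointer to a native object (ArtField*, ArtMethod*, table, ...) into the address it
// will have once its image is mapped; nullptr and boot image pointers are returned unchanged.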
template <typename T>
T* ImageWriter::NativeLocationInImage(T* obj) {
  if (obj == nullptr || IsInBootImage(obj)) {
    return obj;
  } else {
    auto it = native_object_relocations_.find(obj);
    CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj)
        << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces();
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
  }
}

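// Returns the address of the copy of a native object inside the in-memory image buffer being
// written (as opposed to its final mapped address).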
template <typename T>
T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
  if (obj == nullptr || IsInBootImage(obj)) {
    return obj;
  } else {
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
  }
}

class ImageWriter::NativeLocationVisitor {
 public:
  explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  template <typename T>
  T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (dest_addr != nullptr) {
      image_writer_->CopyAndFixupPointer(dest_addr, ptr);
    }
    return image_writer_->NativeLocationInImage(ptr);
  }

 private:
  ImageWriter* const image_writer_;
};

void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
  orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
  FixupClassVisitor visitor(this, copy);
  ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);

  // Remove the clinitThreadId. This is required for image determinism.
  copy->SetClinitThreadId(static_cast<pid_t>(0));
}

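// Fix up a copied object according to its type: pointer arrays, classes, Method/Constructor
// objects, dex caches and class loaders need special handling before the generic reference fixup.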
void ImageWriter::FixupObject(Object* orig, Object* copy) {
  DCHECK(orig != nullptr);
  DCHECK(copy != nullptr);
  if (kUseBakerReadBarrier) {
    orig->AssertReadBarrierState();
  }
  auto* klass = orig->GetClass();
  if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
    // Is this a native pointer array?
    auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
    if (it != pointer_arrays_.end()) {
      // Should only need to fixup every pointer array exactly once.
      FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
      pointer_arrays_.erase(it);
      return;
    }
  }
  if (orig->IsClass()) {
    FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
  } else {
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Need to go update the ArtMethod.
      auto* dest = down_cast<mirror::Executable*>(copy);
      auto* src = down_cast<mirror::Executable*>(orig);
      ArtMethod* src_method = src->GetArtMethod();
      dest->SetArtMethod(GetImageMethodAddress(src_method));
    } else if (!klass->IsArrayClass()) {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
        FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
      } else if (klass->IsClassLoaderClass()) {
        mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
        // If src is a ClassLoader, set the class table to null so that it gets recreated by the
        // ClassLoader.
        copy_loader->SetClassTable(nullptr);
        // Also set allocator to null to be safe. The allocator is created when we create the class
        // table. We also never expect to unload things in the image since they are held live as
        // roots.
        copy_loader->SetAllocator(nullptr);
      }
    }
    FixupVisitor visitor(this, copy);
    orig->VisitReferences(visitor, visitor);
  }
}

class ImageWriter::ImageAddressVisitorForDexCacheArray {
 public:
  explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
      : image_writer_(image_writer) {}

  template <typename T>
  T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
    return image_writer_->GetImageAddress(ptr);
  }

 private:
  ImageWriter* const image_writer_;
};

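// Fix up the native arrays of a copied DexCache (strings, resolved types, methods, fields,
// method types and call sites): the array pointers are redirected to their image locations, the
// array contents are rewritten, and the DexFile pointer is cleared for determinism.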
void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                mirror::DexCache* copy_dex_cache) {
  ImageAddressVisitorForDexCacheArray fixup_visitor(this);
  // Though the DexCache array fields are usually treated as native pointers, we set the full
  // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
  // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
  // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset))).
  mirror::StringDexCacheType* orig_strings = orig_dex_cache->GetStrings();
  if (orig_strings != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
                                               NativeLocationInImage(orig_strings),
                                               PointerSize::k64);
    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
  }
  mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
  if (orig_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
                                               NativeLocationInImage(orig_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
                                       fixup_visitor);
  }
  ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
  if (orig_methods != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
                                               NativeLocationInImage(orig_methods),
                                               PointerSize::k64);
    ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
      // NativeLocationInImage also handles runtime methods since these have relocation info.
      ArtMethod* copy = NativeLocationInImage(orig);
      mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
    }
  }
  mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
  if (orig_fields != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
                                               NativeLocationInImage(orig_fields),
                                               PointerSize::k64);
    mirror::FieldDexCacheType* copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
      mirror::FieldDexCachePair orig =
          mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
      mirror::FieldDexCachePair copy = orig;
      copy.object = NativeLocationInImage(orig.object);
      mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
    }
  }
  mirror::MethodTypeDexCacheType* orig_method_types = orig_dex_cache->GetResolvedMethodTypes();
  if (orig_method_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodTypesOffset(),
                                               NativeLocationInImage(orig_method_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
                                             fixup_visitor);
  }
  GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
  if (orig_call_sites != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedCallSitesOffset(),
                                               NativeLocationInImage(orig_call_sites),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
                                           fixup_visitor);
  }

  // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
  // compiler pointers in here will make the output non-deterministic.
  copy_dex_cache->SetDexFile(nullptr);
}

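// Returns the address of the requested trampoline or stub. App images reuse the trampolines of
// the primary boot image oat file; boot images resolve the offsets recorded from the primary oat
// header.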
const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
  DCHECK_LT(type, kOatAddressCount);
  // If we are compiling an app image, we need to use the stubs of the boot image.
  if (compile_app_image_) {
    // Use the current image pointers.
    const std::vector<gc::space::ImageSpace*>& image_spaces =
        Runtime::Current()->GetHeap()->GetBootImageSpaces();
    DCHECK(!image_spaces.empty());
    const OatFile* oat_file = image_spaces[0]->GetOatFile();
    CHECK(oat_file != nullptr);
    const OatHeader& header = oat_file->GetOatHeader();
    switch (type) {
      // TODO: We could maybe clean this up if we stored them in an array in the oat header.
      case kOatAddressQuickGenericJNITrampoline:
        return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
      case kOatAddressInterpreterToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
      case kOatAddressInterpreterToCompiledCodeBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
      case kOatAddressJNIDlsymLookup:
        return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
      case kOatAddressQuickIMTConflictTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
      case kOatAddressQuickResolutionTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
      case kOatAddressQuickToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
      default:
        UNREACHABLE();
    }
  }
  const ImageInfo& primary_image_info = GetImageInfo(0);
  return GetOatAddressForOffset(primary_image_info.oat_address_offsets_[type], primary_image_info);
}

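// Choose the quick entrypoint to record in the image for a method: its compiled code when usable,
// the generic JNI trampoline for native methods without code, the interpreter bridge for
// non-native methods without code, or the resolution trampoline for uninitialized static methods.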
const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
                                         const ImageInfo& image_info,
                                         bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
  DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
  DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
  DCHECK(method->IsInvokable()) << method->PrettyMethod();
  DCHECK(!IsInBootImage(method)) << method->PrettyMethod();

  // Use original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.

  // Quick entrypoint:
  const void* quick_oat_entry_point =
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
  const uint8_t* quick_code;

  if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
    DCHECK(method->IsCopied());
    // The code is not in the oat file corresponding to this image (e.g. default methods), so use
    // the stored entry point directly rather than translating an oat offset.
    quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
  } else {
    uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
    quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
  }

  *quick_is_interpreted = false;
  if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
      method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
    *quick_is_interpreted = true;
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
  }
  if (!IsInBootOatFile(quick_code)) {
    // DCHECK_GE(quick_code, oat_data_begin_);
  }
  return quick_code;
}

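// Copy an ArtMethod into the image and fix up its declaring class, resolved-methods array and
// entrypoints; runtime methods get their conflict tables and trampolines patched instead of
// compiled code.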
void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
                                     ArtMethod* copy,
                                     const ImageInfo& image_info) {
  if (orig->IsAbstract()) {
    // Ignore the single-implementation info for abstract methods.
    // Do this on orig instead of copy, otherwise there is a crash because methods
    // are copied before classes.
    // TODO: handle fixup of single-implementation method for abstract method.
    orig->SetHasSingleImplementation(false);
    orig->SetSingleImplementation(
        nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
  }

  memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));

  CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());

  ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
  copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);

  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
  // oat_begin_

  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (orig->IsRuntimeMethod()) {
    ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
    if (orig_table != nullptr) {
      // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
      copy->SetImtConflictTable(NativeLocationInImage(orig_table), target_ptr_size_);
    } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
    } else {
      bool found_one = false;
      for (size_t i = 0; i < static_cast<size_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
        auto idx = static_cast<CalleeSaveType>(i);
        if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
          found_one = true;
          break;
        }
      }
      CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
      CHECK(copy->IsRuntimeMethod());
    }
  } else {
    // We assume all methods have code. If they don't currently then we set them to use the
    // resolution trampoline. Abstract methods never have code and so we need to make sure their
    // use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(!orig->IsInvokable())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub to lookup via dlsym.
        // Note this is not the code_ pointer, that is handled above.
        copy->SetEntryPointFromJniPtrSize(
            GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
      }
    }
  }
}

size_t ImageWriter::GetBinSizeSum(ImageWriter::ImageInfo& image_info, ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&image_info.bin_slot_sizes_[0],
                         &image_info.bin_slot_sizes_[up_to],
                         /*init*/0);
}

ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to get updated if more bins are added to the enum Bin
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong number of shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
  switch (type) {
    case kNativeObjectRelocationTypeArtField:
    case kNativeObjectRelocationTypeArtFieldArray:
      return kBinArtField;
    case kNativeObjectRelocationTypeArtMethodClean:
    case kNativeObjectRelocationTypeArtMethodArrayClean:
      return kBinArtMethodClean;
    case kNativeObjectRelocationTypeArtMethodDirty:
    case kNativeObjectRelocationTypeArtMethodArrayDirty:
      return kBinArtMethodDirty;
    case kNativeObjectRelocationTypeDexCacheArray:
      return kBinDexCacheArray;
    case kNativeObjectRelocationTypeRuntimeMethod:
      return kBinRuntimeMethod;
    case kNativeObjectRelocationTypeIMTable:
      return kBinImTable;
    case kNativeObjectRelocationTypeIMTConflictTable:
      return kBinIMTConflictTable;
  }
  UNREACHABLE();
}

size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = oat_index_map_.find(obj);
  DCHECK(it != oat_index_map_.end()) << obj;
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = dex_file_oat_index_map_.find(dex_file);
  DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
  return (dex_cache == nullptr)
      ? GetDefaultOatIndex()
      : GetOatIndexForDexFile(dex_cache->GetDexFile());
}

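// Record the layout of the oat file at oat_index, which is placed after the image spaces, and
// propagate the resulting offset to the next oat file when compiling a multi-image boot image.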
void ImageWriter::UpdateOatFileLayout(size_t oat_index,
                                      size_t oat_loaded_size,
                                      size_t oat_data_offset,
                                      size_t oat_data_size) {
  const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
  for (const ImageInfo& info : image_infos_) {
    DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
  }
  DCHECK(images_end != nullptr);  // Image space must be ready.

  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
  cur_image_info.oat_loaded_size_ = oat_loaded_size;
  cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
  cur_image_info.oat_size_ = oat_data_size;

  if (compile_app_image_) {
    CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
    return;
  }

  // Update the oat_offset of the next image info.
  if (oat_index + 1u != oat_filenames_.size()) {
    // There is a following one.
    ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
    next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
  }
}

void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_checksum_ = oat_header.GetChecksum();

  if (oat_index == GetDefaultOatIndex()) {
    // Primary oat file, read the trampolines.
    cur_image_info.oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
        oat_header.GetInterpreterToInterpreterBridgeOffset();
    cur_image_info.oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
        oat_header.GetInterpreterToCompiledCodeBridgeOffset();
    cur_image_info.oat_address_offsets_[kOatAddressJNIDlsymLookup] =
        oat_header.GetJniDlsymLookupOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
        oat_header.GetQuickGenericJniTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
        oat_header.GetQuickImtConflictTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
        oat_header.GetQuickResolutionTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
        oat_header.GetQuickToInterpreterBridgeOffset();
  }
}

ImageWriter::ImageWriter(
    const CompilerDriver& compiler_driver,
    uintptr_t image_begin,
    bool compile_pic,
    bool compile_app_image,
    ImageHeader::StorageMode image_storage_mode,
    const std::vector<const char*>& oat_filenames,
    const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map)
    : compiler_driver_(compiler_driver),
      global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
      image_objects_offset_begin_(0),
      compile_pic_(compile_pic),
      compile_app_image_(compile_app_image),
      target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
      image_infos_(oat_filenames.size()),
      dirty_methods_(0u),
      clean_methods_(0u),
      image_storage_mode_(image_storage_mode),
      oat_filenames_(oat_filenames),
      dex_file_oat_index_map_(dex_file_oat_index_map) {
  CHECK_NE(image_begin, 0U);
  std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
  CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
      << "Compiling a boot image should occur iff there are no boot image spaces loaded";
}

ImageWriter::ImageInfo::ImageInfo()
    : intern_table_(new InternTable),
      class_table_(new ClassTable) {}

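// Helpers that store image addresses: CopyReference assigns the image address of a managed object
// to a (compressed) reference slot, and CopyAndFixupPointer writes the relocated address of a
// native object honoring the target pointer size.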
void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
  void* new_value = value;
  if (value != nullptr && !IsInBootImage(value)) {
    auto it = native_object_relocations_.find(value);
    CHECK(it != native_object_relocations_.end()) << value;
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
  }
  if (target_ptr_size_ == PointerSize::k32) {
    *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
  } else {
    *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
  }
}

}  // namespace art