/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>
#include <lz4.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  if (!compile_app_image_) {
    DCHECK(boot_image_space_ == nullptr);
    return false;
  }
  const uint8_t* image_begin = boot_image_space_->Begin();
  // Real image end including ArtMethods and ArtField sections.
  const uint8_t* image_end = image_begin + boot_image_space_->GetImageHeader().GetImageSize();
  return image_begin <= obj && obj < image_end;
}

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  if (!compile_app_image_) {
    DCHECK(boot_image_space_ == nullptr);
    return false;
  }
  const ImageHeader& image_header = boot_image_space_->GetImageHeader();
  return image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd();
}

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

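// Descriptive summary of the calls below: PrepareImageAddressSpace() prunes classes that may
// not be written into the image, computes lazily-initialized class fields (such as class
// names) for image classes, runs a collection to reclaim the pruned objects, assigns every
// remaining object a bin slot / image offset, and finally allocates the backing buffer.
// Write() later copies the fixed-up contents into that buffer and serializes it to disk.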
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  // Cache boot image space.
  for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {
    if (space->IsImageSpace()) {
      CHECK(compile_app_image_);
      CHECK(boot_image_space_ == nullptr) << "Multiple image spaces";
      boot_image_space_ = space->AsImageSpace();
    }
  }
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    ComputeLazyFieldsForImageClasses();  // Add useful information
  }
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}
155
Mathieu Chartiera90c7722015-10-29 15:41:36 -0700156bool ImageWriter::Write(int image_fd,
157 const std::string& image_filename,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700158 const std::string& oat_filename,
159 const std::string& oat_location) {
160 CHECK(!image_filename.empty());
161
Ian Rogers700a4022014-05-19 16:49:03 -0700162 std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700163 if (oat_file.get() == nullptr) {
Andreas Gampe88ec7f42014-11-05 10:18:32 -0800164 PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700165 return false;
166 }
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700167 std::string error_msg;
Igor Murashkinb1d8c312015-08-04 11:18:43 -0700168 oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700169 if (oat_file_ == nullptr) {
Andreas Gampe88ec7f42014-11-05 10:18:32 -0800170 PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700171 << ": " << error_msg;
Andreas Gampe0b7fcf92015-03-13 16:54:54 -0700172 oat_file->Erase();
Brian Carlstromc50d8e12013-07-23 22:35:16 -0700173 return false;
174 }
Mathieu Chartierf9c6fc62015-10-07 11:44:05 -0700175 Runtime::Current()->GetOatFileManager().RegisterOatFile(
176 std::unique_ptr<const OatFile>(oat_file_));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700177
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800178 const OatHeader& oat_header = oat_file_->GetOatHeader();
179 oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
180 oat_header.GetInterpreterToInterpreterBridgeOffset();
181 oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
182 oat_header.GetInterpreterToCompiledCodeBridgeOffset();
183 oat_address_offsets_[kOatAddressJNIDlsymLookup] =
184 oat_header.GetJniDlsymLookupOffset();
185 oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
186 oat_header.GetQuickGenericJniTrampolineOffset();
187 oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
188 oat_header.GetQuickImtConflictTrampolineOffset();
189 oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
190 oat_header.GetQuickResolutionTrampolineOffset();
191 oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
192 oat_header.GetQuickToInterpreterBridgeOffset();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700193
Brian Carlstrom7940e442013-07-12 13:46:57 -0700194 size_t oat_loaded_size = 0;
195 size_t oat_data_offset = 0;
Vladimir Marko3fc99032015-05-13 19:06:30 +0100196 ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);
Alex Light53cb16b2014-06-12 11:26:29 -0700197
Mathieu Chartierf1d666e2015-09-03 16:13:34 -0700198 {
199 ScopedObjectAccess soa(Thread::Current());
200 CreateHeader(oat_loaded_size, oat_data_offset);
201 CopyAndFixupNativeData();
202 // TODO: heap validation can't handle these fix up passes.
203 Runtime::Current()->GetHeap()->DisableObjectValidation();
204 CopyAndFixupObjects();
205 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700206
Vladimir Markof4da6752014-08-01 19:04:18 +0100207 SetOatChecksumFromElfFile(oat_file.get());
208
Andreas Gampe4303ba92014-11-06 01:00:46 -0800209 if (oat_file->FlushCloseOrErase() != 0) {
210 LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
211 return false;
212 }
Mathieu Chartiera90c7722015-10-29 15:41:36 -0700213 std::unique_ptr<File> image_file;
214 if (image_fd != kInvalidImageFd) {
215 image_file.reset(new File(image_fd, image_filename, unix_file::kCheckSafeUsage));
216 } else {
217 image_file.reset(OS::CreateEmptyFile(image_filename.c_str()));
218 }
219 if (image_file == nullptr) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700220 LOG(ERROR) << "Failed to open image file " << image_filename;
221 return false;
222 }
223 if (fchmod(image_file->Fd(), 0644) != 0) {
224 PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
Andreas Gampe4303ba92014-11-06 01:00:46 -0800225 image_file->Erase();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700226 return EXIT_FAILURE;
227 }
Mathieu Chartier31e89252013-08-28 11:29:12 -0700228
Mathieu Chartierceb07b32015-12-10 09:33:21 -0800229 std::unique_ptr<char[]> compressed_data;
230 // Image data size excludes the bitmap and the header.
Nicolas Geoffray83d4d722015-12-10 08:26:32 +0000231 ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Mathieu Chartierceb07b32015-12-10 09:33:21 -0800232 const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
233 char* image_data = reinterpret_cast<char*>(image_->Begin()) + sizeof(ImageHeader);
234 size_t data_size;
235 const char* image_data_to_write;
236
237 CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
238 switch (image_storage_mode_) {
239 case ImageHeader::kStorageModeLZ4: {
240 size_t compressed_max_size = LZ4_compressBound(image_data_size);
241 compressed_data.reset(new char[compressed_max_size]);
242 data_size = LZ4_compress(
243 reinterpret_cast<char*>(image_->Begin()) + sizeof(ImageHeader),
244 &compressed_data[0],
245 image_data_size);
246 image_data_to_write = &compressed_data[0];
247 VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size;
248 break;
249 }
250 case ImageHeader::kStorageModeUncompressed: {
251 data_size = image_data_size;
252 image_data_to_write = image_data;
253 break;
254 }
255 default: {
256 LOG(FATAL) << "Unsupported";
257 UNREACHABLE();
258 }
259 }
260
261 // Write header first, as uncompressed.
262 image_header->data_size_ = data_size;
263 if (!image_file->WriteFully(image_->Begin(), sizeof(ImageHeader))) {
264 PLOG(ERROR) << "Failed to write image file header " << image_filename;
Andreas Gampe4303ba92014-11-06 01:00:46 -0800265 image_file->Erase();
Mathieu Chartier31e89252013-08-28 11:29:12 -0700266 return false;
267 }
Nicolas Geoffray83d4d722015-12-10 08:26:32 +0000268
Mathieu Chartierceb07b32015-12-10 09:33:21 -0800269 // Write out the image + fields + methods.
270 const bool is_compressed = compressed_data != nullptr;
271 if (!image_file->WriteFully(image_data_to_write, data_size)) {
272 PLOG(ERROR) << "Failed to write image file data " << image_filename;
273 image_file->Erase();
274 return false;
275 }
276
277 // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
278 // convenience.
Nicolas Geoffray83d4d722015-12-10 08:26:32 +0000279 const ImageSection& bitmap_section = image_header->GetImageSection(
280 ImageHeader::kSectionImageBitmap);
Mathieu Chartierceb07b32015-12-10 09:33:21 -0800281 // Align up since data size may be unaligned if the image is compressed.
282 size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
283 if (!is_compressed) {
284 CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
285 }
Nicolas Geoffray83d4d722015-12-10 08:26:32 +0000286 if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
Mathieu Chartierceb07b32015-12-10 09:33:21 -0800287 bitmap_section.Size(),
288 bitmap_position_in_file)) {
Nicolas Geoffray83d4d722015-12-10 08:26:32 +0000289 PLOG(ERROR) << "Failed to write image file " << image_filename;
290 image_file->Erase();
291 return false;
292 }
Mathieu Chartierceb07b32015-12-10 09:33:21 -0800293 CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
294 static_cast<size_t>(image_file->GetLength()));
Andreas Gampe4303ba92014-11-06 01:00:46 -0800295 if (image_file->FlushCloseOrErase() != 0) {
296 PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
297 return false;
298 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700299 return true;
300}
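// A minimal sketch (not part of the writer) of how a loader could undo the kStorageModeLZ4
// path above, assuming it has already read the uncompressed ImageHeader and the data_size_
// recorded in it. LZ4_decompress_safe() is the standard counterpart of LZ4_compress():
//
//   const size_t out_size = image_header.GetImageSize() - sizeof(ImageHeader);
//   std::vector<char> out(out_size);
//   int n = LZ4_decompress_safe(compressed_bytes, out.data(),
//                               image_header.data_size_, out_size);
//   CHECK_EQ(static_cast<size_t>(n), out_size);
//
// The bitmap that follows is stored uncompressed, starting at the next page boundary after
// the (possibly compressed) payload (see bitmap_position_in_file above).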

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}
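// Note on the lock word trick used above: during image writing the lock word is treated as
// scratch space. SetImageBinSlot() stashes the bin slot there (saving any hash code into
// saved_hashcode_map_ first), and SetImageOffset() later overwrites it with the final image
// offset; the saved hash codes can then be re-installed when the object contents are copied
// into the image buffer.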

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  uint32_t size = 0u;
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    size += layout.Size();
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache)) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    uint32_t start = dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(), start + layout.TypesOffset());
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(), start + layout.MethodsOffset());
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(), start + layout.FieldsOffset());
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset());
  }
}
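// Illustrative layout of the kBinDexCacheArray bin produced above: each dex cache gets one
// contiguous block, blocks appear in the CompilerDriver's dex file order, and the offsets of
// the sub-arrays inside a block come from DexCacheArraysLayout, e.g.
//
//   [ dex file 0: types | methods | fields | strings ][ dex file 1: ... ] ...
//
// The exact sub-array ordering is whatever DexCacheArraysLayout defines; the schematic is
// only meant to show how `start` plus the per-kind offsets address into the bin.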

void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    native_object_relocations_.emplace(
        array,
        NativeObjectRelocation { offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declaring classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}
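// Worked example of how a bin slot later becomes an image offset (see AssignImageOffset()):
// if this object is the first one placed in kBinString, its BinSlot index is 0. Assuming
// bin_slot_offsets_[kBinString] ends up as, say, 0x4000 (presumably a running sum over the
// preceding bins' bin_slot_sizes_, computed outside this excerpt), the final offset written
// into the lock word is 0x4000 + 0.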

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized classes are highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  const size_t length = RoundUp(image_objects_offset_begin_ +
                                GetBinSizeSum() +
                                intern_table_bytes_ +
                                class_table_bytes_,
                                kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image",
                                    nullptr,
                                    length,
                                    PROT_READ | PROT_WRITE,
                                    false,
                                    false,
                                    &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap",
      image_->Begin(),
      RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

bool ImageWriter::ContainsBootClassLoaderNonImageClass(mirror::Class* klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Class*> visited;
  return ContainsBootClassLoaderNonImageClassInternal(klass, &early_exit, &visited);
}

bool ImageWriter::ContainsBootClassLoaderNonImageClassInternal(
    mirror::Class* klass,
    bool* early_exit,
    std::unordered_set<mirror::Class*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  if (klass == nullptr) {
    return false;
  }
  auto found = prune_class_memo_.find(klass);
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependencies, return false but do not store the result in the memoization table.
  if (visited->find(klass) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass);
  bool result = IsBootClassLoaderNonImageClass(klass);
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || ContainsBootClassLoaderNonImageClassInternal(
          if_table->GetInterface(i),
          &my_early_exit,
          visited);
    }
  }
  // Check static fields and their classes.
  size_t num_static_fields = klass->NumReferenceStaticFields();
  if (num_static_fields != 0 && klass->IsResolved()) {
    // Presumably GC can happen when we are cross compiling, it should not cause performance
    // problems to do pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result ||
              ContainsBootClassLoaderNonImageClassInternal(
                  ref->AsClass(),
                  &my_early_exit,
                  visited);
        }
        result = result ||
            ContainsBootClassLoaderNonImageClassInternal(
                ref->GetClass(),
                &my_early_exit,
                visited);
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result ||
      ContainsBootClassLoaderNonImageClassInternal(
          klass->GetSuperClass(),
          &my_early_exit,
          visited);
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass);
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in this case the cycle was
  // in a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}
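// Example of the early-exit handling above: if class A's static field references class B and
// B's super class is A, the innermost call on A hits the `visited` check, sets my_early_exit
// and returns false without memoizing. Intermediate frames whose false result was computed
// while such a cycle was pending do not cache it either; only the outermost caller (for which
// visited->empty() holds after the erase) is allowed to record the final answer.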

bool ImageWriter::KeepClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    return !ContainsBootClassLoaderNonImageClass(klass);
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass)) {
      classes_to_prune_.insert(klass);
    }
    return true;
  }

  std::unordered_set<mirror::Class*> classes_to_prune_;
  ImageWriter* const image_writer_;
};

void ImageWriter::PruneNonImageClasses() {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  for (mirror::Class* klass : visitor.classes_to_prune_) {
    std::string temp;
    const char* name = klass->GetDescriptor(&temp);
    VLOG(compiler) << "Pruning class " << name;
    if (!compile_app_image_) {
      DCHECK(IsBootClassLoaderClass(klass));
    }
    bool result = class_linker->RemoveClass(name, klass->GetClassLoader());
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !KeepClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
    for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* method =
          mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but have a
        // declaring class which is an image class. Set it to the resolution method to be safe and
        // prevent dangling pointers.
        if (method->IsMiranda() || !KeepClass(declaring_class)) {
          mirror::DexCache::SetElementPtrSize(resolved_methods,
                                              i,
                                              resolution_method,
                                              target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !KeepClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();

  // Clear to save RAM.
  prune_class_memo_.clear();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
    Class* klass = obj->AsClass();
    if (!image_writer->KeepClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->KeepClass(klass)) << klass->GetDescriptor(&temp)
                                            << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not already interned.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    // InternStrongImageString allows us to intern while holding the heap bitmap lock. This is
    // safe since we are guaranteed to not have GC during image writing.
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
        obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned object is after us; allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}
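// Example of the interning fix-up above: when a string is visited it is replaced by its
// canonical interned instance. If that instance already has a bin slot, the slot is reused;
// otherwise the interned instance is assigned a slot early, and the duplicate's lock word is
// pointed at that same slot, so only one copy of each distinct string value lands in the
// image.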
887
888ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
889 Runtime* runtime = Runtime::Current();
890 ClassLinker* class_linker = runtime->GetClassLinker();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700891 Thread* self = Thread::Current();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700892 StackHandleScope<3> hs(self);
893 Handle<Class> object_array_class(hs.NewHandle(
894 class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700895
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -0700896 // build an Object[] of all the DexCaches used in the source_space_.
897 // Since we can't hold the dex lock when allocating the dex_caches
898 // ObjectArray, we lock the dex lock twice, first to get the number
899 // of dex caches first and then lock it again to copy the dex
900 // caches. We check that the number of dex caches does not change.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800901 size_t dex_cache_count = 0;
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -0700902 {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700903 ReaderMutexLock mu(self, *class_linker->DexLock());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800904 // Count number of dex caches not in the boot image.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -0800905 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
906 mirror::DexCache* dex_cache =
907 down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800908 dex_cache_count += IsInBootImage(dex_cache) ? 0u : 1u;
909 }
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -0700910 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700911 Handle<ObjectArray<Object>> dex_caches(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800912 hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -0700913 CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
914 {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700915 ReaderMutexLock mu(self, *class_linker->DexLock());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800916 size_t non_image_dex_caches = 0;
917 // Re-count number of non image dex caches.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -0800918 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
919 mirror::DexCache* dex_cache =
920 down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800921 non_image_dex_caches += IsInBootImage(dex_cache) ? 0u : 1u;
922 }
923 CHECK_EQ(dex_cache_count, non_image_dex_caches)
924 << "The number of non-image dex caches changed.";
Mathieu Chartier673ed3d2015-08-28 14:56:43 -0700925 size_t i = 0;
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -0800926 for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
927 mirror::DexCache* dex_cache =
928 down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800929 if (!IsInBootImage(dex_cache)) {
930 dex_caches->Set<false>(i, dex_cache);
931 ++i;
932 }
Hiroshi Yamauchie9e3e692014-06-24 14:31:37 -0700933 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700934 }
935
936 // build an Object[] of the roots needed to restore the runtime
Mathieu Chartiere401d142015-04-22 13:56:20 -0700937 auto image_roots(hs.NewHandle(
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700938 ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700939 image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100940 image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700941 for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700942 CHECK(image_roots->Get(i) != nullptr);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700943 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700944 return image_roots.Get();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700945}
946
Mathieu Chartier590fee92013-09-13 13:46:47 -0700947// Walk instance fields of the given Class. Separate function to allow recursion on the super
948// class.
949void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
950 // Visit fields of parent classes first.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700951 StackHandleScope<1> hs(Thread::Current());
952 Handle<mirror::Class> h_class(hs.NewHandle(klass));
953 mirror::Class* super = h_class->GetSuperClass();
Mathieu Chartier590fee92013-09-13 13:46:47 -0700954 if (super != nullptr) {
955 WalkInstanceFields(obj, super);
956 }
957 //
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700958 size_t num_reference_fields = h_class->NumReferenceInstanceFields();
Vladimir Marko76649e82014-11-10 18:32:59 +0000959 MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
Mathieu Chartier590fee92013-09-13 13:46:47 -0700960 for (size_t i = 0; i < num_reference_fields; ++i) {
Ian Rogersb0fa5dc2014-04-28 16:47:08 -0700961 mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
Mathieu Chartier590fee92013-09-13 13:46:47 -0700962 if (value != nullptr) {
963 WalkFieldsInOrder(value);
964 }
Vladimir Marko76649e82014-11-10 18:32:59 +0000965 field_offset = MemberOffset(field_offset.Uint32Value() +
966 sizeof(mirror::HeapReference<mirror::Object>));
Mathieu Chartier590fee92013-09-13 13:46:47 -0700967 }
968}
969
970// For an unvisited object, visit it then all its children found via fields.
971void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -0800972 if (IsInBootImage(obj)) {
973 // Object is in the image, don't need to fix it up.
974 return;
975 }
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800976 // Use our own visitor routine (instead of GC visitor) to get better locality between
977 // an object and its fields
978 if (!IsImageBinSlotAssigned(obj)) {
Mathieu Chartier590fee92013-09-13 13:46:47 -0700979 // Walk instance fields of all objects
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700980 StackHandleScope<2> hs(Thread::Current());
981 Handle<mirror::Object> h_obj(hs.NewHandle(obj));
982 Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
Mathieu Chartier590fee92013-09-13 13:46:47 -0700983 // visit the object itself.
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800984 CalculateObjectBinSlots(h_obj.Get());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700985 WalkInstanceFields(h_obj.Get(), klass.Get());
Mathieu Chartier590fee92013-09-13 13:46:47 -0700986 // Walk static fields of a Class.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700987 if (h_obj->IsClass()) {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700988 size_t num_reference_static_fields = klass->NumReferenceStaticFields();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700989 MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700990 for (size_t i = 0; i < num_reference_static_fields; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700991 mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
Mathieu Chartier590fee92013-09-13 13:46:47 -0700992 if (value != nullptr) {
993 WalkFieldsInOrder(value);
994 }
Vladimir Marko76649e82014-11-10 18:32:59 +0000995 field_offset = MemberOffset(field_offset.Uint32Value() +
996 sizeof(mirror::HeapReference<mirror::Object>));
Mathieu Chartier590fee92013-09-13 13:46:47 -0700997 }
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700998 // Visit and assign offsets for fields and field arrays.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700999 auto* as_klass = h_obj->AsClass();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001000 LengthPrefixedArray<ArtField>* fields[] = {
1001 as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
1002 };
1003 for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
1004 // Total array length including header.
1005 if (cur_fields != nullptr) {
1006 const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
1007 // Forward the entire array at once.
1008 auto it = native_object_relocations_.find(cur_fields);
1009 CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
1010 << " already forwarded";
1011 size_t& offset = bin_slot_sizes_[kBinArtField];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001012 DCHECK(!IsInBootImage(cur_fields));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001013 native_object_relocations_.emplace(
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001014 cur_fields,
1015 NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtFieldArray });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001016 offset += header_size;
1017 // Forward individual fields so that we can quickly find where they belong.
Vladimir Marko35831e82015-09-11 11:59:18 +01001018 for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001019 // Need to forward arrays separate of fields.
1020 ArtField* field = &cur_fields->At(i);
1021 auto it2 = native_object_relocations_.find(field);
1022 CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
1023 << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001024 DCHECK(!IsInBootImage(field));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001025 native_object_relocations_.emplace(
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001026 field,
1027 NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001028 offset += sizeof(ArtField);
1029 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001030 }
1031 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001032 // Visit and assign offsets for methods.
Alex Lightae358c12015-12-15 22:15:26 +00001033 LengthPrefixedArray<ArtMethod>* method_arrays[] = {
1034 as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
1035 };
1036 for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
1037 if (array == nullptr) {
1038 continue;
1039 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001040 bool any_dirty = false;
Alex Lightae358c12015-12-15 22:15:26 +00001041 size_t count = 0;
1042 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1043 const size_t method_size = ArtMethod::Size(target_ptr_size_);
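// Both values above are derived from target_ptr_size_ rather than the host pointer size:
// ArtMethod's layout ends in pointer-sized fields (e.g. its entry points), so its size and
// alignment differ between 32-bit and 64-bit targets, and the iteration range below must use
// the target layout.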
1044 auto iteration_range =
1045 MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
1046 for (auto& m : iteration_range) {
1047 any_dirty = any_dirty || WillMethodBeDirty(&m);
1048 ++count;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001049 }
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001050 NativeObjectRelocationType type = any_dirty
1051 ? kNativeObjectRelocationTypeArtMethodDirty
1052 : kNativeObjectRelocationTypeArtMethodClean;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001053 Bin bin_type = BinTypeForNativeRelocationType(type);
1054 // Forward the entire array at once, but header first.
Vladimir Markocf36d492015-08-12 19:27:26 +01001055 const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
1056 method_size,
1057 method_alignment);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001058 auto it = native_object_relocations_.find(array);
Alex Lightae358c12015-12-15 22:15:26 +00001059 CHECK(it == native_object_relocations_.end()) << "Method array " << array
1060 << " already forwarded";
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001061 size_t& offset = bin_slot_sizes_[bin_type];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001062 DCHECK(!IsInBootImage(array));
Alex Lightae358c12015-12-15 22:15:26 +00001063 native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
1064 any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
1065 kNativeObjectRelocationTypeArtMethodArrayClean });
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001066 offset += header_size;
Alex Lightae358c12015-12-15 22:15:26 +00001067 for (auto& m : iteration_range) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001068 AssignMethodOffset(&m, type);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001069 }
Alex Lightae358c12015-12-15 22:15:26 +00001070 (any_dirty ? dirty_methods_ : clean_methods_) += count;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001071 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001072 } else if (h_obj->IsObjectArray()) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07001073 // Walk elements of an object array.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001074 int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001075 for (int32_t i = 0; i < length; i++) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001076 mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001077 mirror::Object* value = obj_array->Get(i);
1078 if (value != nullptr) {
1079 WalkFieldsInOrder(value);
1080 }
1081 }
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001082 } else if (h_obj->IsClassLoader()) {
1083 // Register the class loader if it has a class table.
1084 // The fake boot class loader should not get registered and we should end up with only one
1085 // class loader.
1086 mirror::ClassLoader* class_loader = h_obj->AsClassLoader();
1087 if (class_loader->GetClassTable() != nullptr) {
1088 class_loaders_.insert(class_loader);
1089 }
Mathieu Chartier590fee92013-09-13 13:46:47 -07001090 }
1091 }
1092}
1093
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001094void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001095 DCHECK(!IsInBootImage(method));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001096 auto it = native_object_relocations_.find(method);
1097 CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
Mathieu Chartiere401d142015-04-22 13:56:20 -07001098 << PrettyMethod(method);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001099 size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
1100 native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
Vladimir Marko14632852015-08-17 12:07:23 +01001101 offset += ArtMethod::Size(target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001102}
1103
Mathieu Chartier590fee92013-09-13 13:46:47 -07001104void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
1105 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1106 DCHECK(writer != nullptr);
1107 writer->WalkFieldsInOrder(obj);
1108}
1109
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001110void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
1111 ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1112 DCHECK(writer != nullptr);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001113 if (!writer->IsInBootImage(obj)) {
1114 writer->UnbinObjectsIntoOffset(obj);
1115 }
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001116}
1117
1118void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001119 DCHECK(!IsInBootImage(obj));
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001120 CHECK(obj != nullptr);
1121
1122 // We know the bin slot, and the total bin sizes for all objects by now,
1123 // so calculate the object's final image offset.
1124
1125 DCHECK(IsImageBinSlotAssigned(obj));
1126 BinSlot bin_slot = GetImageBinSlot(obj);
1127 // Change the lock word from a bin slot into an offset.
1128 AssignImageOffset(obj, bin_slot);
1129}
1130
Vladimir Markof4da6752014-08-01 19:04:18 +01001131void ImageWriter::CalculateNewObjectOffsets() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001132 Thread* const self = Thread::Current();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001133 StackHandleScope<1> hs(self);
1134 Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001135
Mathieu Chartiere401d142015-04-22 13:56:20 -07001136 auto* runtime = Runtime::Current();
1137 auto* heap = runtime->GetHeap();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001138 DCHECK_EQ(0U, image_end_);
1139
Mathieu Chartier31e89252013-08-28 11:29:12 -07001140 // Leave space for the header, but do not write it yet; we need to
Brian Carlstrom7940e442013-07-12 13:46:57 -07001141 // know where image_roots is going to end up.
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001142 image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit alignment.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001143
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -08001144 image_objects_offset_begin_ = image_end_;
1145 // Clear any pre-existing monitors which may have been in the monitor words, and assign bin slots.
1146 heap->VisitObjects(WalkFieldsCallback, this);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001147 // Write the image runtime methods.
1148 image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
1149 image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
1150 image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
1151 image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
1152 image_methods_[ImageHeader::kRefsOnlySaveMethod] =
1153 runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
1154 image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
1155 runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001156
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001157 // Add room for a fake length-prefixed array holding the image methods.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001158 const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
1159 auto it = native_object_relocations_.find(&image_method_array_);
1160 CHECK(it == native_object_relocations_.end());
1161 size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001162 if (!compile_app_image_) {
1163 native_object_relocations_.emplace(&image_method_array_,
1164 NativeObjectRelocation { offset, image_method_type });
1165 }
Vladimir Marko14632852015-08-17 12:07:23 +01001166 size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001167 const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
Vladimir Marko14632852015-08-17 12:07:23 +01001168 0, ArtMethod::Size(target_ptr_size_), method_alignment);
Vladimir Markocf36d492015-08-12 19:27:26 +01001169 CHECK_ALIGNED_PARAM(array_size, method_alignment);
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001170 offset += array_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001171 for (auto* m : image_methods_) {
1172 CHECK(m != nullptr);
1173 CHECK(m->IsRuntimeMethod());
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001174 DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
1175 if (!IsInBootImage(m)) {
1176 AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
1177 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001178 }
Vladimir Marko05792b92015-08-03 11:56:49 +01001179 // Calculate size of the dex cache arrays slot and prepare offsets.
1180 PrepareDexCacheArraySlots();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001181
Vladimir Markocf36d492015-08-12 19:27:26 +01001182 // Calculate bin slot offsets.
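// bin_slot_offsets_ ends up as an exclusive prefix sum over bin_slot_sizes_: each bin starts at
// image_objects_offset_begin_ plus the total size of all earlier bins, with extra rounding after
// kBinArtField so that the method bins start at ArtMethod alignment.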
1183 size_t bin_offset = image_objects_offset_begin_;
Vladimir Marko20f85592015-03-19 10:07:02 +00001184 for (size_t i = 0; i != kBinSize; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +01001185 bin_slot_offsets_[i] = bin_offset;
1186 bin_offset += bin_slot_sizes_[i];
1187 if (i == kBinArtField) {
1188 static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
1189 static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
1190 DCHECK_ALIGNED(bin_offset, 4u);
1191 DCHECK(method_alignment == 4u || method_alignment == 8u);
1192 bin_offset = RoundUp(bin_offset, method_alignment);
1193 }
Vladimir Marko20f85592015-03-19 10:07:02 +00001194 }
Vladimir Markocf36d492015-08-12 19:27:26 +01001195 // NOTE: There may be additional padding between the bin slots and the intern table.
1196
Mathieu Chartierc7853442015-03-27 14:35:38 -07001197 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
1198
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -08001199 // Transform each object's bin slot into an offset which will be used to do the final copy.
1200 heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001201
Mathieu Chartierc7853442015-03-27 14:35:38 -07001202 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001203
Vladimir Markof4da6752014-08-01 19:04:18 +01001204 image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));
1205
Mathieu Chartiere401d142015-04-22 13:56:20 -07001206 // Update the native relocations by adding their bin sums.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001207 for (auto& pair : native_object_relocations_) {
1208 NativeObjectRelocation& relocation = pair.second;
1209 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
Vladimir Markocf36d492015-08-12 19:27:26 +01001210 relocation.offset += bin_slot_offsets_[bin_type];
Mathieu Chartiere401d142015-04-22 13:56:20 -07001211 }
1212
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001213 // Calculate how big the intern table will be after being serialized.
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001214 InternTable* const intern_table = runtime->GetInternTable();
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001215 CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
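// WriteToMemory(nullptr) is a dry run that only returns how many bytes the serialized table
// needs; the actual write happens later in CopyAndFixupNativeData() once section offsets are
// known. The class table sizing below uses the same trick.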
1216 intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
1217
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001218 // Calculate how big the class table will be after being serialized.
1219 ClassLinker* class_linker = runtime->GetClassLinker();
1220 if (boot_image_space_ == nullptr) {
1221 // Compiling the boot image, add null class loader.
1222 class_loaders_.insert(nullptr);
1223 }
Mathieu Chartier67ad20e2015-12-09 15:41:09 -08001224 // class_loaders_ usually will not be empty, but may be empty if we attempt to create an image
1225 // with no classes.
1226 if (class_loaders_.size() == 1u) {
1227 // Only write the class table if we have exactly one class loader. There may be cases where
1228 // there are multiple class loaders if a class path is passed to dex2oat.
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001229 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
1230 for (mirror::ClassLoader* loader : class_loaders_) {
1231 ClassTable* table = class_linker->ClassTableForClassLoader(loader);
1232 CHECK(table != nullptr);
1233 class_table_bytes_ += table->WriteToMemory(nullptr);
1234 }
1235 }
1236
Mathieu Chartiere401d142015-04-22 13:56:20 -07001237 // Note that image_end_ is left at end of used mirror object section.
Vladimir Markof4da6752014-08-01 19:04:18 +01001238}
1239
1240void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
1241 CHECK_NE(0U, oat_loaded_size);
Ian Rogers13735952014-10-08 12:43:28 -07001242 const uint8_t* oat_file_begin = GetOatFileBegin();
1243 const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001244 oat_data_begin_ = oat_file_begin + oat_data_offset;
Ian Rogers13735952014-10-08 12:43:28 -07001245 const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001246
1247 // Create the image sections.
1248 ImageSection sections[ImageHeader::kSectionCount];
1249 // Objects section
1250 auto* objects_section = &sections[ImageHeader::kSectionObjects];
1251 *objects_section = ImageSection(0u, image_end_);
1252 size_t cur_pos = objects_section->End();
1253 // Add field section.
1254 auto* field_section = &sections[ImageHeader::kSectionArtFields];
1255 *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
Vladimir Markocf36d492015-08-12 19:27:26 +01001256 CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001257 cur_pos = field_section->End();
Vladimir Markocf36d492015-08-12 19:27:26 +01001258 // Round up to the alignment required by the method section.
Vladimir Marko14632852015-08-17 12:07:23 +01001259 cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001260 // Add method section.
1261 auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001262 *methods_section = ImageSection(cur_pos,
1263 bin_slot_sizes_[kBinArtMethodClean] +
1264 bin_slot_sizes_[kBinArtMethodDirty]);
Vladimir Markocf36d492015-08-12 19:27:26 +01001265 CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001266 cur_pos = methods_section->End();
Vladimir Marko05792b92015-08-03 11:56:49 +01001267 // Add dex cache arrays section.
1268 auto* dex_cache_arrays_section = &sections[ImageHeader::kSectionDexCacheArrays];
1269 *dex_cache_arrays_section = ImageSection(cur_pos, bin_slot_sizes_[kBinDexCacheArray]);
1270 CHECK_EQ(bin_slot_offsets_[kBinDexCacheArray], dex_cache_arrays_section->Offset());
1271 cur_pos = dex_cache_arrays_section->End();
Nicolas Geoffray7bf2b4f2015-07-08 10:11:59 +00001272 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
1273 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001274 // Calculate the size of the interned strings.
1275 auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
1276 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1277 cur_pos = interned_strings_section->End();
Alex Lighte050c8f2015-12-16 15:52:51 -08001278 // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
1279 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001280 // Calculate the size of the class table section.
1281 auto* class_table_section = &sections[ImageHeader::kSectionClassTable];
1282 *class_table_section = ImageSection(cur_pos, class_table_bytes_);
1283 cur_pos = class_table_section->End();
1284 // Image end goes right before the start of the image bitmap.
1285 const size_t image_end = static_cast<uint32_t>(cur_pos);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001286 // Finally bitmap section.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001287 const size_t bitmap_bytes = image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001288 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1289 *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1290 cur_pos = bitmap_section->End();
1291 if (kIsDebugBuild) {
1292 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001293 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001294 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1295 ++idx;
1296 }
1297 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1298 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001299 CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
1300 "Oat file should be right after the image.";
Mathieu Chartierceb07b32015-12-10 09:33:21 -08001301 // Create the header; leave 0 for the data size since we will fill it in as we are writing the
1302 // image.
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001303 new (image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_begin_),
1304 image_end,
1305 sections,
1306 image_roots_address_,
1307 oat_file_->GetOatHeader().GetChecksum(),
1308 PointerToLowMemUInt32(oat_file_begin),
1309 PointerToLowMemUInt32(oat_data_begin_),
1310 PointerToLowMemUInt32(oat_data_end),
1311 PointerToLowMemUInt32(oat_file_end),
1312 target_ptr_size_,
Mathieu Chartierceb07b32015-12-10 09:33:21 -08001313 compile_pic_,
1314 image_storage_mode_,
1315 /*data_size*/0u);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001316}
1317
1318ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001319 auto it = native_object_relocations_.find(method);
1320 CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001321 CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
1322 return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001323}
1324
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001325class FixupRootVisitor : public RootVisitor {
1326 public:
1327 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1328 }
1329
1330 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001331 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001332 for (size_t i = 0; i < count; ++i) {
1333 *roots[i] = ImageAddress(*roots[i]);
1334 }
1335 }
1336
1337 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1338 const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001339 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001340 for (size_t i = 0; i < count; ++i) {
1341 roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
1342 }
1343 }
1344
1345 private:
1346 ImageWriter* const image_writer_;
1347
Mathieu Chartier90443472015-07-16 20:32:27 -07001348 mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001349 const size_t offset = image_writer_->GetImageOffset(obj);
1350 auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
1351 VLOG(compiler) << "Update root from " << obj << " to " << dest;
1352 return dest;
1353 }
1354};
1355
Mathieu Chartierc7853442015-03-27 14:35:38 -07001356void ImageWriter::CopyAndFixupNativeData() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001357 // Copy ArtFields and methods to their locations and update the array for convenience.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001358 for (auto& pair : native_object_relocations_) {
1359 NativeObjectRelocation& relocation = pair.second;
1360 auto* dest = image_->Begin() + relocation.offset;
1361 DCHECK_GE(dest, image_->Begin() + image_end_);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001362 DCHECK(!IsInBootImage(pair.first));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001363 switch (relocation.type) {
1364 case kNativeObjectRelocationTypeArtField: {
1365 memcpy(dest, pair.first, sizeof(ArtField));
1366 reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
1367 GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
1368 break;
1369 }
1370 case kNativeObjectRelocationTypeArtMethodClean:
1371 case kNativeObjectRelocationTypeArtMethodDirty: {
1372 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1373 reinterpret_cast<ArtMethod*>(dest));
1374 break;
1375 }
1376 // For arrays, copy just the header since the elements will get copied by their corresponding
1377 // relocations.
1378 case kNativeObjectRelocationTypeArtFieldArray: {
1379 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1380 break;
1381 }
1382 case kNativeObjectRelocationTypeArtMethodArrayClean:
1383 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
Vladimir Markocf36d492015-08-12 19:27:26 +01001384 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
1385 0,
Vladimir Marko14632852015-08-17 12:07:23 +01001386 ArtMethod::Size(target_ptr_size_),
1387 ArtMethod::Alignment(target_ptr_size_)));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001388 break;
Vladimir Marko05792b92015-08-03 11:56:49 +01001389 case kNativeObjectRelocationTypeDexCacheArray:
1390 // Nothing to copy here, everything is done in FixupDexCache().
1391 break;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001392 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001393 }
1394 }
1395 // Fixup the image method roots.
1396 auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001397 const ImageSection& methods_section = image_header->GetMethodsSection();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001398 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001399 ArtMethod* method = image_methods_[i];
1400 CHECK(method != nullptr);
1401 if (!IsInBootImage(method)) {
1402 auto it = native_object_relocations_.find(method);
1403 CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(method);
1404 NativeObjectRelocation& relocation = it->second;
1405 CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
1406 << methods_section;
1407 CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
1408 method = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
1409 }
1410 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001411 }
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001412 FixupRootVisitor root_visitor(this);
1413
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001414 // Write the intern table into the image.
1415 const ImageSection& intern_table_section = image_header->GetImageSection(
1416 ImageHeader::kSectionInternedStrings);
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001417 Runtime* const runtime = Runtime::Current();
1418 InternTable* const intern_table = runtime->GetInternTable();
1419 uint8_t* const intern_table_memory_ptr = image_->Begin() + intern_table_section.Offset();
1420 const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
1421 CHECK_EQ(intern_table_bytes, intern_table_bytes_);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001422 // Fixup the pointers in the newly written intern table to contain image addresses.
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001423 InternTable temp_intern_table;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001424 // Note that we require that ReadFromMemory does not make an internal copy of the elements so that
1425 // the VisitRoots() will update the memory directly rather than the copies.
1426 // This also relies on visit roots not doing any verification which could fail after we update
1427 // the roots to be the image addresses.
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001428 temp_intern_table.ReadFromMemory(intern_table_memory_ptr);
1429 CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
1430 temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
1431
Mathieu Chartier67ad20e2015-12-09 15:41:09 -08001432 // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
1433 // class loaders. Writing multiple class tables into the image is currently unsupported.
1434 if (class_table_bytes_ > 0u) {
1435 ClassLinker* const class_linker = runtime->GetClassLinker();
1436 const ImageSection& class_table_section = image_header->GetImageSection(
1437 ImageHeader::kSectionClassTable);
1438 uint8_t* const class_table_memory_ptr = image_->Begin() + class_table_section.Offset();
1439 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
1440 size_t class_table_bytes = 0;
1441 for (mirror::ClassLoader* loader : class_loaders_) {
1442 ClassTable* table = class_linker->ClassTableForClassLoader(loader);
1443 CHECK(table != nullptr);
1444 uint8_t* memory_ptr = class_table_memory_ptr + class_table_bytes;
1445 class_table_bytes += table->WriteToMemory(memory_ptr);
1446 // Fixup the pointers in the newly written class table to contain image addresses. See
1447 // above comment for intern tables.
1448 ClassTable temp_class_table;
1449 temp_class_table.ReadFromMemory(memory_ptr);
1450 CHECK_EQ(temp_class_table.NumZygoteClasses(), table->NumNonZygoteClasses() +
1451 table->NumZygoteClasses());
1452 BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(&root_visitor,
1453 RootInfo(kRootUnknown));
1454 temp_class_table.VisitRoots(buffered_visitor);
1455 }
1456 CHECK_EQ(class_table_bytes, class_table_bytes_);
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001457 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001458}
1459
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001460void ImageWriter::CopyAndFixupObjects() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001461 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001462 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
1463 // Fix up the objects that previously had hash codes.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001464 for (const auto& hash_pair : saved_hashcode_map_) {
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001465 Object* obj = hash_pair.first;
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001466 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
1467 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001468 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001469 saved_hashcode_map_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001470}
1471
Mathieu Chartier590fee92013-09-13 13:46:47 -07001472void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
Mathieu Chartier4d7f61d2014-04-17 14:43:39 -07001473 DCHECK(obj != nullptr);
1474 DCHECK(arg != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001475 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
1476}
1477
Mathieu Chartiere401d142015-04-22 13:56:20 -07001478void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
1479 mirror::Class* klass, Bin array_type) {
1480 CHECK(klass->IsArrayClass());
1481 CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
1482 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001483 const size_t num_elements = arr->GetLength();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001484 dst->SetClass(GetImageAddress(arr->GetClass()));
1485 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001486 for (size_t i = 0, count = num_elements; i < count; ++i) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001487 void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
1488 if (elem != nullptr && !IsInBootImage(elem)) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001489 auto it = native_object_relocations_.find(elem);
Vladimir Marko05792b92015-08-03 11:56:49 +01001490 if (UNLIKELY(it == native_object_relocations_.end())) {
// No relocation entry was found, so decide from the array's bin type which kind of element is
// missing; the end iterator must not be dereferenced here.
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001491 if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001492 auto* method = reinterpret_cast<ArtMethod*>(elem);
1493 LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
1494 << method << " idx=" << i << "/" << num_elements << " with declaring class "
1495 << PrettyClass(method->GetDeclaringClass());
1496 } else {
1497 CHECK_EQ(array_type, kBinArtField);
1498 auto* field = reinterpret_cast<ArtField*>(elem);
1499 LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
1500 << field << " idx=" << i << "/" << num_elements << " with declaring class "
1501 << PrettyClass(field->GetDeclaringClass());
1502 }
Vladimir Marko05792b92015-08-03 11:56:49 +01001503 UNREACHABLE();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001504 } else {
1505 elem = image_begin_ + it->second.offset;
1506 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001507 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001508 dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001509 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001510}
1511
1512void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001513 if (IsInBootImage(obj)) {
1514 return;
1515 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001516 size_t offset = GetImageOffset(obj);
1517 auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001518 DCHECK_LT(offset, image_end_);
1519 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001520
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001521 image_bitmap_->Set(dst); // Mark the obj as live.
1522
1523 const size_t n = obj->SizeOf();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001524 DCHECK_LE(offset + n, image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001525 memcpy(dst, src, n);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001526
Mathieu Chartierad2541a2013-10-25 10:05:23 -07001527 // Write back the hash code of objects which had inflated monitors or a hash code in their
1528 // monitor word.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001529 const auto it = saved_hashcode_map_.find(obj);
1530 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1531 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001532 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001533}
1534
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001535// Rewrite all the references in the copied object to point to their image address equivalent
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001536class FixupVisitor {
1537 public:
1538 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1539 }
1540
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001541 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1542 // with other logic.
1543 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1544 const {}
1545 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1546
1547
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001548 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001549 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Hiroshi Yamauchi6e83c172014-05-01 21:25:41 -07001550 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001551 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1552 // image.
1553 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001554 offset,
1555 image_writer_->GetImageAddress(ref));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001556 }
1557
1558 // java.lang.ref.Reference visitor.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001559 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001560 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001561 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Mathieu Chartiera808bac2015-11-05 16:33:15 -08001562 mirror::Reference::ReferentOffset(),
1563 image_writer_->GetImageAddress(ref->GetReferent()));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001564 }
1565
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001566 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001567 ImageWriter* const image_writer_;
1568 mirror::Object* const copy_;
1569};
1570
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001571class FixupClassVisitor FINAL : public FixupVisitor {
1572 public:
1573 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1574 }
1575
Mathieu Chartierc7853442015-03-27 14:35:38 -07001576 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001577 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001578 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001579 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001580 }
1581
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001582 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1583 mirror::Reference* ref ATTRIBUTE_UNUSED) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001584 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001585 LOG(FATAL) << "Reference not expected here.";
1586 }
1587};
1588
Vladimir Marko05792b92015-08-03 11:56:49 +01001589uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
1590 DCHECK(obj != nullptr);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001591 DCHECK(!IsInBootImage(obj));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001592 auto it = native_object_relocations_.find(obj);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001593 CHECK(it != native_object_relocations_.end()) << obj << " spaces "
1594 << Runtime::Current()->GetHeap()->DumpSpaces();
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001595 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko05792b92015-08-03 11:56:49 +01001596 return relocation.offset;
1597}
1598
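// Two flavors of native relocation helpers follow: NativeLocationInImage() returns the address
// an object will have once the image is mapped at image_begin_ (a target address suitable for
// storing inside copied objects), while NativeCopyLocation() returns where the bytes live in the
// local image_ buffer being written. Both derive from the same NativeOffsetInImage() offset.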
1599template <typename T>
1600T* ImageWriter::NativeLocationInImage(T* obj) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001601 return (obj == nullptr || IsInBootImage(obj))
1602 ? obj
1603 : reinterpret_cast<T*>(image_begin_ + NativeOffsetInImage(obj));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001604}
1605
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001606template <typename T>
1607T* ImageWriter::NativeCopyLocation(T* obj) {
1608 return (obj == nullptr || IsInBootImage(obj))
1609 ? obj
1610 : reinterpret_cast<T*>(image_->Begin() + NativeOffsetInImage(obj));
1611}
1612
1613class NativeLocationVisitor {
1614 public:
1615 explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
1616
1617 template <typename T>
1618 T* operator()(T* ptr) const {
1619 return image_writer_->NativeLocationInImage(ptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001620 }
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001621
1622 private:
1623 ImageWriter* const image_writer_;
1624};
1625
1626void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
1627 orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
Mathieu Chartierc7853442015-03-27 14:35:38 -07001628 FixupClassVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001629 static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001630}
1631
Ian Rogersef7d42f2014-01-06 12:55:46 -08001632void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001633 DCHECK(orig != nullptr);
1634 DCHECK(copy != nullptr);
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07001635 if (kUseBakerOrBrooksReadBarrier) {
1636 orig->AssertReadBarrierPointer();
1637 if (kUseBrooksReadBarrier) {
1638 // Note the address 'copy' isn't the same as the image address of 'orig'.
1639 copy->SetReadBarrierPointer(GetImageAddress(orig));
1640 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
1641 }
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08001642 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001643 auto* klass = orig->GetClass();
1644 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
Vladimir Marko05792b92015-08-03 11:56:49 +01001645 // Is this a native pointer array?
Mathieu Chartiere401d142015-04-22 13:56:20 -07001646 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
1647 if (it != pointer_arrays_.end()) {
1648 // Should only need to fixup every pointer array exactly once.
1649 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
1650 pointer_arrays_.erase(it);
1651 return;
1652 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001653 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001654 if (orig->IsClass()) {
1655 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001656 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001657 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
1658 // Need to go update the ArtMethod.
1659 auto* dest = down_cast<mirror::AbstractMethod*>(copy);
1660 auto* src = down_cast<mirror::AbstractMethod*>(orig);
1661 ArtMethod* src_method = src->GetArtMethod();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001662 auto it = native_object_relocations_.find(src_method);
1663 CHECK(it != native_object_relocations_.end())
1664 << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001665 dest->SetArtMethod(
1666 reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
Vladimir Marko05792b92015-08-03 11:56:49 +01001667 } else if (!klass->IsArrayClass()) {
1668 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1669 if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
1670 FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001671 } else if (klass->IsClassLoaderClass()) {
Vladimir Marko05792b92015-08-03 11:56:49 +01001672 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
1673 // ClassLoader.
1674 down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
Mathieu Chartier5550c562015-09-22 15:18:04 -07001675 // Also set allocator to null to be safe. The allocator is created when we create the class
1676 // table. We also never expect to unload things in the image since they are held live as
1677 // roots.
1678 down_cast<mirror::ClassLoader*>(copy)->SetAllocator(nullptr);
Vladimir Marko05792b92015-08-03 11:56:49 +01001679 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001680 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001681 FixupVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001682 orig->VisitReferences(visitor, visitor);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001683 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001684}
1685
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001686
1687class ImageAddressVisitor {
1688 public:
1689 explicit ImageAddressVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
1690
1691 template <typename T>
1692 T* operator()(T* ptr) const SHARED_REQUIRES(Locks::mutator_lock_) {
1693 return image_writer_->GetImageAddress(ptr);
1694 }
1695
1696 private:
1697 ImageWriter* const image_writer_;
1698};
1699
1700
Vladimir Marko05792b92015-08-03 11:56:49 +01001701void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
1702 mirror::DexCache* copy_dex_cache) {
1703 // Though the DexCache array fields are usually treated as native pointers, we set the full
1704 // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
1705 // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
1706 // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset))).
1707 GcRoot<mirror::String>* orig_strings = orig_dex_cache->GetStrings();
1708 if (orig_strings != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001709 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
1710 NativeLocationInImage(orig_strings),
1711 /*pointer size*/8u);
1712 orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings), ImageAddressVisitor(this));
Vladimir Marko05792b92015-08-03 11:56:49 +01001713 }
1714 GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes();
1715 if (orig_types != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001716 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
1717 NativeLocationInImage(orig_types),
1718 /*pointer size*/8u);
1719 orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types), ImageAddressVisitor(this));
Vladimir Marko05792b92015-08-03 11:56:49 +01001720 }
1721 ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
1722 if (orig_methods != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001723 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
1724 NativeLocationInImage(orig_methods),
1725 /*pointer size*/8u);
1726 ArtMethod** copy_methods = NativeCopyLocation(orig_methods);
Vladimir Marko05792b92015-08-03 11:56:49 +01001727 for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
1728 ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001729 ArtMethod* copy = NativeLocationInImage(orig);
Vladimir Marko05792b92015-08-03 11:56:49 +01001730 mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
1731 }
1732 }
1733 ArtField** orig_fields = orig_dex_cache->GetResolvedFields();
1734 if (orig_fields != nullptr) {
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001735 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
1736 NativeLocationInImage(orig_fields),
1737 /*pointer size*/8u);
1738 ArtField** copy_fields = NativeCopyLocation(orig_fields);
Vladimir Marko05792b92015-08-03 11:56:49 +01001739 for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
1740 ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_);
Mathieu Chartier4b00d342015-11-13 10:42:08 -08001741 ArtField* copy = NativeLocationInImage(orig);
Vladimir Marko05792b92015-08-03 11:56:49 +01001742 mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_);
1743 }
1744 }
1745}
1746
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001747const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
1748 DCHECK_LT(type, kOatAddressCount);
1749 // If we are compiling an app image, we need to use the stubs of the boot image.
1750 if (compile_app_image_) {
1751 // Use the current image pointers.
Mathieu Chartier073b16c2015-11-10 14:13:23 -08001752 gc::space::ImageSpace* image_space = Runtime::Current()->GetHeap()->GetBootImageSpace();
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001753 DCHECK(image_space != nullptr);
1754 const OatFile* oat_file = image_space->GetOatFile();
1755 CHECK(oat_file != nullptr);
1756 const OatHeader& header = oat_file->GetOatHeader();
1757 switch (type) {
1758 // TODO: We could maybe clean this up if we stored them in an array in the oat header.
1759 case kOatAddressQuickGenericJNITrampoline:
1760 return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
1761 case kOatAddressInterpreterToInterpreterBridge:
1762 return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
1763 case kOatAddressInterpreterToCompiledCodeBridge:
1764 return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
1765 case kOatAddressJNIDlsymLookup:
1766 return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
1767 case kOatAddressQuickIMTConflictTrampoline:
1768 return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
1769 case kOatAddressQuickResolutionTrampoline:
1770 return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
1771 case kOatAddressQuickToInterpreterBridge:
1772 return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
1773 default:
1774 UNREACHABLE();
1775 }
1776 }
1777 return GetOatAddressForOffset(oat_address_offsets_[type]);
1778}
1779
Mathieu Chartiere401d142015-04-22 13:56:20 -07001780const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001781 DCHECK(!method->IsResolutionMethod()) << PrettyMethod(method);
1782 DCHECK(!method->IsImtConflictMethod()) << PrettyMethod(method);
1783 DCHECK(!method->IsImtUnimplementedMethod()) << PrettyMethod(method);
Alex Light9139e002015-10-09 15:59:48 -07001784 DCHECK(method->IsInvokable()) << PrettyMethod(method);
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001785 DCHECK(!IsInBootImage(method)) << PrettyMethod(method);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001786
1787 // Use original code if it exists. Otherwise, set the code pointer to the resolution
1788 // trampoline.
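// In short: keep the compiled code for non-static, constructor, or initialized-class methods;
// use the generic JNI trampoline for native methods without code; use the quick-to-interpreter
// bridge for non-native methods without code; otherwise (a static method of a not-yet-initialized
// class) use the resolution trampoline so the class gets initialized first.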
1789
1790 // Quick entrypoint:
Jeff Haoc7d11882015-02-03 15:08:39 -08001791 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
1792 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001793 const uint8_t* quick_code = GetOatAddressForOffset(quick_oat_code_offset);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001794 *quick_is_interpreted = false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001795 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
1796 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001797 // We have code for a non-static or initialized method, just use the code.
1798 } else if (quick_code == nullptr && method->IsNative() &&
1799 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
1800 // Non-static or initialized native method missing compiled code, use generic JNI version.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001801 quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001802 } else if (quick_code == nullptr && !method->IsNative()) {
1803 // We don't have code at all for a non-native method, use the interpreter.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001804 quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001805 *quick_is_interpreted = true;
1806 } else {
1807 CHECK(!method->GetDeclaringClass()->IsInitialized());
1808 // We have code for a static method, but need to go through the resolution stub for class
1809 // initialization.
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001810 quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
1811 }
1812 if (!IsInBootOatFile(quick_code)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001813 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001814 }
1815 return quick_code;
1816}
1817
Mathieu Chartiere401d142015-04-22 13:56:20 -07001818const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001819 // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
1820 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001821 Runtime* runtime = Runtime::Current();
1822 if (UNLIKELY(method == runtime->GetResolutionMethod())) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001823 return GetOatAddress(kOatAddressQuickResolutionTrampoline);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001824 } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
1825 method == runtime->GetImtUnimplementedMethod())) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001826 return GetOatAddress(kOatAddressQuickIMTConflictTrampoline);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001827 } else {
1828 // We assume all methods have code. If they don't currently then we set them to use the
1829 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1830 // use results in an AbstractMethodError. We use the interpreter to achieve this.
Alex Light9139e002015-10-09 15:59:48 -07001831 if (UNLIKELY(!method->IsInvokable())) {
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001832 return GetOatAddress(kOatAddressQuickToInterpreterBridge);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001833 } else {
1834 bool quick_is_interpreted;
1835 return GetQuickCode(method, &quick_is_interpreted);
1836 }
1837 }
1838}
1839
Mathieu Chartiere401d142015-04-22 13:56:20 -07001840void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
Vladimir Marko14632852015-08-17 12:07:23 +01001841 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001842
1843 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
Vladimir Marko05792b92015-08-03 11:56:49 +01001844
1845 ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
1846 copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
1847 GcRoot<mirror::Class>* orig_resolved_types = orig->GetDexCacheResolvedTypes(target_ptr_size_);
1848 copy->SetDexCacheResolvedTypes(NativeLocationInImage(orig_resolved_types), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001849
Ian Rogers848871b2013-08-05 10:56:33 -07001850 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
1851 // oat_begin_
Brian Carlstrom7940e442013-07-12 13:46:57 -07001852
Ian Rogers848871b2013-08-05 10:56:33 -07001853 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001854 Runtime* runtime = Runtime::Current();
1855 if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001856 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001857 GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001858 } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
1859 orig == runtime->GetImtUnimplementedMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001860 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001861 GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001862 } else if (UNLIKELY(orig->IsRuntimeMethod())) {
1863 bool found_one = false;
1864 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
1865 auto idx = static_cast<Runtime::CalleeSaveType>(i);
1866 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
1867 found_one = true;
1868 break;
1869 }
1870 }
1871 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
1872 CHECK(copy->IsRuntimeMethod());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001873 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07001874 // We assume all methods have code. If they don't currently then we set them to use the
1875 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1876 // use results in an AbstractMethodError. We use the interpreter to achieve this.
Alex Light9139e002015-10-09 15:59:48 -07001877 if (UNLIKELY(!orig->IsInvokable())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001878 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001879 GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001880 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001881 bool quick_is_interpreted;
Ian Rogers13735952014-10-08 12:43:28 -07001882 const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001883 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02001884
Sebastien Hertze1d07812014-05-21 15:44:09 +02001885 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07001886 if (orig->IsNative()) {
1887 // The native method's pointer is set to a stub to lookup via dlsym.
1888 // Note this is not the code_ pointer, that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001889 copy->SetEntryPointFromJniPtrSize(
Mathieu Chartierda5b28a2015-11-05 08:03:47 -08001890 GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001891 }
1892 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001893 }
1894}
1895
static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
  uint64_t data_sec_offset;
  bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
  if (!has_data_sec) {
    return nullptr;
  }
  return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
}

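// Copies the checksum from the oat file's OatHeader into the image header, so that the runtime
// can later verify that the image and the oat file it was generated with still match.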
void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
  std::string error_msg;
  std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file,
                                             PROT_READ | PROT_WRITE,
                                             MAP_SHARED,
                                             &error_msg));
  if (elf.get() == nullptr) {
    LOG(FATAL) << "Unable to open oat file: " << error_msg;
    return;
  }
  OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
  CHECK(oat_header != nullptr);
  CHECK(oat_header->IsValid());

  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  image_header->SetOatChecksum(oat_header->GetChecksum());
}

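// Returns the combined size in bytes of all bins ordered before |up_to|, i.e. the offset at
// which bin |up_to| starts within the bin slot region. For example, GetBinSizeSum(kBinArtField)
// sums the sizes of every bin that precedes kBinArtField in the Bin enum.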
size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
}

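// A BinSlot is stashed in an object's lock word while the image layout is being computed. The
// low kBinShift bits hold the object's byte offset within its bin and the next kBinBits bits
// hold the bin itself, e.g. (illustrative values only) a bin with numeric value 2 and an index
// of 0x40 packs to (2u << 27) | 0x40u with the current constants.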
ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to be updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong bin shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

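// Packs |bin| into the high bits and |index| into the low bits; the DCHECK catches an index
// large enough to spill into the bin field.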
ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

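// Computes where the oat file will be mapped: the image objects (up to image_end_) are followed
// by the native sections (ArtFields, ArtMethods and dex cache arrays), the intern table and the
// class table, and the oat file then starts at the next page boundary after that data.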
uint8_t* ImageWriter::GetOatFileBegin() const {
  DCHECK_GT(intern_table_bytes_, 0u);
  size_t native_sections_size = bin_slot_sizes_[kBinArtField] +
                                bin_slot_sizes_[kBinArtMethodDirty] +
                                bin_slot_sizes_[kBinArtMethodClean] +
                                bin_slot_sizes_[kBinDexCacheArray] +
                                intern_table_bytes_ +
                                class_table_bytes_;
  return image_begin_ + RoundUp(image_end_ + native_sections_size, kPageSize);
}

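// Maps a native object relocation type to the bin its objects are laid out in. Clean and dirty
// methods are kept in separate bins, presumably so that methods expected to be written to at
// runtime share pages and more of the image stays clean.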
ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
  switch (type) {
    case kNativeObjectRelocationTypeArtField:
    case kNativeObjectRelocationTypeArtFieldArray:
      return kBinArtField;
    case kNativeObjectRelocationTypeArtMethodClean:
    case kNativeObjectRelocationTypeArtMethodArrayClean:
      return kBinArtMethodClean;
    case kNativeObjectRelocationTypeArtMethodDirty:
    case kNativeObjectRelocationTypeArtMethodArrayDirty:
      return kBinArtMethodDirty;
    case kNativeObjectRelocationTypeDexCacheArray:
      return kBinDexCacheArray;
  }
  UNREACHABLE();
}

}  // namespace art