/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

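// The image must not contain any com.android.dex.Dex objects; those wrap memory buffers of mapped
// dex files that are only meaningful in the compiler's own process (see PrepareImageAddressSpace).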
static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    ComputeLazyFieldsForImageClasses();  // Add useful information
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  Runtime::Current()->GetOatFileManager().RegisterOatFile(
      std::unique_ptr<const OatFile>(oat_file_));

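  // Cache the trampoline and bridge entry point offsets from the oat header; they are kept as
  // member state for use during the later header creation and copy/fixup passes.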
  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);

  {
    ScopedObjectAccess soa(Thread::Current());
    CreateHeader(oat_loaded_size, oat_data_offset);
    CopyAndFixupNativeData();
    // TODO: heap validation can't handle these fix up passes.
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields + methods.
  const auto write_count = image_header->GetImageSize();
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  const ImageSection& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
  CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         bitmap_section.Size(), bitmap_section.Offset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

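// Like SetImageOffset(), but requires that a forwarding address has already been assigned and
// simply overwrites it with the new offset.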
void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  uint32_t size = 0u;
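  // For each dex cache, reserve a contiguous chunk in the dex cache array bin (laid out by
  // DexCacheArraysLayout) and record where each of its native arrays will be placed within it.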
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(), size + layout.TypesOffset());
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(), size + layout.MethodsOffset());
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(), size + layout.FieldsOffset());
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), size + layout.StringsOffset());
    size += layout.Size();
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}

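// Records where a native dex cache array will live, relative to the start of the dex cache array
// bin. Arrays that do not exist (the dex file has no IDs of that kind) are skipped.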
void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset) {
  if (array != nullptr) {
    native_object_relocations_.emplace(
        array,
        NativeObjectRelocation { offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

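// Called for vtables and IfTable method arrays. The debug-only loop verifies that every
// non-runtime method referenced by the array belongs to an image class before the array is
// recorded in pointer_arrays_.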
void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        auto* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || IsImageClass(klass)) << PrettyClass(klass)
            << " should be an image class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    //  * Strings (they are immutable)
    //  * Art methods that aren't native and have initialized declaring classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
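  // The mapping must cover the image header and mirror objects, all native bins (fields, methods,
  // dex cache arrays) and the serialized intern table, rounded up to a whole page.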
  const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() + intern_table_bytes_,
                                kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap", image_->Begin(), RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
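    // Computing the name here caches it in the Class object, so the name String is written into
    // the image rather than being computed lazily at runtime.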
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

bool ImageWriter::IsImageClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->IsImageClass(klass)) {
      std::string temp;
      non_image_classes_.insert(klass->GetDescriptor(&temp));
    }
    return true;
  }

  std::set<std::string> non_image_classes_;
  ImageWriter* const image_writer_;
};

void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : visitor.non_image_classes_) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
    for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* method =
          mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but have a
        // declaring class which is an image class. Set it to the resolution method to be safe and
        // prevent dangling pointers.
        if (method->IsMiranda() || !IsImageClass(declaring_class)) {
          mirror::DexCache::SetElementPtrSize(resolved_methods,
                                              i,
                                              resolution_method,
                                              target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
          << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not interned.
  if (obj->GetClass()->IsStringClass()) {
    // we must be an interned string that was forward referenced and already assigned
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    // InternImageString allows us to intern while holding the heap bitmap lock. This is safe since
    // we are guaranteed to not have GC during image writing.
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
        obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // interned obj is after us, allocate its location early
        AssignImageBinSlot(interned);
      }
      // point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}

ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then again to copy the dex caches. We check that
  // the number of dex caches does not change in between.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    size_t i = 0;
    for (jobject weak_root : class_linker->GetDexCaches()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
      dex_caches->Set<false>(i, dex_cache);
      ++i;
    }
  }

  // build an Object[] of the roots needed to restore the runtime
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  }
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    }
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
  }
}

// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
      // Visit and assign offsets for fields and field arrays.
      auto* as_klass = h_obj->AsClass();
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
              << " already forwarded";
          size_t& offset = bin_slot_sizes_[kBinArtField];
          native_object_relocations_.emplace(
              cur_fields, NativeObjectRelocation {
                  offset, kNativeObjectRelocationTypeArtFieldArray });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
            // Need to forward arrays separate of fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            native_object_relocations_.emplace(
                field, NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      LengthPrefixedArray<ArtMethod>* method_arrays[] = {
          as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
      };
      for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
        if (array == nullptr) {
          continue;
        }
        bool any_dirty = false;
        size_t count = 0;
        const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
        const size_t method_size = ArtMethod::Size(target_ptr_size_);
        auto iteration_range =
            MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
        for (auto& m : iteration_range) {
          any_dirty = any_dirty || WillMethodBeDirty(&m);
          ++count;
        }
        NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
            kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
                                                                               method_size,
                                                                               method_alignment);
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end()) << "Method array " << array
            << " already forwarded";
        size_t& offset = bin_slot_sizes_[bin_type];
        native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
            any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
                kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : iteration_range) {
          AssignMethodOffset(&m, type);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += count;
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}

void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
  auto it = native_object_relocations_.find(method);
  CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
      << PrettyMethod(method);
  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
  offset += ArtMethod::Size(target_ptr_size_);
}

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);
}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->UnbinObjectsIntoOffset(obj);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lockword from a bin slot into an offset
  AssignImageOffset(obj, bin_slot);
}

void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));

  auto* runtime = Runtime::Current();
  auto* heap = runtime->GetHeap();
  DCHECK_EQ(0U, image_end_);

  // Leave space for the header, but do not write it yet; we need to
  // know where image_roots is going to end up.
  image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit-alignment

  image_objects_offset_begin_ = image_end_;
  // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
  heap->VisitObjects(WalkFieldsCallback, this);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
  image_methods_[ImageHeader::kRefsOnlySaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
  image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);

  // Add room for fake length prefixed array.
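  // The image runtime methods have no owning class, so a synthetic LengthPrefixedArray header
  // (image_method_array_) is reserved for them before their individual offsets are assigned below.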
923 const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
924 auto it = native_object_relocations_.find(&image_method_array_);
925 CHECK(it == native_object_relocations_.end());
926 size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
927 native_object_relocations_.emplace(&image_method_array_,
928 NativeObjectRelocation { offset, image_method_type });
Vladimir Marko14632852015-08-17 12:07:23 +0100929 size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -0700930 const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
Vladimir Marko14632852015-08-17 12:07:23 +0100931 0, ArtMethod::Size(target_ptr_size_), method_alignment);
Vladimir Markocf36d492015-08-12 19:27:26 +0100932 CHECK_ALIGNED_PARAM(array_size, method_alignment);
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -0700933 offset += array_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700934 for (auto* m : image_methods_) {
935 CHECK(m != nullptr);
936 CHECK(m->IsRuntimeMethod());
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700937 AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700938 }
Vladimir Marko05792b92015-08-03 11:56:49 +0100939 // Calculate size of the dex cache arrays slot and prepare offsets.
940 PrepareDexCacheArraySlots();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700941
Vladimir Markocf36d492015-08-12 19:27:26 +0100942 // Calculate bin slot offsets.
943 size_t bin_offset = image_objects_offset_begin_;
Vladimir Marko20f85592015-03-19 10:07:02 +0000944 for (size_t i = 0; i != kBinSize; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +0100945 bin_slot_offsets_[i] = bin_offset;
946 bin_offset += bin_slot_sizes_[i];
947 if (i == kBinArtField) {
948 static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
949 static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
950 DCHECK_ALIGNED(bin_offset, 4u);
951 DCHECK(method_alignment == 4u || method_alignment == 8u);
952 bin_offset = RoundUp(bin_offset, method_alignment);
953 }
Vladimir Marko20f85592015-03-19 10:07:02 +0000954 }
Vladimir Markocf36d492015-08-12 19:27:26 +0100955 // NOTE: There may be additional padding between the bin slots and the intern table.
956
Mathieu Chartierc7853442015-03-27 14:35:38 -0700957 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
958
Hiroshi Yamauchi0c8c3032015-01-16 16:54:35 -0800959 // Transform each object's bin slot into an offset which will be used to do the final copy.
960 heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700961
Mathieu Chartierc7853442015-03-27 14:35:38 -0700962 DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
Igor Murashkinf5b4c502014-11-14 15:01:59 -0800963
Vladimir Markof4da6752014-08-01 19:04:18 +0100964 image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));
965
Mathieu Chartiere401d142015-04-22 13:56:20 -0700966 // Update the native relocations by adding their bin sums.
Mathieu Chartier54d220e2015-07-30 16:20:06 -0700967 for (auto& pair : native_object_relocations_) {
968 NativeObjectRelocation& relocation = pair.second;
969 Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
Vladimir Markocf36d492015-08-12 19:27:26 +0100970 relocation.offset += bin_slot_offsets_[bin_type];
Mathieu Chartiere401d142015-04-22 13:56:20 -0700971 }
972
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700973 // Calculate how big the intern table will be after being serialized.
974 auto* const intern_table = Runtime::Current()->GetInternTable();
975 CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
976 intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
977
Mathieu Chartiere401d142015-04-22 13:56:20 -0700978 // Note that image_end_ is left at end of used mirror object section.
Vladimir Markof4da6752014-08-01 19:04:18 +0100979}
980
981void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
982 CHECK_NE(0U, oat_loaded_size);
Ian Rogers13735952014-10-08 12:43:28 -0700983 const uint8_t* oat_file_begin = GetOatFileBegin();
984 const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700985 oat_data_begin_ = oat_file_begin + oat_data_offset;
Ian Rogers13735952014-10-08 12:43:28 -0700986 const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700987
988 // Create the image sections.
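  // Layout order: objects, ArtFields, ArtMethods, dex cache arrays, interned strings and the
  // image bitmap, with alignment padding inserted between sections where required.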
989 ImageSection sections[ImageHeader::kSectionCount];
990 // Objects section
991 auto* objects_section = &sections[ImageHeader::kSectionObjects];
992 *objects_section = ImageSection(0u, image_end_);
993 size_t cur_pos = objects_section->End();
994 // Add field section.
995 auto* field_section = &sections[ImageHeader::kSectionArtFields];
996 *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
Vladimir Markocf36d492015-08-12 19:27:26 +0100997 CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700998 cur_pos = field_section->End();
Vladimir Markocf36d492015-08-12 19:27:26 +0100999 // Round up to the alignment required by the method section.
Vladimir Marko14632852015-08-17 12:07:23 +01001000 cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001001 // Add method section.
1002 auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
1003 *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
1004 bin_slot_sizes_[kBinArtMethodDirty]);
Vladimir Markocf36d492015-08-12 19:27:26 +01001005 CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001006 cur_pos = methods_section->End();
Vladimir Marko05792b92015-08-03 11:56:49 +01001007 // Add dex cache arrays section.
1008 auto* dex_cache_arrays_section = &sections[ImageHeader::kSectionDexCacheArrays];
1009 *dex_cache_arrays_section = ImageSection(cur_pos, bin_slot_sizes_[kBinDexCacheArray]);
1010 CHECK_EQ(bin_slot_offsets_[kBinDexCacheArray], dex_cache_arrays_section->Offset());
1011 cur_pos = dex_cache_arrays_section->End();
Nicolas Geoffray7bf2b4f2015-07-08 10:11:59 +00001012 // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
1013 cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001014 // Calculate the size of the interned strings.
1015 auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
1016 *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1017 cur_pos = interned_strings_section->End();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001018 // Finally bitmap section.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001019 const size_t bitmap_bytes = image_bitmap_->Size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001020 auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1021 *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1022 cur_pos = bitmap_section->End();
1023 if (kIsDebugBuild) {
1024 size_t idx = 0;
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001025 for (const ImageSection& section : sections) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001026 LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1027 ++idx;
1028 }
1029 LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1030 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001031 const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
1032 CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
1033 "Oat file should be right after the image.";
Mathieu Chartiere401d142015-04-22 13:56:20 -07001034 // Create the header.
1035 new (image_->Begin()) ImageHeader(
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001036 PointerToLowMemUInt32(image_begin_), image_end,
1037 sections, image_roots_address_, oat_file_->GetOatHeader().GetChecksum(),
Mathieu Chartiere401d142015-04-22 13:56:20 -07001038 PointerToLowMemUInt32(oat_file_begin), PointerToLowMemUInt32(oat_data_begin_),
1039 PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), target_ptr_size_,
1040 compile_pic_);
1041}
1042
1043ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001044 auto it = native_object_relocations_.find(method);
1045 CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001046 CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
1047 return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001048}
1049
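// Remaps roots (used below for the serialized intern table) so that they point at the addresses
// the referenced objects will have once the image is mapped at image_begin_.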
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001050class FixupRootVisitor : public RootVisitor {
1051 public:
1052 explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1053 }
1054
1055 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001056 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001057 for (size_t i = 0; i < count; ++i) {
1058 *roots[i] = ImageAddress(*roots[i]);
1059 }
1060 }
1061
1062 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1063 const RootInfo& info ATTRIBUTE_UNUSED)
Mathieu Chartier90443472015-07-16 20:32:27 -07001064 OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001065 for (size_t i = 0; i < count; ++i) {
1066 roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
1067 }
1068 }
1069
1070 private:
1071 ImageWriter* const image_writer_;
1072
Mathieu Chartier90443472015-07-16 20:32:27 -07001073 mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001074 const size_t offset = image_writer_->GetImageOffset(obj);
1075 auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
1076 VLOG(compiler) << "Update root from " << obj << " to " << dest;
1077 return dest;
1078 }
1079};
1080
Mathieu Chartierc7853442015-03-27 14:35:38 -07001081void ImageWriter::CopyAndFixupNativeData() {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001082 // Copy ArtFields and methods to their locations and update the array for convenience.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001083 for (auto& pair : native_object_relocations_) {
1084 NativeObjectRelocation& relocation = pair.second;
1085 auto* dest = image_->Begin() + relocation.offset;
1086 DCHECK_GE(dest, image_->Begin() + image_end_);
1087 switch (relocation.type) {
1088 case kNativeObjectRelocationTypeArtField: {
1089 memcpy(dest, pair.first, sizeof(ArtField));
1090 reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
1091 GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
1092 break;
1093 }
1094 case kNativeObjectRelocationTypeArtMethodClean:
1095 case kNativeObjectRelocationTypeArtMethodDirty: {
1096 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1097 reinterpret_cast<ArtMethod*>(dest));
1098 break;
1099 }
1100 // For arrays, copy just the header since the elements will get copied by their corresponding
1101 // relocations.
1102 case kNativeObjectRelocationTypeArtFieldArray: {
1103 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1104 break;
1105 }
1106 case kNativeObjectRelocationTypeArtMethodArrayClean:
1107 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
Vladimir Markocf36d492015-08-12 19:27:26 +01001108 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
1109 0,
Vladimir Marko14632852015-08-17 12:07:23 +01001110 ArtMethod::Size(target_ptr_size_),
1111 ArtMethod::Alignment(target_ptr_size_)));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001112 break;
 }
Vladimir Marko05792b92015-08-03 11:56:49 +01001113 case kNativeObjectRelocationTypeDexCacheArray:
1114 // Nothing to copy here, everything is done in FixupDexCache().
1115 break;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001117 }
1118 }
1119 // Fixup the image method roots.
1120 auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001121 const ImageSection& methods_section = image_header->GetMethodsSection();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001122 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
1123 auto* m = image_methods_[i];
1124 CHECK(m != nullptr);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001125 auto it = native_object_relocations_.find(m);
1126 CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
1127 NativeObjectRelocation& relocation = it->second;
1128 CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
Mathieu Chartiere401d142015-04-22 13:56:20 -07001129 << methods_section;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001130 CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001131 auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
1132 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001133 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001134 // Write the intern table into the image.
1135 const ImageSection& intern_table_section = image_header->GetImageSection(
1136 ImageHeader::kSectionInternedStrings);
1137 InternTable* const intern_table = Runtime::Current()->GetInternTable();
1138 uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
1139 const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
1140 // Fixup the pointers in the newly written intern table to contain image addresses.
1141 InternTable temp_table;
1142 // Note that we require that ReadFromMemory does not make an internal copy of the elements so that
1143 // the VisitRoots() will update the memory directly rather than the copies.
1144 // This also relies on visit roots not doing any verification which could fail after we update
1145 // the roots to be the image addresses.
1146 temp_table.ReadFromMemory(memory_ptr);
1147 CHECK_EQ(temp_table.Size(), intern_table->Size());
1148 FixupRootVisitor visitor(this);
1149 temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
1150 CHECK_EQ(intern_table_bytes, intern_table_bytes_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001151}
1152
Mathieu Chartierfd04b6f2014-11-14 19:34:18 -08001153void ImageWriter::CopyAndFixupObjects() {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001154 gc::Heap* heap = Runtime::Current()->GetHeap();
Mathieu Chartier590fee92013-09-13 13:46:47 -07001155 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
1156 // Fix up the objects that previously had hash codes.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001157 for (const auto& hash_pair : saved_hashcode_map_) {
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001158 Object* obj = hash_pair.first;
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001159 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
1160 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001161 }
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001162 saved_hashcode_map_.clear();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001163}
1164
Mathieu Chartier590fee92013-09-13 13:46:47 -07001165void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
Mathieu Chartier4d7f61d2014-04-17 14:43:39 -07001166 DCHECK(obj != nullptr);
1167 DCHECK(arg != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001168 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
1169}
1170
Mathieu Chartiere401d142015-04-22 13:56:20 -07001171void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
1172 mirror::Class* klass, Bin array_type) {
1173 CHECK(klass->IsArrayClass());
1174 CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
1175 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001176 const size_t num_elements = arr->GetLength();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001177 dst->SetClass(GetImageAddress(arr->GetClass()));
1178 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001179 for (size_t i = 0, count = num_elements; i < count; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001180 auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
1181 if (elem != nullptr) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001182 auto it = native_object_relocations_.find(elem);
Vladimir Marko05792b92015-08-03 11:56:49 +01001183 if (UNLIKELY(it == native_object_relocations_.end())) {
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001184 if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001185 auto* method = reinterpret_cast<ArtMethod*>(elem);
1186 LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
1187 << method << " idx=" << i << "/" << num_elements << " with declaring class "
1188 << PrettyClass(method->GetDeclaringClass());
1189 } else {
1190 CHECK_EQ(array_type, kBinArtField);
1191 auto* field = reinterpret_cast<ArtField*>(elem);
1192 LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
1193 << field << " idx=" << i << "/" << num_elements << " with declaring class "
1194 << PrettyClass(field->GetDeclaringClass());
1195 }
Vladimir Marko05792b92015-08-03 11:56:49 +01001196 UNREACHABLE();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001197 } else {
1198 elem = image_begin_ + it->second.offset;
1199 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001200 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001201 dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001202 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001203}
1204
1205void ImageWriter::CopyAndFixupObject(Object* obj) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001206 size_t offset = GetImageOffset(obj);
1207 auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001208 DCHECK_LT(offset, image_end_);
1209 const auto* src = reinterpret_cast<const uint8_t*>(obj);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001210
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001211 image_bitmap_->Set(dst); // Mark the obj as live.
1212
1213 const size_t n = obj->SizeOf();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001214 DCHECK_LE(offset + n, image_->Size());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001215 memcpy(dst, src, n);
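  // The raw copy still contains runtime heap pointers; FixupObject() below rewrites them to
  // their image addresses.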
Mathieu Chartierc7853442015-03-27 14:35:38 -07001216
Mathieu Chartierad2541a2013-10-25 10:05:23 -07001217 // Restore the hash code for objects that had an inflated monitor or a hash code stored in
1218 // their lock word.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001219 const auto it = saved_hashcode_map_.find(obj);
1220 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1221 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001222 FixupObject(obj, dst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001223}
1224
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001225// Rewrite all the references in the copied object to point to their image address equivalent
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001226class FixupVisitor {
1227 public:
1228 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1229 }
1230
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001231 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1232 // with other logic.
1233 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1234 const {}
1235 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1236
1237
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001238 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001239 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Hiroshi Yamauchi6e83c172014-05-01 21:25:41 -07001240 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001241 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1242 // image.
1243 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001244 offset, image_writer_->GetImageAddress(ref));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001245 }
1246
1247 // java.lang.ref.Reference visitor.
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001248 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001249 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001250 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
Ian Rogersb0fa5dc2014-04-28 16:47:08 -07001251 mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001252 }
1253
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001254 protected:
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001255 ImageWriter* const image_writer_;
1256 mirror::Object* const copy_;
1257};
1258
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001259class FixupClassVisitor FINAL : public FixupVisitor {
1260 public:
1261 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1262 }
1263
Mathieu Chartierc7853442015-03-27 14:35:38 -07001264 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -07001265 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001266 DCHECK(obj->IsClass());
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001267 FixupVisitor::operator()(obj, offset, /*is_static*/false);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001268 }
1269
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001270 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1271 mirror::Reference* ref ATTRIBUTE_UNUSED) const
Mathieu Chartierda7c6502015-07-23 16:01:26 -07001272 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001273 LOG(FATAL) << "Reference not expected here.";
1274 }
1275};
1276
Vladimir Marko05792b92015-08-03 11:56:49 +01001277uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
1278 DCHECK(obj != nullptr);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001279 auto it = native_object_relocations_.find(obj);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001280 CHECK(it != native_object_relocations_.end()) << obj;
Mathieu Chartierc0fe56a2015-08-11 13:01:23 -07001281 const NativeObjectRelocation& relocation = it->second;
Vladimir Marko05792b92015-08-03 11:56:49 +01001282 return relocation.offset;
1283}
1284
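// Translates a pointer to a native object (ArtField, ArtMethod, dex cache array) into the
// address it will have in the mapped image; nullptr is translated to nullptr.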
1285template <typename T>
1286T* ImageWriter::NativeLocationInImage(T* obj) {
1287 if (obj == nullptr) {
1288 return nullptr;
1289 }
1290 return reinterpret_cast<T*>(image_begin_ + NativeOffsetInImage(obj));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001291}
1292
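// In addition to ordinary heap references, Class stores native pointers (field and method
// arrays, dex cache strings, embedded vtable/IMT entries) which must be redirected explicitly.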
Mathieu Chartierc7853442015-03-27 14:35:38 -07001293void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001294 // Update the field arrays.
Vladimir Marko05792b92015-08-03 11:56:49 +01001295 copy->SetSFieldsPtrUnchecked(NativeLocationInImage(orig->GetSFieldsPtr()));
1296 copy->SetIFieldsPtrUnchecked(NativeLocationInImage(orig->GetIFieldsPtr()));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001297 // Update direct and virtual method arrays.
Vladimir Marko05792b92015-08-03 11:56:49 +01001298 copy->SetDirectMethodsPtrUnchecked(NativeLocationInImage(orig->GetDirectMethodsPtr()));
1299 copy->SetVirtualMethodsPtr(NativeLocationInImage(orig->GetVirtualMethodsPtr()));
1300 // Update dex cache strings.
1301 copy->SetDexCacheStrings(NativeLocationInImage(orig->GetDexCacheStrings()));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001302 // Fix up embedded tables.
1303 if (orig->ShouldHaveEmbeddedImtAndVTable()) {
1304 for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001305 auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
1306 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001307 copy->SetEmbeddedVTableEntryUnchecked(
1308 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
1309 }
1310 for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001311 auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
1312 CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001313 copy->SetEmbeddedImTableEntry(
1314 i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001315 }
1316 }
1317 FixupClassVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001318 static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001319}
1320
Ian Rogersef7d42f2014-01-06 12:55:46 -08001321void ImageWriter::FixupObject(Object* orig, Object* copy) {
Mathieu Chartierb7ea3ac2014-03-24 16:54:46 -07001322 DCHECK(orig != nullptr);
1323 DCHECK(copy != nullptr);
Hiroshi Yamauchi624468c2014-03-31 15:14:47 -07001324 if (kUseBakerOrBrooksReadBarrier) {
1325 orig->AssertReadBarrierPointer();
1326 if (kUseBrooksReadBarrier) {
1327 // Note the address 'copy' isn't the same as the image address of 'orig'.
1328 copy->SetReadBarrierPointer(GetImageAddress(orig));
1329 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
1330 }
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -08001331 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001332 auto* klass = orig->GetClass();
1333 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
Vladimir Marko05792b92015-08-03 11:56:49 +01001334 // Is this a native pointer array?
Mathieu Chartiere401d142015-04-22 13:56:20 -07001335 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
1336 if (it != pointer_arrays_.end()) {
1337 // Should only need to fixup every pointer array exactly once.
1338 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
1339 pointer_arrays_.erase(it);
1340 return;
1341 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001342 }
Mathieu Chartierc7853442015-03-27 14:35:38 -07001343 if (orig->IsClass()) {
1344 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001345 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001346 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
1347 // Need to go update the ArtMethod.
1348 auto* dest = down_cast<mirror::AbstractMethod*>(copy);
1349 auto* src = down_cast<mirror::AbstractMethod*>(orig);
1350 ArtMethod* src_method = src->GetArtMethod();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001351 auto it = native_object_relocations_.find(src_method);
1352 CHECK(it != native_object_relocations_.end())
1353 << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001354 dest->SetArtMethod(
1355 reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
Vladimir Marko05792b92015-08-03 11:56:49 +01001356 } else if (!klass->IsArrayClass()) {
1357 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1358 if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
1359 FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
1360 } else if (klass->IsSubClass(down_cast<mirror::Class*>(
1361 class_linker->GetClassRoot(ClassLinker::kJavaLangClassLoader)))) {
1362 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
1363 // ClassLoader.
1364 down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
Mathieu Chartier5550c562015-09-22 15:18:04 -07001365 // Also set allocator to null to be safe. The allocator is created when we create the class
1366 // table. We also never expect to unload things in the image since they are held live as
1367 // roots.
1368 down_cast<mirror::ClassLoader*>(copy)->SetAllocator(nullptr);
Vladimir Marko05792b92015-08-03 11:56:49 +01001369 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001370 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001371 FixupVisitor visitor(this, copy);
Mathieu Chartier059ef3d2015-08-18 13:54:21 -07001372 orig->VisitReferences(visitor, visitor);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001373 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001374}
1375
Vladimir Marko05792b92015-08-03 11:56:49 +01001376void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
1377 mirror::DexCache* copy_dex_cache) {
1378 // Though the DexCache array fields are usually treated as native pointers, we set the full
1379 // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
1380 // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
1381 // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
1382 GcRoot<mirror::String>* orig_strings = orig_dex_cache->GetStrings();
1383 if (orig_strings != nullptr) {
1384 uintptr_t copy_strings_offset = NativeOffsetInImage(orig_strings);
1385 copy_dex_cache->SetField64<false>(
1386 mirror::DexCache::StringsOffset(),
1387 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_strings_offset)));
1388 GcRoot<mirror::String>* copy_strings =
1389 reinterpret_cast<GcRoot<mirror::String>*>(image_->Begin() + copy_strings_offset);
1390 for (size_t i = 0, num = orig_dex_cache->NumStrings(); i != num; ++i) {
1391 copy_strings[i] = GcRoot<mirror::String>(GetImageAddress(orig_strings[i].Read()));
1392 }
1393 }
1394 GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes();
1395 if (orig_types != nullptr) {
1396 uintptr_t copy_types_offset = NativeOffsetInImage(orig_types);
1397 copy_dex_cache->SetField64<false>(
1398 mirror::DexCache::ResolvedTypesOffset(),
1399 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_types_offset)));
1400 GcRoot<mirror::Class>* copy_types =
1401 reinterpret_cast<GcRoot<mirror::Class>*>(image_->Begin() + copy_types_offset);
1402 for (size_t i = 0, num = orig_dex_cache->NumResolvedTypes(); i != num; ++i) {
1403 copy_types[i] = GcRoot<mirror::Class>(GetImageAddress(orig_types[i].Read()));
1404 }
1405 }
1406 ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
1407 if (orig_methods != nullptr) {
1408 uintptr_t copy_methods_offset = NativeOffsetInImage(orig_methods);
1409 copy_dex_cache->SetField64<false>(
1410 mirror::DexCache::ResolvedMethodsOffset(),
1411 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_methods_offset)));
1412 ArtMethod** copy_methods =
1413 reinterpret_cast<ArtMethod**>(image_->Begin() + copy_methods_offset);
1414 for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
1415 ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
1416 ArtMethod* copy = NativeLocationInImage(orig);
1417 mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
1418 }
1419 }
1420 ArtField** orig_fields = orig_dex_cache->GetResolvedFields();
1421 if (orig_fields != nullptr) {
1422 uintptr_t copy_fields_offset = NativeOffsetInImage(orig_fields);
1423 copy_dex_cache->SetField64<false>(
1424 mirror::DexCache::ResolvedFieldsOffset(),
1425 static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_fields_offset)));
1426 ArtField** copy_fields = reinterpret_cast<ArtField**>(image_->Begin() + copy_fields_offset);
1427 for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
1428 ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_);
1429 ArtField* copy = NativeLocationInImage(orig);
1430 mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_);
1431 }
1432 }
1433}
1434
Mathieu Chartiere401d142015-04-22 13:56:20 -07001435const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001436 DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001437 !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001438
1439 // Use original code if it exists. Otherwise, set the code pointer to the resolution
1440 // trampoline.
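  // The choice depends on whether compiled code exists, whether the method is native and
  // whether its declaring class has already been initialized.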
1441
1442 // Quick entrypoint:
Jeff Haoc7d11882015-02-03 15:08:39 -08001443 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
1444 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
1445 const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001446 *quick_is_interpreted = false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001447 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
1448 method->GetDeclaringClass()->IsInitialized())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001449 // We have code for a non-static or initialized method, just use the code.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001450 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001451 } else if (quick_code == nullptr && method->IsNative() &&
1452 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
1453 // Non-static or initialized native method missing compiled code, use generic JNI version.
1454 quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001455 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001456 } else if (quick_code == nullptr && !method->IsNative()) {
1457 // We don't have code at all for a non-native method, use the interpreter.
1458 quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
1459 *quick_is_interpreted = true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001460 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001461 } else {
1462 CHECK(!method->GetDeclaringClass()->IsInitialized());
1463 // We have code for a static method, but need to go through the resolution stub for class
1464 // initialization.
1465 quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001466 DCHECK_GE(quick_code, oat_data_begin_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001467 }
1468 return quick_code;
1469}
1470
Mathieu Chartiere401d142015-04-22 13:56:20 -07001471const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001472 // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
1473 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001474 Runtime* runtime = Runtime::Current();
1475 if (UNLIKELY(method == runtime->GetResolutionMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001476 return GetOatAddress(quick_resolution_trampoline_offset_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001477 } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
1478 method == runtime->GetImtUnimplementedMethod())) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001479 return GetOatAddress(quick_imt_conflict_trampoline_offset_);
1480 } else {
1481 // We assume all methods have code. If they don't currently then we set them to use the
1482 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1483 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1484 if (UNLIKELY(method->IsAbstract())) {
1485 return GetOatAddress(quick_to_interpreter_bridge_offset_);
1486 } else {
1487 bool quick_is_interpreted;
1488 return GetQuickCode(method, &quick_is_interpreted);
1489 }
1490 }
1491}
1492
Mathieu Chartiere401d142015-04-22 13:56:20 -07001493void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
Vladimir Marko14632852015-08-17 12:07:23 +01001494 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
Mathieu Chartiere401d142015-04-22 13:56:20 -07001495
1496 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
Vladimir Marko05792b92015-08-03 11:56:49 +01001497
1498 ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
1499 copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
1500 GcRoot<mirror::Class>* orig_resolved_types = orig->GetDexCacheResolvedTypes(target_ptr_size_);
1501 copy->SetDexCacheResolvedTypes(NativeLocationInImage(orig_resolved_types), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001502
Ian Rogers848871b2013-08-05 10:56:33 -07001503 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
1504 // oat_begin_
Brian Carlstrom7940e442013-07-12 13:46:57 -07001505
Ian Rogers848871b2013-08-05 10:56:33 -07001506 // The resolution method has a special trampoline to call.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001507 Runtime* runtime = Runtime::Current();
1508 if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001509 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001510 GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07001511 } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
1512 orig == runtime->GetImtUnimplementedMethod())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001513 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001514 GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001515 } else if (UNLIKELY(orig->IsRuntimeMethod())) {
1516 bool found_one = false;
1517 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
1518 auto idx = static_cast<Runtime::CalleeSaveType>(i);
1519 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
1520 found_one = true;
1521 break;
1522 }
1523 }
1524 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
1525 CHECK(copy->IsRuntimeMethod());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001526 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07001527 // We assume all methods have code. If they don't currently then we set them to use the
1528 // resolution trampoline. Abstract methods never have code and so we need to make sure their
1529 // use results in an AbstractMethodError. We use the interpreter to achieve this.
1530 if (UNLIKELY(orig->IsAbstract())) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001531 copy->SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartier2d721012014-11-10 11:08:06 -08001532 GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001533 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001534 bool quick_is_interpreted;
Ian Rogers13735952014-10-08 12:43:28 -07001535 const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001536 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
Sebastien Hertze1d07812014-05-21 15:44:09 +02001537
Sebastien Hertze1d07812014-05-21 15:44:09 +02001538 // JNI entrypoint:
Ian Rogers848871b2013-08-05 10:56:33 -07001539 if (orig->IsNative()) {
1540 // The native method's pointer is set to a stub to lookup via dlsym.
1541 // Note this is not the code_ pointer; that is handled above.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001542 copy->SetEntryPointFromJniPtrSize(
1543 GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
Ian Rogers848871b2013-08-05 10:56:33 -07001544 }
1545 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001546 }
1547}
1548
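// The OatHeader is located at the start of the oat file's .rodata section, so it can be found
// through the ELF section headers.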
Alex Lighta59dd802014-07-02 16:28:08 -07001549static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
Tong Shen62d1ca32014-09-03 17:24:56 -07001550 uint64_t data_sec_offset;
1551 bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
1552 if (!has_data_sec) {
Alex Lighta59dd802014-07-02 16:28:08 -07001553 return nullptr;
1554 }
Tong Shen62d1ca32014-09-03 17:24:56 -07001555 return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001556}
1557
Vladimir Markof4da6752014-08-01 19:04:18 +01001558void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
Alex Lighta59dd802014-07-02 16:28:08 -07001559 std::string error_msg;
1560 std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ|PROT_WRITE,
1561 MAP_SHARED, &error_msg));
1562 if (elf.get() == nullptr) {
Vladimir Markof4da6752014-08-01 19:04:18 +01001563 LOG(FATAL) << "Unable to open oat file: " << error_msg;
Alex Lighta59dd802014-07-02 16:28:08 -07001564 return;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001565 }
Alex Lighta59dd802014-07-02 16:28:08 -07001566 OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
1567 CHECK(oat_header != nullptr);
1568 CHECK(oat_header->IsValid());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001569
Brian Carlstrom7940e442013-07-12 13:46:57 -07001570 ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
Alex Lighta59dd802014-07-02 16:28:08 -07001571 image_header->SetOatChecksum(oat_header->GetChecksum());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001572}
1573
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001574size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
1575 DCHECK_LE(up_to, kBinSize);
1576 return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
1577}
1578
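// A BinSlot packs a bin index and a byte offset within that bin into a single 32-bit value
// that is temporarily stored in the object's lock word during layout.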
1579ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
1580 // These values may need to get updated if more bins are added to the enum Bin
Mathieu Chartiere401d142015-04-22 13:56:20 -07001581 static_assert(kBinBits == 3, "wrong number of bin bits");
1582 static_assert(kBinShift == 27, "wrong number of shift bits");
Igor Murashkinf5b4c502014-11-14 15:01:59 -08001583 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
1584
1585 DCHECK_LT(GetBin(), kBinSize);
1586 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
1587}
1588
1589ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
1590 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
1591 DCHECK_EQ(index, GetIndex());
1592}
1593
1594ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
1595 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
1596}
1597
1598uint32_t ImageWriter::BinSlot::GetIndex() const {
1599 return lockword_ & ~kBinMask;
1600}
1601
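// The oat file follows the image in memory: the mirror object section, then the native
// sections (fields, methods, dex cache arrays) and the intern table, rounded up to a page.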
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001602uint8_t* ImageWriter::GetOatFileBegin() const {
1603 DCHECK_GT(intern_table_bytes_, 0u);
Vladimir Marko05792b92015-08-03 11:56:49 +01001604 size_t native_sections_size =
1605 bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
1606 bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinDexCacheArray] +
1607 intern_table_bytes_;
1608 return image_begin_ + RoundUp(image_end_ + native_sections_size, kPageSize);
Mathieu Chartierd39645e2015-06-09 17:50:29 -07001609}
1610
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001611ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
1612 switch (type) {
1613 case kNativeObjectRelocationTypeArtField:
1614 case kNativeObjectRelocationTypeArtFieldArray:
1615 return kBinArtField;
1616 case kNativeObjectRelocationTypeArtMethodClean:
1617 case kNativeObjectRelocationTypeArtMethodArrayClean:
1618 return kBinArtMethodClean;
1619 case kNativeObjectRelocationTypeArtMethodDirty:
1620 case kNativeObjectRelocationTypeArtMethodArrayDirty:
1621 return kBinArtMethodDirty;
Vladimir Marko05792b92015-08-03 11:56:49 +01001622 case kNativeObjectRelocationTypeDexCacheArray:
1623 return kBinDexCacheArray;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001624 }
1625 UNREACHABLE();
1626}
1627
Brian Carlstrom7940e442013-07-12 13:46:57 -07001628} // namespace art