/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "handle_scope-inl.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;
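// When kBinObjects is true, each object is first tagged with a BinSlot (a bin plus a byte
// index within that bin) in AssignImageBinSlot(); the bins are later laid out back-to-back in
// CalculateNewObjectOffsets(), so an object's final image offset becomes
// bin_slot_offsets_[bin] + index (see AssignImageOffset()).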

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}
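
// Overview: PrepareImageAddressSpace() prunes classes that should not be part of the image,
// runs a GC to reclaim them, assigns every remaining object a bin slot and a final offset
// (CalculateNewObjectOffsets()), and reserves the backing memory; Write() then performs the
// actual copying and file output.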
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    Thread::Current()->TransitionFromSuspendedToRunnable();
    PruneNonImageClasses();  // Remove junk
    ComputeLazyFieldsForImageClasses();  // Add useful information

    Thread::Current()->TransitionFromRunnableToSuspended(kNative);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  Thread::Current()->TransitionFromSuspendedToRunnable();
  CalculateNewObjectOffsets();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  CHECK_EQ(class_linker->RegisterOatFile(oat_file_), oat_file_);

  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);

  Thread::Current()->TransitionFromSuspendedToRunnable();

  CreateHeader(oat_loaded_size, oat_data_offset);
  CopyAndFixupNativeData();
  // TODO: heap validation can't handle these fix up passes.
  Runtime::Current()->GetHeap()->DisableObjectValidation();
  CopyAndFixupObjects();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields + methods.
  const auto write_count = image_header->GetImageSize();
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  const ImageSection& bitmap_section = image_header->GetImageSection(
      ImageHeader::kSectionImageBitmap);
  CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         bitmap_section.Size(), bitmap_section.Offset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}
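
// While the image is being laid out, each object's lock word is reused as scratch space: it
// holds first the BinSlot and later the final image offset, both encoded in the
// LockWord::kForwardingAddress state. Any real hash code is saved in saved_hashcode_map_
// (see SetImageBinSlot() below) and is presumably restored when the object is copied into
// the image.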
void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}
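
// The resolved types/methods/fields/strings arrays of every dex cache get fixed, pre-computed
// slots in the kBinDexCacheArray bin so that compiled code can address them PC-relative; see
// the bin layout comments in AssignImageBinSlot().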
void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  uint32_t size = 0u;
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    auto types_size = layout.TypesSize(dex_file->NumTypeIds());
    auto methods_size = layout.MethodsSize(dex_file->NumMethodIds());
    auto fields_size = layout.FieldsSize(dex_file->NumFieldIds());
    auto strings_size = layout.StringsSize(dex_file->NumStringIds());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedTypes(),
        DexCacheArrayLocation {size + layout.TypesOffset(), types_size, kBinRegular});
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedMethods(),
        DexCacheArrayLocation {size + layout.MethodsOffset(), methods_size, kBinArtMethodClean});
    AddMethodPointerArray(dex_cache->GetResolvedMethods());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedFields(),
        DexCacheArrayLocation {size + layout.FieldsOffset(), fields_size, kBinArtField});
    pointer_arrays_.emplace(dex_cache->GetResolvedFields(), kBinArtField);
    dex_cache_array_indexes_.Put(
        dex_cache->GetStrings(),
        DexCacheArrayLocation {size + layout.StringsOffset(), strings_size, kBinRegular});
    size += layout.Size();
    CHECK_EQ(layout.Size(), types_size + methods_size + fields_size + strings_size);
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        auto* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || IsImageClass(klass)) << PrettyClass(klass)
            << " should be an image class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the beginning.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //     becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->IsArrayInstance()) {
      mirror::Class* klass = object->GetClass<kVerifyNone>();
      if (klass->IsObjectArrayClass() || klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
        auto it = dex_cache_array_indexes_.find(object);
        if (it != dex_cache_array_indexes_.end()) {
          bin = kBinDexCacheArray;
          // Use the prepared offset defined by the DexCacheArraysLayout.
          current_offset = it->second.offset_;
          // Override in case of cross compilation.
          object_size = it->second.length_;
        }  // else bin = kBinRegular
      }
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  if (bin != kBinDexCacheArray) {
    DCHECK(dex_cache_array_indexes_.find(object) == dex_cache_array_indexes_.end()) << object;
    current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
    // Move the current bin size up to accommodate the object we just assigned a bin slot.
    bin_slot_sizes_[bin] += offset_delta;
  }

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}
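
// A BinSlot packs a bin and a byte index within that bin into the 32-bit forwarding-address
// payload of the lock word; the exact bit split is defined by BinSlot in image_writer.h.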
ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() + intern_table_bytes_,
                                kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap", image_->Begin(), RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

bool ImageWriter::IsImageClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

class NonImageClassesVisitor : public ClassVisitor {
 public:
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->IsImageClass(klass)) {
      std::string temp;
      non_image_classes_.insert(klass->GetDescriptor(&temp));
    }
    return true;
  }

  std::set<std::string> non_image_classes_;
  ImageWriter* const image_writer_;
};
void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : visitor.non_image_classes_) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  const ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (jobject weak_root : class_linker->GetDexCaches()) {
    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
    if (dex_cache == nullptr) {
      continue;
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    auto* resolved_methods = down_cast<mirror::PointerArray*>(dex_cache->GetResolvedMethods());
    for (size_t i = 0, len = resolved_methods->GetLength(); i < len; i++) {
      auto* method = resolved_methods->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but have a
        // declaring class which is an image class. Set it to the resolution method to be safe and
        // prevent dangling pointers.
        if (method->IsMiranda() || !IsImageClass(declaring_class)) {
          resolved_methods->SetElementPtrSize(i, resolution_method, target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
          << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not interned already.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    // InternStrongImageString allows us to intern while holding the heap bitmap lock. This is safe
    // since we are guaranteed to not have GC during image writing.
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
        obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned obj is after us, allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}
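
// Builds the array of roots (the dex caches plus the class roots) that the runtime needs to
// restore its state when the image is loaded; its address is later stored in
// image_roots_address_ for the image header.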
ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then again to copy the dex caches. We check
  // that the number of dex caches does not change.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    size_t i = 0;
    for (jobject weak_root : class_linker->GetDexCaches()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
      dex_caches->Set<false>(i, dex_cache);
      ++i;
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  }
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    }
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
  }
}

// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields.
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects.
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // Visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
      // Visit and assign offsets for fields and field arrays.
      auto* as_klass = h_obj->AsClass();
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
              << " already forwarded";
          size_t& offset = bin_slot_sizes_[kBinArtField];
          native_object_relocations_.emplace(
              cur_fields, NativeObjectRelocation {
                  offset, kNativeObjectRelocationTypeArtFieldArray });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
            // Need to forward arrays separately from fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            native_object_relocations_.emplace(
                field, NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      LengthPrefixedArray<ArtMethod>* method_arrays[] = {
          as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
      };
      for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
        if (array == nullptr) {
          continue;
        }
        bool any_dirty = false;
        size_t count = 0;
        const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
        const size_t method_size = ArtMethod::Size(target_ptr_size_);
        auto iteration_range =
            MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
        for (auto& m : iteration_range) {
          any_dirty = any_dirty || WillMethodBeDirty(&m);
          ++count;
        }
        NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
            kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
                                                                               method_size,
                                                                               method_alignment);
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end()) << "Method array " << array
            << " already forwarded";
        size_t& offset = bin_slot_sizes_[bin_type];
        native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
            any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
                kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : iteration_range) {
          AssignMethodOffset(&m, type);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += count;
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}
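
// ArtFields and ArtMethods are native objects rather than mirror objects, so they cannot carry
// a forwarding address in a lock word; their targets are tracked in native_object_relocations_
// instead, keyed by native pointer, with offsets that stay bin-relative until the bin sums are
// added in CalculateNewObjectOffsets().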
void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
  auto it = native_object_relocations_.find(method);
  CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
      << PrettyMethod(method);
  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
  offset += ArtMethod::Size(target_ptr_size_);
}

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);
}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->UnbinObjectsIntoOffset(obj);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lock word from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
}

void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));

  auto* runtime = Runtime::Current();
  auto* heap = runtime->GetHeap();
  DCHECK_EQ(0U, image_end_);

  // Leave space for the header, but do not write it yet, we need to
  // know where image_roots is going to end up.
  image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit-alignment

  image_objects_offset_begin_ = image_end_;
  // Prepare bin slots for dex cache arrays.
  PrepareDexCacheArraySlots();
  // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
  heap->VisitObjects(WalkFieldsCallback, this);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
  image_methods_[ImageHeader::kRefsOnlySaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
  image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);

  // Add room for fake length prefixed array.
  const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
  auto it = native_object_relocations_.find(&image_method_array_);
  CHECK(it == native_object_relocations_.end());
  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
  native_object_relocations_.emplace(&image_method_array_,
                                     NativeObjectRelocation { offset, image_method_type });
  size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
  const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
      0, ArtMethod::Size(target_ptr_size_), method_alignment);
  CHECK_ALIGNED_PARAM(array_size, method_alignment);
  offset += array_size;
  for (auto* m : image_methods_) {
    CHECK(m != nullptr);
    CHECK(m->IsRuntimeMethod());
    AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
  }

  // Calculate bin slot offsets.
  size_t bin_offset = image_objects_offset_begin_;
  for (size_t i = 0; i != kBinSize; ++i) {
    bin_slot_offsets_[i] = bin_offset;
    bin_offset += bin_slot_sizes_[i];
    if (i == kBinArtField) {
      static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
      static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
      DCHECK_ALIGNED(bin_offset, 4u);
      DCHECK(method_alignment == 4u || method_alignment == 8u);
      bin_offset = RoundUp(bin_offset, method_alignment);
    }
  }
  // NOTE: There may be additional padding between the bin slots and the intern table.
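  // In other words, bin_slot_offsets_ is the exclusive prefix sum of bin_slot_sizes_ starting
  // at image_objects_offset_begin_ (sizes {a, b, c, ...} give offsets {B, B+a, B+a+b, ...}
  // with B = image_objects_offset_begin_), plus one extra RoundUp so the method bins start on
  // a method_alignment boundary.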

  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);

  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));

  // Update the native relocations by adding their bin sums.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
    relocation.offset += bin_slot_offsets_[bin_type];
  }

  // Calculate how big the intern table will be after being serialized.
  auto* const intern_table = Runtime::Current()->GetInternTable();
  CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
  intern_table_bytes_ = intern_table->WriteToMemory(nullptr);

  // Note that image_end_ is left at end of used mirror object section.
}
995
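// Lay out the ImageSection descriptors and write the ImageHeader: mirror objects first, then
// ArtFields, then ArtMethods (realigned as needed), then the serialized intern table, with the
// image bitmap tracked as a separate page-aligned section. The oat file is expected to be
// mapped directly after the image.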
void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
  CHECK_NE(0U, oat_loaded_size);
  const uint8_t* oat_file_begin = GetOatFileBegin();
  const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
  oat_data_begin_ = oat_file_begin + oat_data_offset;
  const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();

  // Create the image sections.
  ImageSection sections[ImageHeader::kSectionCount];
  // Objects section.
  auto* objects_section = &sections[ImageHeader::kSectionObjects];
  *objects_section = ImageSection(0u, image_end_);
  size_t cur_pos = objects_section->End();
  // Add field section.
  auto* field_section = &sections[ImageHeader::kSectionArtFields];
  *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
  CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
  cur_pos = field_section->End();
  // Round up to the alignment required by the method section.
  cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size_));
  // Add method section.
  auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
  *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
                                  bin_slot_sizes_[kBinArtMethodDirty]);
  CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
  cur_pos = methods_section->End();
  // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
  cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
  // Calculate the size of the interned strings.
  auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
  *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
  cur_pos = interned_strings_section->End();
  // Finally bitmap section.
  const size_t bitmap_bytes = image_bitmap_->Size();
  auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
  *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
  cur_pos = bitmap_section->End();
  if (kIsDebugBuild) {
    size_t idx = 0;
    for (const ImageSection& section : sections) {
      LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
      ++idx;
    }
    LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
  }
  const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
  CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
      "Oat file should be right after the image.";
  // Create the header.
  new (image_->Begin()) ImageHeader(
      PointerToLowMemUInt32(image_begin_), image_end,
      sections, image_roots_address_, oat_file_->GetOatHeader().GetChecksum(),
      PointerToLowMemUInt32(oat_file_begin), PointerToLowMemUInt32(oat_data_begin_),
      PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), target_ptr_size_,
      compile_pic_);
}

ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
  auto it = native_object_relocations_.find(method);
  CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
  CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
  return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
}

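// Root visitor that rewrites each visited root in place to the address the referenced object
// will have once the image is mapped at image_begin_. Used below to fix up the roots of the
// intern table that was serialized into the image.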
class FixupRootVisitor : public RootVisitor {
 public:
  explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
  }

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      *roots[i] = ImageAddress(*roots[i]);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
    }
  }

 private:
  ImageWriter* const image_writer_;

  mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
    const size_t offset = image_writer_->GetImageOffset(obj);
    auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
    VLOG(compiler) << "Update root from " << obj << " to " << dest;
    return dest;
  }
};

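// Copy the native (non-mirror) data into the image: ArtFields, ArtMethods and their
// length-prefixed array headers, the image method roots recorded in the header, and the
// serialized intern table.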
void ImageWriter::CopyAndFixupNativeData() {
  // Copy ArtFields and methods to their locations and update the array for convenience.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    auto* dest = image_->Begin() + relocation.offset;
    DCHECK_GE(dest, image_->Begin() + image_end_);
    switch (relocation.type) {
      case kNativeObjectRelocationTypeArtField: {
        memcpy(dest, pair.first, sizeof(ArtField));
        reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
            GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
        break;
      }
      case kNativeObjectRelocationTypeArtMethodClean:
      case kNativeObjectRelocationTypeArtMethodDirty: {
        CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
                           reinterpret_cast<ArtMethod*>(dest));
        break;
      }
      // For arrays, copy just the header since the elements will get copied by their corresponding
      // relocations.
      case kNativeObjectRelocationTypeArtFieldArray: {
        memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
        break;
      }
      case kNativeObjectRelocationTypeArtMethodArrayClean:
      case kNativeObjectRelocationTypeArtMethodArrayDirty: {
        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
            0,
            ArtMethod::Size(target_ptr_size_),
            ArtMethod::Alignment(target_ptr_size_)));
        break;
      }
    }
  }
  // Fixup the image method roots.
  auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  const ImageSection& methods_section = image_header->GetMethodsSection();
  for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
    auto* m = image_methods_[i];
    CHECK(m != nullptr);
    auto it = native_object_relocations_.find(m);
    CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
    NativeObjectRelocation& relocation = it->second;
    CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
                                                       << methods_section;
    CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
    auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
    image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
  }
  // Write the intern table into the image.
  const ImageSection& intern_table_section = image_header->GetImageSection(
      ImageHeader::kSectionInternedStrings);
  InternTable* const intern_table = Runtime::Current()->GetInternTable();
  uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
  const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
  // Fixup the pointers in the newly written intern table to contain image addresses.
  InternTable temp_table;
  // Note that we require ReadFromMemory to not make an internal copy of the elements, so that
  // VisitRoots() updates the serialized memory directly rather than copies.
  // This also relies on the root visiting not doing any verification, which could fail after we
  // update the roots to be the image addresses.
  temp_table.ReadFromMemory(memory_ptr);
  CHECK_EQ(temp_table.Size(), intern_table->Size());
  FixupRootVisitor visitor(this);
  temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
  CHECK_EQ(intern_table_bytes, intern_table_bytes_);
}

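// Copy all mirror objects into the image, then restore the lock words of the source heap
// objects whose hash codes were stashed in saved_hashcode_map_ while their lock words held
// bin slot forwarding information.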
void ImageWriter::CopyAndFixupObjects() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->VisitObjects(CopyAndFixupObjectsCallback, this);
  // Fix up the objects that previously had hash codes.
  for (const auto& hash_pair : saved_hashcode_map_) {
    Object* obj = hash_pair.first;
    DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
    obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
  }
  saved_hashcode_map_.clear();
}

void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
  DCHECK(obj != nullptr);
  DCHECK(arg != nullptr);
  reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
}

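// Pointer arrays are int[]/long[] instances that hold native pointers to ArtFields or
// ArtMethods (e.g. dex cache arrays). Relocate each element through
// native_object_relocations_ instead of treating it as a heap reference.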
void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
                                    mirror::Class* klass, Bin array_type) {
  CHECK(klass->IsArrayClass());
  CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
  // Fixup int and long pointers for the ArtMethod or ArtField arrays.
  const size_t num_elements = arr->GetLength();
  dst->SetClass(GetImageAddress(arr->GetClass()));
  auto* dest_array = down_cast<mirror::PointerArray*>(dst);
  for (size_t i = 0, count = num_elements; i < count; ++i) {
    auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
    if (elem != nullptr) {
      auto it = native_object_relocations_.find(elem);
      if (it == native_object_relocations_.end()) {
        // `it` is the end iterator here, so it must not be dereferenced; decide between the
        // method and field diagnostics from the array's bin type instead.
        if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
          auto* method = reinterpret_cast<ArtMethod*>(elem);
          LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
                     << method << " idx=" << i << "/" << num_elements << " with declaring class "
                     << PrettyClass(method->GetDeclaringClass());
        } else {
          CHECK_EQ(array_type, kBinArtField);
          auto* field = reinterpret_cast<ArtField*>(elem);
          LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
                     << field << " idx=" << i << "/" << num_elements << " with declaring class "
                     << PrettyClass(field->GetDeclaringClass());
        }
      } else {
        elem = image_begin_ + it->second.offset;
      }
    }
    dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
  }
}

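// Copy a single mirror object into the image: memcpy the raw bytes, mark the destination in
// the image bitmap, install a fresh lock word (carrying the saved hash code, if any), then
// fix up all references inside the copy.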
void ImageWriter::CopyAndFixupObject(Object* obj) {
  size_t offset = GetImageOffset(obj);
  auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
  DCHECK_LT(offset, image_end_);
  const auto* src = reinterpret_cast<const uint8_t*>(obj);

  image_bitmap_->Set(dst);  // Mark the obj as live.

  const size_t n = obj->SizeOf();
  DCHECK_LE(offset + n, image_->Size());
  memcpy(dst, src, n);

  // Write in the hash code of objects that have inflated monitors or a hash code in their
  // monitor word.
  const auto it = saved_hashcode_map_.find(obj);
  dst->SetLockWord(it != saved_hashcode_map_.end() ?
      LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
  FixupObject(obj, dst);
}

// Rewrite all the references in the copied object to point to their image address equivalent.
class FixupVisitor {
 public:
  FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
  }

  // Ignore class roots since we don't have a way to map them to the destination. These are handled
  // with other logic.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
    // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
    // image.
    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
        offset, image_writer_->GetImageAddress(ref));
  }

  // java.lang.ref.Reference visitor.
  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
        mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
  }

 protected:
  ImageWriter* const image_writer_;
  mirror::Object* const copy_;
};

class FixupClassVisitor FINAL : public FixupVisitor {
 public:
  FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
  }

  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK(obj->IsClass());
    FixupVisitor::operator()(obj, offset, /*is_static*/false);
  }

  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
                  mirror::Reference* ref ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    LOG(FATAL) << "Reference not expected here.";
  }
};

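// Translate a native object (ArtField, ArtMethod, or one of their length-prefixed arrays) to
// the address it will have in the mapped image, using its recorded relocation offset.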
void* ImageWriter::NativeLocationInImage(void* obj) {
  if (obj == nullptr) {
    return nullptr;
  }
  auto it = native_object_relocations_.find(obj);
  CHECK(it != native_object_relocations_.end()) << obj;
  const NativeObjectRelocation& relocation = it->second;
  return reinterpret_cast<void*>(image_begin_ + relocation.offset);
}

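// Fix up a copied mirror::Class: redirect its native field/method array pointers and embedded
// vtable/IMT entries into the image, then visit the remaining heap references.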
void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
  // Update the field arrays.
  copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
      NativeLocationInImage(orig->GetSFieldsPtr())));
  copy->SetIFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
      NativeLocationInImage(orig->GetIFieldsPtr())));
  // Update direct and virtual method arrays.
  copy->SetDirectMethodsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
      NativeLocationInImage(orig->GetDirectMethodsPtr())));
  copy->SetVirtualMethodsPtr(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
      NativeLocationInImage(orig->GetVirtualMethodsPtr())));
  // Fix up embedded tables.
  if (orig->ShouldHaveEmbeddedImtAndVTable()) {
    for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
      auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
      CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
      copy->SetEmbeddedVTableEntryUnchecked(
          i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
    }
    for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
      auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
      CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
      copy->SetEmbeddedImTableEntry(
          i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
    }
  }
  FixupClassVisitor visitor(this, copy);
  static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
}

void ImageWriter::FixupObject(Object* orig, Object* copy) {
  DCHECK(orig != nullptr);
  DCHECK(copy != nullptr);
  if (kUseBakerOrBrooksReadBarrier) {
    orig->AssertReadBarrierPointer();
    if (kUseBrooksReadBarrier) {
      // Note the address 'copy' isn't the same as the image address of 'orig'.
      copy->SetReadBarrierPointer(GetImageAddress(orig));
      DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
    }
  }
  auto* klass = orig->GetClass();
  if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
    // Is this a native dex cache array?
    auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
    if (it != pointer_arrays_.end()) {
      // Should only need to fixup every pointer array exactly once.
      FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
      pointer_arrays_.erase(it);
      return;
    }
    CHECK(dex_cache_array_indexes_.find(orig) == dex_cache_array_indexes_.end())
        << "Should have been pointer array.";
  }
  if (orig->IsClass()) {
    FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
  } else {
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Need to go update the ArtMethod.
      auto* dest = down_cast<mirror::AbstractMethod*>(copy);
      auto* src = down_cast<mirror::AbstractMethod*>(orig);
      ArtMethod* src_method = src->GetArtMethod();
      auto it = native_object_relocations_.find(src_method);
      CHECK(it != native_object_relocations_.end())
          << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
      dest->SetArtMethod(
          reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
    } else if (!klass->IsArrayClass() && klass->IsSubClass(down_cast<mirror::Class*>(
        Thread::Current()->DecodeJObject(WellKnownClasses::java_lang_ClassLoader)))) {
      // If src is a ClassLoader, set the class table to null so that it gets recreated by the
      // ClassLoader.
      down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
    }
    FixupVisitor visitor(this, copy);
    orig->VisitReferences(visitor, visitor);
  }
}

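// Select the quick entrypoint for a compiled method: its own code when it is safe to use
// directly, the generic JNI stub for native methods without code, the interpreter bridge for
// non-native methods without code, or the resolution stub when the declaring class still
// needs initialization.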
const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
         !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);

  // Use original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.

  // Quick entrypoint:
  uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
  const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
  *quick_is_interpreted = false;
  if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
      method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
    DCHECK_GE(quick_code, oat_data_begin_);
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
    DCHECK_GE(quick_code, oat_data_begin_);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
    *quick_is_interpreted = true;
    DCHECK_GE(quick_code, oat_data_begin_);
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
    DCHECK_GE(quick_code, oat_data_begin_);
  }
  return quick_code;
}

const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
  // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (UNLIKELY(method == runtime->GetResolutionMethod())) {
    return GetOatAddress(quick_resolution_trampoline_offset_);
  } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
                      method == runtime->GetImtUnimplementedMethod())) {
    return GetOatAddress(quick_imt_conflict_trampoline_offset_);
  } else {
    // We assume all methods have code. If they don't currently, then we set them to use the
    // resolution trampoline. Abstract methods never have code, and so we need to make sure
    // their use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(method->IsAbstract())) {
      return GetOatAddress(quick_to_interpreter_bridge_offset_);
    } else {
      bool quick_is_interpreted;
      return GetQuickCode(method, &quick_is_interpreted);
    }
  }
}

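// Copy an ArtMethod into the image and rewrite its pointers: declaring class, dex cache
// resolved-method and resolved-type arrays, and the quick/JNI entrypoints, chosen by the same
// rules as GetQuickCode() and GetQuickEntryPoint() above.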
void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
  memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));

  copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
  copy->SetDexCacheResolvedMethods(GetImageAddress(orig->GetDexCacheResolvedMethods()));
  copy->SetDexCacheResolvedTypes(GetImageAddress(orig->GetDexCacheResolvedTypes()));

  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
  // oat_begin_.

  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
    copy->SetEntryPointFromQuickCompiledCodePtrSize(
        GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
  } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
                      orig == runtime->GetImtUnimplementedMethod())) {
    copy->SetEntryPointFromQuickCompiledCodePtrSize(
        GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
  } else if (UNLIKELY(orig->IsRuntimeMethod())) {
    bool found_one = false;
    for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
      auto idx = static_cast<Runtime::CalleeSaveType>(i);
      if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
        found_one = true;
        break;
      }
    }
    CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
    CHECK(copy->IsRuntimeMethod());
  } else {
    // We assume all methods have code. If they don't currently, then we set them to use the
    // resolution trampoline. Abstract methods never have code, and so we need to make sure
    // their use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(orig->IsAbstract())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub that looks the symbol up via dlsym.
        // Note this is not the code_ pointer; that is handled above.
        copy->SetEntryPointFromJniPtrSize(
            GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
      }
    }
  }
}

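// The oat data (and thus the OatHeader) is located at the start of the .rodata section of the
// ELF oat file.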
static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
  uint64_t data_sec_offset;
  bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
  if (!has_data_sec) {
    return nullptr;
  }
  return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
}

void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
  std::string error_msg;
  std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ|PROT_WRITE,
                                             MAP_SHARED, &error_msg));
  if (elf.get() == nullptr) {
    LOG(FATAL) << "Unable to open oat file: " << error_msg;
    return;
  }
  OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
  CHECK(oat_header != nullptr);
  CHECK(oat_header->IsValid());

  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  image_header->SetOatChecksum(oat_header->GetChecksum());
}

size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
}

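// A BinSlot encodes a bin and a byte index within that bin in the 32 bits of an object's lock
// word: the bin kind occupies kBinBits bits starting at kBinShift, and the remaining low bits
// hold the object's offset within the bin.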
ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to get updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong number of shift bits");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

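// The oat file starts at the page-aligned end of the image data: mirror objects, then the
// field and method bins, then the serialized intern table.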
uint8_t* ImageWriter::GetOatFileBegin() const {
  DCHECK_GT(intern_table_bytes_, 0u);
  return image_begin_ + RoundUp(
      image_end_ + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
      bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize);
}

ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
  switch (type) {
    case kNativeObjectRelocationTypeArtField:
    case kNativeObjectRelocationTypeArtFieldArray:
      return kBinArtField;
    case kNativeObjectRelocationTypeArtMethodClean:
    case kNativeObjectRelocationTypeArtMethodArrayClean:
      return kBinArtMethodClean;
    case kNativeObjectRelocationTypeArtMethodDirty:
    case kNativeObjectRelocationTypeArtMethodArrayDirty:
      return kBinArtMethodDirty;
  }
  UNREACHABLE();
}

}  // namespace art