/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/art_method-inl.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::ArtMethod;
using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::EntryPointFromInterpreter;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;
static constexpr bool kComputeEagerResolvedStrings = false;

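// Debug check: no com.android.dex.Dex objects may survive into the image; they wrap memory
// buffers of mapped dex files that are only valid during the compiler's own run (see the note
// in PrepareImageAddressSpace below).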
static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

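// Lay out the image ahead of writing: prune non-image classes, collect the garbage this frees,
// allocate the backing memory, and assign every surviving object its final image offset.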
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    Thread::Current()->TransitionFromSuspendedToRunnable();
    PruneNonImageClasses();  // Remove junk.
    ComputeLazyFieldsForImageClasses();  // Add useful information.

    // Calling this can in theory fill in some resolved strings. However, in practice it seems to
    // never resolve any.
    if (kComputeEagerResolvedStrings) {
      ComputeEagerResolvedStrings();
    }
    Thread::Current()->TransitionFromRunnableToSuspended(kNative);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (!AllocMemory()) {
    return false;
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  Thread::Current()->TransitionFromSuspendedToRunnable();
  CalculateNewObjectOffsets();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  return true;
}

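// Serialize the prepared image: record the trampoline/bridge offsets from the oat header, run
// the copy/fixup passes, then write the image contents followed by the page-aligned image
// bitmap to image_filename.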
bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  CHECK_EQ(class_linker->RegisterOatFile(oat_file_), oat_file_);

  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), oat_loaded_size, oat_data_offset);

  Thread::Current()->TransitionFromSuspendedToRunnable();
  CreateHeader(oat_loaded_size, oat_data_offset);
  // TODO: heap validation can't handle these fix up passes.
  Runtime::Current()->GetHeap()->DisableObjectValidation();
  CopyAndFixupNativeData();
  CopyAndFixupObjects();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields.
  const auto write_count = image_header->GetImageSize() + image_header->GetArtFieldsSize();
  CHECK_EQ(image_end_, image_header->GetImageSize());
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  CHECK_ALIGNED(image_header->GetImageBitmapOffset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         image_header->GetImageBitmapSize(),
                         image_header->GetImageBitmapOffset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(image_header->GetImageBitmapOffset() + image_header->GetImageBitmapSize(),
           static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}

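// Record an object's final image offset by stashing a forwarding address in its lock word,
// carrying over any hash code that was saved when the bin slot was assigned.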
void ImageWriter::SetImageOffset(mirror::Object* object,
                                 ImageWriter::BinSlot bin_slot,
                                 size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);
  mirror::Object* obj = reinterpret_cast<mirror::Object*>(image_->Begin() + offset);
  DCHECK_ALIGNED(obj, kObjectAlignment);

  static size_t max_offset = 0;
  max_offset = std::max(max_offset, offset);
  image_bitmap_->Set(obj);  // Mark the obj as mutated, since we will end up changing it.
  {
    // Remember the object-inside-of-the-image's hash code so we can restore it after the copy.
    auto hash_it = saved_hashes_map_.find(bin_slot);
    if (hash_it != saved_hashes_map_.end()) {
      std::pair<BinSlot, uint32_t> slot_hash = *hash_it;
      saved_hashes_.push_back(std::make_pair(obj, slot_hash.second));
      saved_hashes_map_.erase(hash_it);
    }
  }
  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK(IsImageOffsetAssigned(object));
}

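// The final offset is the start of the object section plus the combined size of all earlier
// bins plus the object's byte index within its own bin.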
void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t previous_bin_sizes = bin_slot_previous_sizes_[bin_slot.GetBin()];
  size_t new_offset = image_objects_offset_begin_ + previous_bin_sizes + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, bin_slot, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

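// Stash a bin slot in the object's lock word. The monitor is deflated first and any hash code
// is saved in saved_hashes_map_ so it can be restored after the object is copied.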
void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      saved_hashes_map_[bin_slot] = lw.GetHashCode();
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(static_cast<uint32_t>(bin_slot)),
                      false);
  DCHECK(IsImageBinSlotAssigned(object));
}

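// Reserve contiguous space in the kBinDexCacheArray bin for every dex cache's resolved-types,
// resolved-methods, resolved-fields and strings arrays, using the fixed DexCacheArraysLayout
// so generated code can address them PC-relatively.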
void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  uint32_t size = 0u;
  for (size_t idx = 0; idx < dex_cache_count; ++idx) {
    DexCache* dex_cache = class_linker->GetDexCache(idx);
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    auto types_size = layout.TypesSize(dex_file->NumTypeIds());
    auto methods_size = layout.MethodsSize(dex_file->NumMethodIds());
    auto fields_size = layout.FieldsSize(dex_file->NumFieldIds());
    auto strings_size = layout.StringsSize(dex_file->NumStringIds());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedTypes(),
        DexCacheArrayLocation {size + layout.TypesOffset(), types_size});
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedMethods(),
        DexCacheArrayLocation {size + layout.MethodsOffset(), methods_size});
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedFields(),
        DexCacheArrayLocation {size + layout.FieldsOffset(), fields_size});
    dex_cache_array_indexes_.Put(
        dex_cache->GetStrings(),
        DexCacheArrayLocation {size + layout.StringsOffset(), strings_size});
    size += layout.Size();
    CHECK_EQ(layout.Size(), types_size + methods_size + fields_size + strings_size);
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the beginning.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declaring classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->IsArtMethod<kVerifyNone>()) {
      mirror::ArtMethod* art_method = down_cast<ArtMethod*>(object);
      if (art_method->IsNative()) {
        bin = kBinArtMethodNative;
      } else {
        mirror::Class* declaring_class = art_method->GetDeclaringClass();
        if (declaring_class->GetStatus() != Class::kStatusInitialized) {
          bin = kBinArtMethodNotInitialized;
        } else {
          // This is highly unlikely to dirty since there are no entry points to mutate.
          bin = kBinArtMethodsManagedInitialized;
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->IsArrayInstance()) {
      mirror::Class* klass = object->GetClass<kVerifyNone>();
      auto* component_type = klass->GetComponentType();
      if (!component_type->IsPrimitive() || component_type->IsPrimitiveInt() ||
          component_type->IsPrimitiveLong()) {
        auto it = dex_cache_array_indexes_.find(object);
        if (it != dex_cache_array_indexes_.end()) {
          bin = kBinDexCacheArray;
          // Use the offset prepared by the DexCacheArraysLayout.
          current_offset = it->second.offset_;
          // Override in case of cross compilation.
          object_size = it->second.length_;
        }  // else bin = kBinRegular
      }
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  if (bin != kBinDexCacheArray) {
    current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
    // Move the current bin size up to accommodate the object we just assigned a bin slot.
    bin_slot_sizes_[bin] += offset_delta;
  }

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  DCHECK_LT(GetBinSizeSum(), image_->Size());

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
  DCHECK_LT(image_end_, image_->Size());
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lock word, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

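// Recover the bin slot stashed in the lock word. A BinSlot round-trips through a uint32_t
// (the bin plus the byte index within that bin), which is why it fits in the
// forwarding-address state of the lock word.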
ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t.
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

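// Allocate the anonymous mapping that backs the image being generated, sized from the current
// heap footprint plus linear alloc usage (room for the ArtFields), along with its object bitmap.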
bool ImageWriter::AllocMemory() {
  auto* runtime = Runtime::Current();
  const size_t heap_size = runtime->GetHeap()->GetTotalMemory();
  // Add linear alloc usage since we need to have room for the ArtFields.
  const size_t length = RoundUp(heap_size + runtime->GetLinearAlloc()->GetUsedMemory(), kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap.
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create("image bitmap", image_->Begin(),
                                                                    RoundUp(length, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  class_linker->VisitClassesWithoutClassesLock(ComputeLazyFieldsForClassesVisitor, nullptr);
}

bool ImageWriter::ComputeLazyFieldsForClassesVisitor(Class* c, void* /*arg*/) {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  mirror::Class::ComputeName(hs.NewHandle(c));
  return true;
}

// Collect all the java.lang.String objects in the heap and put them in the output strings_ array.
class StringCollector {
 public:
  StringCollector(Handle<mirror::ObjectArray<mirror::String>> strings, size_t index)
      : strings_(strings), index_(index) {
  }
  static void Callback(Object* obj, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    auto* collector = reinterpret_cast<StringCollector*>(arg);
    if (obj->GetClass()->IsStringClass()) {
      collector->strings_->SetWithoutChecks<false>(collector->index_++, obj->AsString());
    }
  }
  size_t GetIndex() const {
    return index_;
  }

 private:
  Handle<mirror::ObjectArray<mirror::String>> strings_;
  size_t index_;
};

// Compare strings lexicographically by their UTF-16 contents, used for sorting strings.
class LexicographicalStringComparator {
 public:
  bool operator()(const mirror::HeapReference<mirror::String>& lhs,
                  const mirror::HeapReference<mirror::String>& rhs) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    mirror::String* lhs_s = lhs.AsMirrorPtr();
    mirror::String* rhs_s = rhs.AsMirrorPtr();
    uint16_t* lhs_begin = lhs_s->GetValue();
    uint16_t* rhs_begin = rhs_s->GetValue();
    return std::lexicographical_compare(lhs_begin, lhs_begin + lhs_s->GetLength(),
                                        rhs_begin, rhs_begin + rhs_s->GetLength());
  }
};

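// For each string in the heap, look it up in every dex file's string table and, when it is
// found there and still unresolved, install it as that dex cache's resolved string.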
void ImageWriter::ComputeEagerResolvedStringsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED) {
  if (!obj->GetClass()->IsStringClass()) {
    return;
  }
  mirror::String* string = obj->AsString();
  const uint16_t* utf16_string = string->GetValue();
  size_t utf16_length = static_cast<size_t>(string->GetLength());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  for (size_t i = 0; i < dex_cache_count; ++i) {
    DexCache* dex_cache = class_linker->GetDexCache(i);
    const DexFile& dex_file = *dex_cache->GetDexFile();
    const DexFile::StringId* string_id;
    if (UNLIKELY(utf16_length == 0)) {
      string_id = dex_file.FindStringId("");
    } else {
      string_id = dex_file.FindStringId(utf16_string, utf16_length);
    }
    if (string_id != nullptr) {
      // This string occurs in this dex file, assign the dex cache entry.
      uint32_t string_idx = dex_file.GetIndexForStringId(*string_id);
      if (dex_cache->GetResolvedString(string_idx) == nullptr) {
        dex_cache->SetResolvedString(string_idx, string);
      }
    }
  }
}

void ImageWriter::ComputeEagerResolvedStrings() {
  Runtime::Current()->GetHeap()->VisitObjects(ComputeEagerResolvedStringsCallback, this);
}

bool ImageWriter::IsImageClass(Class* klass) {
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

struct NonImageClasses {
  ImageWriter* image_writer;
  std::set<std::string>* non_image_classes;
};

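// Drop classes that are not part of the configured image class set, then scrub dangling
// references to them from every dex cache so the pruned objects can be garbage collected.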
void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();

  // Make a list of classes we would like to prune.
  std::set<std::string> non_image_classes;
  NonImageClasses context;
  context.image_writer = this;
  context.non_image_classes = &non_image_classes;
  class_linker->VisitClasses(NonImageClassesVisitor, &context);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : non_image_classes) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  ArtMethod* resolution_method = runtime->GetResolutionMethod();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  for (size_t idx = 0; idx < dex_cache_count; ++idx) {
    DexCache* dex_cache = class_linker->GetDexCache(idx);
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedMethods(); i++) {
      ArtMethod* method = dex_cache->GetResolvedMethod(i);
      if (method != nullptr && !IsImageClass(method->GetDeclaringClass())) {
        dex_cache->SetResolvedMethod(i, resolution_method);
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, sizeof(void*));
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, sizeof(void*));
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }
}

bool ImageWriter::NonImageClassesVisitor(Class* klass, void* arg) {
  NonImageClasses* context = reinterpret_cast<NonImageClasses*>(arg);
  if (!context->image_writer->IsImageClass(klass)) {
    std::string temp;
    context->non_image_classes->insert(klass->GetDescriptor(&temp));
  }
  return true;
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
                                               << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not interned already.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    mirror::String* const interned = obj->AsString()->Intern();
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned obj is after us, allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case.
  }

  AssignImageBinSlot(obj);
}

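// Build the array of well-known roots (resolution/IMT/callee-save methods, the dex caches
// array, and the class roots) that the runtime uses to bootstrap itself from the image.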
ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then again to copy the dex caches. We check
  // that the number of dex caches does not change in between.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    for (size_t i = 0; i < dex_cache_count; ++i) {
      dex_caches->Set<false>(i, class_linker->GetDexCache(i));
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  Handle<ObjectArray<Object>> image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kResolutionMethod, runtime->GetResolutionMethod());
  image_roots->Set<false>(ImageHeader::kImtConflictMethod, runtime->GetImtConflictMethod());
  image_roots->Set<false>(ImageHeader::kImtUnimplementedMethod,
                          runtime->GetImtUnimplementedMethod());
  image_roots->Set<false>(ImageHeader::kDefaultImt, runtime->GetDefaultImt());
  image_roots->Set<false>(ImageHeader::kCalleeSaveMethod,
                          runtime->GetCalleeSaveMethod(Runtime::kSaveAll));
  image_roots->Set<false>(ImageHeader::kRefsOnlySaveMethod,
                          runtime->GetCalleeSaveMethod(Runtime::kRefsOnly));
  image_roots->Set<false>(ImageHeader::kRefsAndArgsSaveMethod,
                          runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 781 | // Walk instance fields of the given Class. Separate function to allow recursion on the super |
| 782 | // class. |
| 783 | void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) { |
| 784 | // Visit fields of parent classes first. |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 785 | StackHandleScope<1> hs(Thread::Current()); |
| 786 | Handle<mirror::Class> h_class(hs.NewHandle(klass)); |
| 787 | mirror::Class* super = h_class->GetSuperClass(); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 788 | if (super != nullptr) { |
| 789 | WalkInstanceFields(obj, super); |
| 790 | } |
| 791 | // |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 792 | size_t num_reference_fields = h_class->NumReferenceInstanceFields(); |
Vladimir Marko | 76649e8 | 2014-11-10 18:32:59 +0000 | [diff] [blame] | 793 | MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset(); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 794 | for (size_t i = 0; i < num_reference_fields; ++i) { |
Ian Rogers | b0fa5dc | 2014-04-28 16:47:08 -0700 | [diff] [blame] | 795 | mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 796 | if (value != nullptr) { |
| 797 | WalkFieldsInOrder(value); |
| 798 | } |
Vladimir Marko | 76649e8 | 2014-11-10 18:32:59 +0000 | [diff] [blame] | 799 | field_offset = MemberOffset(field_offset.Uint32Value() + |
| 800 | sizeof(mirror::HeapReference<mirror::Object>)); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 801 | } |
| 802 | } |

// For an unvisited object, visit it, then all of its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of the GC visitor) to get better locality between
  // an object and its fields.
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects.
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // Visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset();
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }

      // Visit and assign bin-slot offsets for the class's ArtFields.
      ArtField* fields[2] = { h_obj->AsClass()->GetSFields(), h_obj->AsClass()->GetIFields() };
      size_t num_fields[2] = { h_obj->AsClass()->NumStaticFields(),
                               h_obj->AsClass()->NumInstanceFields() };
      for (size_t i = 0; i < 2; ++i) {
        for (size_t j = 0; j < num_fields[i]; ++j) {
          auto* field = fields[i] + j;
          auto it = art_field_reloc_.find(field);
          CHECK(it == art_field_reloc_.end()) << "Field at index " << i << ":" << j
                                              << " already assigned " << PrettyField(field);
          art_field_reloc_.emplace(field, bin_slot_sizes_[kBinArtField]);
          bin_slot_sizes_[kBinArtField] += sizeof(ArtField);
        }
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);
}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->UnbinObjectsIntoOffset(obj);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  CHECK(obj != nullptr);

  // We know the bin slot and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.
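  //
  // Sketch of that computation (illustrative; the actual logic lives in AssignImageOffset):
  //   size_t image_offset = image_objects_offset_begin_ +
  //       bin_slot_previous_sizes_[bin_slot.GetBin()] + bin_slot.GetIndex();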

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lock word from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
}

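// Performs image layout. Roughly three phases (see the helpers above): assign every reachable
// object a BinSlot, compute each bin's starting offset via the prefix sum below, then convert
// the bin slots into final image offsets.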
void ImageWriter::CalculateNewObjectOffsets() {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));

  gc::Heap* heap = Runtime::Current()->GetHeap();
  DCHECK_EQ(0U, image_end_);

  // Leave space for the header, but do not write it yet; we need to
  // know where image_roots is going to end up.
  image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit alignment.

  DCHECK_LT(image_end_, image_->Size());
  image_objects_offset_begin_ = image_end_;
  // Prepare bin slots for dex cache arrays.
  PrepareDexCacheArraySlots();
  // Clear any pre-existing monitors which may have been in the monitor words and assign bin slots.
  heap->VisitObjects(WalkFieldsCallback, this);
  // Calculate cumulative bin slot sizes.
  size_t previous_sizes = 0u;
  for (size_t i = 0; i != kBinSize; ++i) {
    bin_slot_previous_sizes_[i] = previous_sizes;
    previous_sizes += bin_slot_sizes_[i];
  }
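  // Illustrative example with made-up sizes: if the first three bins have sizes
  // {0x200, 0x80, 0x40}, the loop above yields bin_slot_previous_sizes_ = {0x0, 0x200, 0x280},
  // i.e. an exclusive prefix sum giving each bin's start relative to image_objects_offset_begin_.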
  DCHECK_EQ(previous_sizes, GetBinSizeSum());
  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
  // All bin slot hashes should have been moved into the vector by now.
  DCHECK(saved_hashes_map_.empty());

  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));

  // Note that image_end_ is left at the end of the used mirror object space.
}

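// Writes the ImageHeader in place at image_->Begin(). Resulting image layout (sketch):
//   [ImageHeader][mirror objects up to image_end_][ArtField section][pad to page][image bitmap]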
void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
  CHECK_NE(0U, oat_loaded_size);
  const uint8_t* oat_file_begin = GetOatFileBegin();
  const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
  oat_data_begin_ = oat_file_begin + oat_data_offset;
  const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();
  // Write out sections.
  size_t cur_pos = image_end_;
  // Add fields.
  auto fields_offset = cur_pos;
  CHECK_EQ(image_objects_offset_begin_ + GetBinSizeSum(kBinArtField), fields_offset);
  auto fields_size = bin_slot_sizes_[kBinArtField];
  cur_pos += fields_size;
  // Return to write the header at the start of the image with the future location of image_roots.
  // At this point, image_end_ is the size of the image (excluding bitmaps and ArtFields).
  const size_t bitmap_bytes = image_bitmap_->Size();
  auto bitmap_offset = RoundUp(cur_pos, kPageSize);
  auto bitmap_size = RoundUp(bitmap_bytes, kPageSize);
  cur_pos += bitmap_size;
  new (image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_begin_),
                                    static_cast<uint32_t>(image_end_),
                                    fields_offset, fields_size,
                                    bitmap_offset, bitmap_size,
                                    image_roots_address_,
                                    oat_file_->GetOatHeader().GetChecksum(),
                                    PointerToLowMemUInt32(oat_file_begin),
                                    PointerToLowMemUInt32(oat_data_begin_),
                                    PointerToLowMemUInt32(oat_data_end),
                                    PointerToLowMemUInt32(oat_file_end),
                                    compile_pic_);
}

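// Note on art_field_reloc_: before this runs, it maps each runtime ArtField* to its offset
// within the ArtField bin; the loop below rebases each entry so that it becomes the field's
// offset within the image itself.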
void ImageWriter::CopyAndFixupNativeData() {
  // Copy ArtFields to their locations and update the relocation map for convenience.
  auto fields_offset = image_objects_offset_begin_ + GetBinSizeSum(kBinArtField);
  for (auto& pair : art_field_reloc_) {
    pair.second += fields_offset;
    auto* dest = image_->Begin() + pair.second;
    DCHECK_GE(dest, image_->Begin() + image_end_);
    memcpy(dest, pair.first, sizeof(ArtField));
    reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
        down_cast<Class*>(GetImageAddress(pair.first->GetDeclaringClass())));
  }
}

void ImageWriter::CopyAndFixupObjects() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->VisitObjects(CopyAndFixupObjectsCallback, this);
  // Fix up the objects that previously had hash codes.
  for (const std::pair<mirror::Object*, uint32_t>& hash_pair : saved_hashes_) {
    Object* obj = hash_pair.first;
    DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0U);
    obj->SetLockWord(LockWord::FromHashCode(hash_pair.second, 0U), false);
  }
  saved_hashes_.clear();
}

void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
  DCHECK(obj != nullptr);
  DCHECK(arg != nullptr);
  reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
}

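// Dex cache field arrays store raw ArtField* values in primitive arrays: int[] for 32-bit
// targets, long[] for 64-bit targets. If obj is such an array, copy it while rewriting each
// element to the field's future address in the image, and return true so that the generic
// object copy path is skipped.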
bool ImageWriter::CopyAndFixupIfDexCacheFieldArray(mirror::Object* dst, mirror::Object* obj,
                                                   mirror::Class* klass) {
  if (!klass->IsArrayClass()) {
    return false;
  }
  auto* component_type = klass->GetComponentType();
  bool is_int_arr = component_type->IsPrimitiveInt();
  bool is_long_arr = component_type->IsPrimitiveLong();
  if (!is_int_arr && !is_long_arr) {
    return false;
  }
  auto it = dex_cache_array_indexes_.find(obj);  // Is this a dex cache array?
  if (it == dex_cache_array_indexes_.end()) {
    return false;
  }
  mirror::Array* arr = obj->AsArray();
  CHECK_EQ(reinterpret_cast<Object*>(
      image_->Begin() + it->second.offset_ + image_objects_offset_begin_), dst);
  dex_cache_array_indexes_.erase(it);
  // Fix up the ArtField pointers stored in the field array.
  CHECK(!arr->IsObjectArray());
  const size_t num_elements = arr->GetLength();
  if (target_ptr_size_ == 4) {
    // Will get fixed up by fixup object.
    dst->SetClass(down_cast<mirror::Class*>(
        GetImageAddress(mirror::IntArray::GetArrayClass())));
  } else {
    DCHECK_EQ(target_ptr_size_, 8u);
    dst->SetClass(down_cast<mirror::Class*>(
        GetImageAddress(mirror::LongArray::GetArrayClass())));
  }
  mirror::Array* dest_array = down_cast<mirror::Array*>(dst);
  dest_array->SetLength(num_elements);
  for (size_t i = 0, count = num_elements; i < count; ++i) {
    ArtField* field = reinterpret_cast<ArtField*>(is_int_arr ?
        arr->AsIntArray()->GetWithoutChecks(i) : arr->AsLongArray()->GetWithoutChecks(i));
    uint8_t* fixup_location = nullptr;
    if (field != nullptr) {
      auto it2 = art_field_reloc_.find(field);
      CHECK(it2 != art_field_reloc_.end()) << "No relocation for field " << PrettyField(field);
      fixup_location = image_begin_ + it2->second;
    }
    if (target_ptr_size_ == 4) {
      down_cast<mirror::IntArray*>(dest_array)->SetWithoutChecks<kVerifyNone>(
          i, static_cast<uint32_t>(reinterpret_cast<uint64_t>(fixup_location)));
    } else {
      down_cast<mirror::LongArray*>(dest_array)->SetWithoutChecks<kVerifyNone>(
          i, reinterpret_cast<uint64_t>(fixup_location));
    }
  }
  dst->SetLockWord(LockWord::Default(), false);
  return true;
}

void ImageWriter::CopyAndFixupObject(Object* obj) {
  // See GetLocalAddress for a similar computation.
  size_t offset = GetImageOffset(obj);
  auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
  const uint8_t* src = reinterpret_cast<const uint8_t*>(obj);
  size_t n;
  mirror::Class* klass = obj->GetClass();

  if (CopyAndFixupIfDexCacheFieldArray(dst, obj, klass)) {
    return;
  }
  if (klass->IsArtMethodClass()) {
    // Size without pointer fields since we don't want to overrun the buffer if the target
    // ArtMethod is 32-bit but the source is 64-bit.
    n = mirror::ArtMethod::SizeWithoutPointerFields(target_ptr_size_);
  } else {
    n = obj->SizeOf();
  }
  DCHECK_LE(offset + n, image_->Size());
  memcpy(dst, src, n);

  // Reset the copy's lock word; hash codes of objects that had inflated monitors or hash codes
  // in their lock words were saved earlier and are restored in CopyAndFixupObjects().
  dst->SetLockWord(LockWord::Default(), false);
  FixupObject(obj, dst);
}

// Rewrite all the references in the copied object to point to their image address equivalents.
class FixupVisitor {
 public:
  FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
    // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
    // image.
    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
        offset, image_writer_->GetImageAddress(ref));
  }

  // java.lang.ref.Reference visitor.
  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
        mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
  }

 protected:
  ImageWriter* const image_writer_;
  mirror::Object* const copy_;
};

class FixupClassVisitor FINAL : public FixupVisitor {
 public:
  FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
  }

  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK(obj->IsClass());
    FixupVisitor::operator()(obj, offset, /*is_static*/false);
  }

  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
                  mirror::Reference* ref ATTRIBUTE_UNUSED) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    LOG(FATAL) << "Reference not expected here.";
  }
};

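// Fixes up a copied Class. In addition to the regular reference fixup, the native sfields_ and
// ifields_ pointers must be redirected to the ArtField copies that CopyAndFixupNativeData()
// places in the image's field section.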
void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
  // Fix up the class's pointers to its ArtField arrays; the fields themselves are copied in
  // CopyAndFixupNativeData().
  ArtField* fields[2] = { orig->AsClass()->GetSFields(), orig->AsClass()->GetIFields() };
  size_t num_fields[2] = { orig->AsClass()->NumStaticFields(),
                           orig->AsClass()->NumInstanceFields() };
  // Update the arrays.
  for (size_t i = 0; i < 2; ++i) {
    if (num_fields[i] == 0) {
      CHECK(fields[i] == nullptr);
      continue;
    }
    auto it = art_field_reloc_.find(fields[i]);
    CHECK(it != art_field_reloc_.end()) << PrettyClass(orig->AsClass()) << " : "
                                        << PrettyField(fields[i]);
    auto* image_fields = reinterpret_cast<ArtField*>(image_begin_ + it->second);
    if (i == 0) {
      down_cast<Class*>(copy)->SetSFieldsUnchecked(image_fields);
    } else {
      down_cast<Class*>(copy)->SetIFieldsUnchecked(image_fields);
    }
  }
  FixupClassVisitor visitor(this, copy);
  static_cast<mirror::Object*>(orig)->VisitReferences<true /*visit class*/>(visitor, visitor);
}

void ImageWriter::FixupObject(Object* orig, Object* copy) {
  DCHECK(orig != nullptr);
  DCHECK(copy != nullptr);
  if (kUseBakerOrBrooksReadBarrier) {
    orig->AssertReadBarrierPointer();
    if (kUseBrooksReadBarrier) {
      // Note the address 'copy' isn't the same as the image address of 'orig'.
      copy->SetReadBarrierPointer(GetImageAddress(orig));
      DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
    }
  }
  if (orig->IsClass()) {
    FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
  } else {
    FixupVisitor visitor(this, copy);
    orig->VisitReferences<true /*visit class*/>(visitor, visitor);
  }
  if (orig->IsArtMethod<kVerifyNone>()) {
    FixupMethod(orig->AsArtMethod<kVerifyNone>(), down_cast<ArtMethod*>(copy));
  }
}

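// Summary of the entry point cases handled below (sketch):
//   - compiled code exists and the method is non-static, a constructor, or its class is
//     initialized: use the compiled code;
//   - native with no code, and non-static or initialized: generic JNI trampoline;
//   - non-native with no code: quick-to-interpreter bridge (*quick_is_interpreted = true);
//   - otherwise (static method of an uninitialized class): resolution trampoline.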
const uint8_t* ImageWriter::GetQuickCode(mirror::ArtMethod* method, bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
         !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);

  // Use the original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.

  // Quick entrypoint:
  uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
  const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
  *quick_is_interpreted = false;
  if (quick_code != nullptr &&
      (!method->IsStatic() || method->IsConstructor() ||
       method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
    *quick_is_interpreted = true;
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
  }
  return quick_code;
}

const uint8_t* ImageWriter::GetQuickEntryPoint(mirror::ArtMethod* method) {
  // Calculate the quick entry point following the same logic as FixupMethod() below.
  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (UNLIKELY(method == runtime->GetResolutionMethod())) {
    return GetOatAddress(quick_resolution_trampoline_offset_);
  } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
                      method == runtime->GetImtUnimplementedMethod())) {
    return GetOatAddress(quick_imt_conflict_trampoline_offset_);
  } else {
    // We assume all methods have code. If they don't currently, then we set them to use the
    // resolution trampoline. Abstract methods never have code, so we need to make sure their
    // use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(method->IsAbstract())) {
      return GetOatAddress(quick_to_interpreter_bridge_offset_);
    } else {
      bool quick_is_interpreted;
      return GetQuickCode(method, &quick_is_interpreted);
    }
  }
}

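// Fixes up a copied ArtMethod. The object body was copied bytewise without its pointer-sized
// trailing fields (see CopyAndFixupObject), so the interpreter, JNI, and quick-code entrypoints
// are rewritten here at the target pointer size.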
void ImageWriter::FixupMethod(ArtMethod* orig, ArtMethod* copy) {
  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative
  // to oat_begin_.
  // For 64-bit targets we need to repack the current runtime pointer-sized fields to the right
  // locations.
  // Copy all of the fields from the runtime methods to the target methods first since we did a
  // bytewise copy earlier.
  copy->SetEntryPointFromInterpreterPtrSize<kVerifyNone>(
      orig->GetEntryPointFromInterpreterPtrSize(target_ptr_size_), target_ptr_size_);
  copy->SetEntryPointFromJniPtrSize<kVerifyNone>(
      orig->GetEntryPointFromJniPtrSize(target_ptr_size_), target_ptr_size_);
  copy->SetEntryPointFromQuickCompiledCodePtrSize<kVerifyNone>(
      orig->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_), target_ptr_size_);

  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
    copy->SetEntryPointFromQuickCompiledCodePtrSize<kVerifyNone>(
        GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
  } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
                      orig == runtime->GetImtUnimplementedMethod())) {
    copy->SetEntryPointFromQuickCompiledCodePtrSize<kVerifyNone>(
        GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
  } else {
    // We assume all methods have code. If they don't currently, then we set them to use the
    // resolution trampoline. Abstract methods never have code, so we need to make sure their
    // use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(orig->IsAbstract())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize<kVerifyNone>(
          GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
      copy->SetEntryPointFromInterpreterPtrSize<kVerifyNone>(
          reinterpret_cast<EntryPointFromInterpreter*>(const_cast<uint8_t*>(
              GetOatAddress(interpreter_to_interpreter_bridge_offset_))), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize<kVerifyNone>(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub to look up via dlsym.
        // Note this is not the code_ pointer, which is handled above.
        copy->SetEntryPointFromJniPtrSize<kVerifyNone>(GetOatAddress(jni_dlsym_lookup_offset_),
                                                       target_ptr_size_);
      }

      // Interpreter entrypoint:
      // Set the interpreter entrypoint depending on whether there is compiled code or not.
      uint32_t interpreter_code = (quick_is_interpreted)
          ? interpreter_to_interpreter_bridge_offset_
          : interpreter_to_compiled_code_bridge_offset_;
      EntryPointFromInterpreter* interpreter_entrypoint =
          reinterpret_cast<EntryPointFromInterpreter*>(
              const_cast<uint8_t*>(GetOatAddress(interpreter_code)));
      copy->SetEntryPointFromInterpreterPtrSize<kVerifyNone>(
          interpreter_entrypoint, target_ptr_size_);
    }
  }
}

static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
  uint64_t data_sec_offset;
  bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
  if (!has_data_sec) {
    return nullptr;
  }
  return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
}

void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
  std::string error_msg;
  std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ|PROT_WRITE,
                                             MAP_SHARED, &error_msg));
  if (elf.get() == nullptr) {
    LOG(FATAL) << "Unable to open oat file: " << error_msg;
    return;
  }
  OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
  CHECK(oat_header != nullptr);
  CHECK(oat_header->IsValid());

  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  image_header->SetOatChecksum(oat_header->GetChecksum());
}

size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
}

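// BinSlot packing within the 32-bit lock word (per the static_asserts below):
//   bits 31..28: the Bin the object was assigned to (kBinBits == 4);
//   bits 27..0:  the object's byte index within that bin, kObjectAlignment-aligned.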
ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to be updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 4, "wrong number of bin bits");
  static_assert(kBinShift == 28, "wrong bin shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

void ImageWriter::FreeStringDataArray() {
  if (string_data_array_ != nullptr) {
    gc::space::LargeObjectSpace* los = Runtime::Current()->GetHeap()->GetLargeObjectsSpace();
    if (los != nullptr) {
      los->Free(Thread::Current(), reinterpret_cast<mirror::Object*>(string_data_array_));
    }
  }
}

}  // namespace art