/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_IMAGE_WRITER_H_
#define ART_COMPILER_IMAGE_WRITER_H_

#include <stdint.h>
#include "base/memory_tool.h"

#include <cstddef>
#include <memory>
#include <ostream>
#include <set>
#include <string>
#include <unordered_map>
#include <unordered_set>

#include "base/bit_utils.h"
#include "base/macros.h"
#include "driver/compiler_driver.h"
#include "gc/space/space.h"
#include "length_prefixed_array.h"
#include "lock_word.h"
#include "mem_map.h"
#include "mirror/dex_cache.h"
#include "oat_file.h"
#include "os.h"
#include "safe_map.h"
#include "utils.h"

namespace art {
namespace gc {
namespace space {
class ImageSpace;
}  // namespace space
}  // namespace gc

static constexpr int kInvalidImageFd = -1;

// Write a Space built during compilation for use during execution.
class ImageWriter FINAL {
 public:
  ImageWriter(const CompilerDriver& compiler_driver,
              uintptr_t image_begin,
              bool compile_pic,
              bool compile_app_image)
      : compiler_driver_(compiler_driver),
        image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
        image_end_(0),
        image_objects_offset_begin_(0),
        image_roots_address_(0),
        oat_file_(nullptr),
        oat_data_begin_(nullptr),
        compile_pic_(compile_pic),
        compile_app_image_(compile_app_image),
        boot_image_space_(nullptr),
        target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
        bin_slot_sizes_(),
        bin_slot_offsets_(),
        bin_slot_count_(),
        intern_table_bytes_(0u),
        image_method_array_(ImageHeader::kImageMethodsCount),
        dirty_methods_(0u),
        clean_methods_(0u) {
    CHECK_NE(image_begin, 0U);
    std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
    std::fill_n(oat_address_offsets_, arraysize(oat_address_offsets_), 0);
  }
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 80 | |
Andreas Gampe | 245ee00 | 2014-12-04 21:25:04 -0800 | [diff] [blame] | 81 | ~ImageWriter() { |
Andreas Gampe | 245ee00 | 2014-12-04 21:25:04 -0800 | [diff] [blame] | 82 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 83 | |
Vladimir Marko | f4da675 | 2014-08-01 19:04:18 +0100 | [diff] [blame] | 84 | bool PrepareImageAddressSpace(); |
| 85 | |
| 86 | bool IsImageAddressSpaceReady() const { |
| 87 | return image_roots_address_ != 0u; |
| 88 | } |

  template <typename T>
  T* GetImageAddress(T* object) const SHARED_REQUIRES(Locks::mutator_lock_) {
    return (object == nullptr || IsInBootImage(object))
        ? object
        : reinterpret_cast<T*>(image_begin_ + GetImageOffset(object));
  }
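
  // Example use of GetImageAddress() above (an illustrative sketch; `writer` and `obj` are
  // hypothetical locals, not members of this class):
  //
  //   mirror::Object* runtime_addr = writer.GetImageAddress(obj);
  //   // Yields image_begin_ + GetImageOffset(obj), or `obj` unchanged when it is null or
  //   // already resides in the boot image.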

  ArtMethod* GetImageMethodAddress(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_);

  template <typename PtrType>
  PtrType GetDexCacheArrayElementImageAddress(const DexFile* dex_file, uint32_t offset)
      const SHARED_REQUIRES(Locks::mutator_lock_) {
    auto it = dex_cache_array_starts_.find(dex_file);
    DCHECK(it != dex_cache_array_starts_.end());
    return reinterpret_cast<PtrType>(
        image_begin_ + bin_slot_offsets_[kBinDexCacheArray] + it->second + offset);
  }
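
  // Illustrative decomposition of the address returned above (`elem_offset` is a hypothetical
  // byte offset of one element within the dex file's cache arrays):
  //
  //   image_begin_                              // runtime base address of the image
  //     + bin_slot_offsets_[kBinDexCacheArray]  // start of the dex cache array bin
  //     + <start recorded for dex_file in dex_cache_array_starts_>
  //     + elem_offset                           // element within this dex file's arrays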

  uint8_t* GetOatFileBegin() const;

  // If image_fd is not kInvalidImageFd, then we use that for the file. Otherwise we open
  // image_filename.
  bool Write(int image_fd,
             const std::string& image_filename,
             const std::string& oat_filename,
             const std::string& oat_location)
      REQUIRES(!Locks::mutator_lock_);
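
  // Typical call sequence (an illustrative sketch; the driver, image base, and file names are
  // hypothetical - the real call sites live in dex2oat):
  //
  //   ImageWriter writer(*driver, image_base,
  //                      /*compile_pic*/ false, /*compile_app_image*/ false);
  //   CHECK(writer.PrepareImageAddressSpace());  // Prunes classes, assigns offsets.
  //   // ... emit the oat file using the assigned addresses ...
  //   CHECK(writer.Write(kInvalidImageFd, image_filename, oat_filename, oat_location));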

  uintptr_t GetOatDataBegin() {
    return reinterpret_cast<uintptr_t>(oat_data_begin_);
  }

 private:
  bool AllocMemory();

  // Mark the objects defined in this space in the given live bitmap.
  void RecordImageAllocations() SHARED_REQUIRES(Locks::mutator_lock_);

  // Classify different kinds of bins that objects end up getting packed into during image writing.
  enum Bin {
    // Likely-clean:
    kBinString,                        // [String] Almost always immutable (except for obj header).
    // Unknown mix of clean/dirty:
    kBinRegular,
    // Likely-dirty:
    // All classes get their own bins since their fields are often dirty.
    kBinClassInitializedFinalStatics,  // Class initializers have been run, no non-final statics.
    kBinClassInitialized,              // Class initializers have been run.
    kBinClassVerified,                 // Class verified, but initializers haven't been run.
    // Add more bins here if we add more segregation code.
    // Non-mirror fields must be below.
    // ArtFields should always be clean.
    kBinArtField,
    // If the class is initialized, then the ArtMethods are probably clean.
    kBinArtMethodClean,
    // ArtMethods may be dirty if the class has native methods or a declaring class that isn't
    // initialized.
    kBinArtMethodDirty,
    // Dex cache arrays have a special slot for PC-relative addressing. Since they are huge,
    // their dirtiness is not important for the clean/dirty separation, so we arbitrarily keep
    // them at the end of the native data.
    kBinDexCacheArray,                 // Arrays belonging to dex cache.
    kBinSize,
    // Number of bins which are for mirror objects.
    kBinMirrorCount = kBinArtField,
  };
  friend std::ostream& operator<<(std::ostream& stream, const Bin& bin);

  enum NativeObjectRelocationType {
    kNativeObjectRelocationTypeArtField,
    kNativeObjectRelocationTypeArtFieldArray,
    kNativeObjectRelocationTypeArtMethodClean,
    kNativeObjectRelocationTypeArtMethodArrayClean,
    kNativeObjectRelocationTypeArtMethodDirty,
    kNativeObjectRelocationTypeArtMethodArrayDirty,
    kNativeObjectRelocationTypeDexCacheArray,
  };
  friend std::ostream& operator<<(std::ostream& stream, const NativeObjectRelocationType& type);

  enum OatAddress {
    kOatAddressInterpreterToInterpreterBridge,
    kOatAddressInterpreterToCompiledCodeBridge,
    kOatAddressJNIDlsymLookup,
    kOatAddressQuickGenericJNITrampoline,
    kOatAddressQuickIMTConflictTrampoline,
    kOatAddressQuickResolutionTrampoline,
    kOatAddressQuickToInterpreterBridge,
    // Number of elements in the enum.
    kOatAddressCount,
  };
  friend std::ostream& operator<<(std::ostream& stream, const OatAddress& oat_address);

  static constexpr size_t kBinBits = MinimumBitsToStore<uint32_t>(kBinMirrorCount - 1);
  // uint32 = typeof(lockword_)
  // Subtract read barrier bits since we want these to remain 0, or else it may result in DCHECK
  // failures due to invalid read barrier bits during object field reads.
  static const size_t kBinShift = BitSizeOf<uint32_t>() - kBinBits -
      LockWord::kReadBarrierStateSize;
  // 111000.....0
  static const size_t kBinMask = ((static_cast<size_t>(1) << kBinBits) - 1) << kBinShift;
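
  // Worked example of the intended layout (illustrative; the read barrier bit count comes from
  // LockWord): with kBinMirrorCount == 5, kBinBits == MinimumBitsToStore(4) == 3, so
  //
  //   [ read barrier bits | bin # (kBinBits bits) | bin index (kBinShift bits) ]
  //     most significant                                       least significant
  //
  // and kBinMask selects exactly the bin-number bits.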

  // We use the lock word to store the bin # and bin index of the object in the image.
  //
  // The struct size must be exactly sizeof(LockWord), currently 32-bits, since this will end up
  // stored in the lock word bit-for-bit when object forwarding addresses are being calculated.
  struct BinSlot {
    explicit BinSlot(uint32_t lockword);
    BinSlot(Bin bin, uint32_t index);

    // The bin an object belongs to, i.e. regular, class/verified, class/initialized, etc.
    Bin GetBin() const;
    // The offset in bytes from the beginning of the bin. Aligned to object size.
    uint32_t GetIndex() const;
    // Pack into a single uint32_t, for storing into a lock word.
    uint32_t Uint32Value() const { return lockword_; }
    // Comparison operator for map support.
    bool operator<(const BinSlot& other) const { return lockword_ < other.lockword_; }

   private:
    // Must be the same size as LockWord, any larger and we would truncate the data.
    const uint32_t lockword_;
  };
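
  // Round-trip sketch (illustrative; the bin and index values are arbitrary):
  //
  //   BinSlot slot(kBinRegular, /*index*/ 64u);  // 64 bytes into the regular bin.
  //   uint32_t packed = slot.Uint32Value();      // Stored bit-for-bit in the object's lock word.
  //   BinSlot decoded(packed);                   // Later recovered when assigning image offsets.
  //   DCHECK(decoded.GetBin() == kBinRegular && decoded.GetIndex() == 64u);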

  // We use the lock word to store the offset of the object in the image.
  void AssignImageOffset(mirror::Object* object, BinSlot bin_slot)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SetImageOffset(mirror::Object* object, size_t offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsImageOffsetAssigned(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  size_t GetImageOffset(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_);
  void UpdateImageOffset(mirror::Object* obj, uintptr_t offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void PrepareDexCacheArraySlots() SHARED_REQUIRES(Locks::mutator_lock_);
  void AssignImageBinSlot(mirror::Object* object) SHARED_REQUIRES(Locks::mutator_lock_);
  void SetImageBinSlot(mirror::Object* object, BinSlot bin_slot)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsImageBinSlotAssigned(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  BinSlot GetImageBinSlot(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_);

  void AddDexCacheArrayRelocation(void* array, size_t offset) SHARED_REQUIRES(Locks::mutator_lock_);
  void AddMethodPointerArray(mirror::PointerArray* arr) SHARED_REQUIRES(Locks::mutator_lock_);

  static void* GetImageAddressCallback(void* writer, mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return reinterpret_cast<ImageWriter*>(writer)->GetImageAddress(obj);
  }

  mirror::Object* GetLocalAddress(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    size_t offset = GetImageOffset(object);
    uint8_t* dst = image_->Begin() + offset;
    return reinterpret_cast<mirror::Object*>(dst);
  }
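
  // Note the distinction between the two address spaces used here (illustrative): for an object
  // placed in this image,
  //
  //   GetLocalAddress(obj) == image_->Begin() + GetImageOffset(obj)   // writable local copy
  //   GetImageAddress(obj) == image_begin_    + GetImageOffset(obj)   // address once mapped
  //
  // so fixup code writes through the local address while the values it stores are image addresses.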

  // Returns the address in the boot image if we are compiling the app image.
  const uint8_t* GetOatAddress(OatAddress type) const;

  const uint8_t* GetOatAddressForOffset(uint32_t offset) const {
    // With Quick, code is within the OatFile, as it is all in one .o ELF object.
    DCHECK_LE(offset, oat_file_->Size());
    DCHECK(oat_data_begin_ != nullptr);
    return offset == 0u ? nullptr : oat_data_begin_ + offset;
  }
| 257 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 258 | // Returns true if the class was in the original requested image classes list. |
Mathieu Chartier | da5b28a | 2015-11-05 08:03:47 -0800 | [diff] [blame] | 259 | bool KeepClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 260 | |
| 261 | // Debug aid that list of requested image classes. |
| 262 | void DumpImageClasses(); |
| 263 | |
| 264 | // Preinitializes some otherwise lazy fields (such as Class name) to avoid runtime image dirtying. |
| 265 | void ComputeLazyFieldsForImageClasses() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 266 | SHARED_REQUIRES(Locks::mutator_lock_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 267 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 268 | // Remove unwanted classes from various roots. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 269 | void PruneNonImageClasses() SHARED_REQUIRES(Locks::mutator_lock_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 270 | |
| 271 | // Verify unwanted classes removed. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 272 | void CheckNonImageClassesRemoved() SHARED_REQUIRES(Locks::mutator_lock_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 273 | static void CheckNonImageClassesRemovedCallback(mirror::Object* obj, void* arg) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 274 | SHARED_REQUIRES(Locks::mutator_lock_); |

  // Lays out where the image objects will be at runtime.
  void CalculateNewObjectOffsets()
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CreateHeader(size_t oat_loaded_size, size_t oat_data_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::ObjectArray<mirror::Object>* CreateImageRoots() const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CalculateObjectBinSlots(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void UnbinObjectsIntoOffset(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void WalkInstanceFields(mirror::Object* obj, mirror::Class* klass)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void WalkFieldsInOrder(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);
  static void WalkFieldsCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);
  static void UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Creates the contiguous image in memory and adjusts pointers.
  void CopyAndFixupNativeData() SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupObjects() SHARED_REQUIRES(Locks::mutator_lock_);
  static void CopyAndFixupObjectsCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupObject(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupClass(mirror::Class* orig, mirror::Class* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupObject(mirror::Object* orig, mirror::Object* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupDexCache(mirror::DexCache* orig_dex_cache, mirror::DexCache* copy_dex_cache)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupPointerArray(mirror::Object* dst,
                         mirror::PointerArray* arr,
                         mirror::Class* klass,
                         Bin array_type)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Get quick code for non-resolution/imt_conflict/abstract method.
  const uint8_t* GetQuickCode(ArtMethod* method, bool* quick_is_interpreted)
      SHARED_REQUIRES(Locks::mutator_lock_);

  const uint8_t* GetQuickEntryPoint(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Patches references in OatFile to expect runtime addresses.
  void SetOatChecksumFromElfFile(File* elf_file);

  // Calculate the sum total of the bin slot sizes in [0, up_to). Defaults to all bins.
  size_t GetBinSizeSum(Bin up_to = kBinSize) const;

  // Return true if a method is likely to be dirtied at runtime.
  bool WillMethodBeDirty(ArtMethod* m) const SHARED_REQUIRES(Locks::mutator_lock_);

  // Assign the offset for an ArtMethod.
  void AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Return true if klass is loaded by the boot class loader but not in the boot image.
  bool IsBootClassLoaderNonImageClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);

  // Return true if klass depends on a boot class loader class that is not in the boot image. We
  // want to prune these classes since we do not want any boot class loader classes in the image.
  // This means that we also cannot have any classes which refer to such boot class loader
  // non-image classes.
  bool ContainsBootClassLoaderNonImageClass(mirror::Class* klass)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // early_exit is true if we had a cyclic dependency anywhere down the chain.
  bool ContainsBootClassLoaderNonImageClassInternal(mirror::Class* klass,
                                                    bool* early_exit,
                                                    std::unordered_set<mirror::Class*>* visited)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static Bin BinTypeForNativeRelocationType(NativeObjectRelocationType type);

  uintptr_t NativeOffsetInImage(void* obj);

  // Location of where the object will be when the image is loaded at runtime.
  template <typename T>
  T* NativeLocationInImage(T* obj);

  // Location of where the temporary copy of the object currently is.
  template <typename T>
  T* NativeCopyLocation(T* obj);

  // Return true if obj is inside of the boot image space. This may only return true if we are
  // compiling an app image.
  bool IsInBootImage(const void* obj) const;

  // Return true if ptr is within the boot oat file.
  bool IsInBootOatFile(const void* ptr) const;

  const CompilerDriver& compiler_driver_;

  // Beginning target image address for the output image.
  uint8_t* image_begin_;

  // Offset to the free space in image_.
  size_t image_end_;

  // Offset from image_begin_ to where the first object is in image_.
  size_t image_objects_offset_begin_;

  // The image roots address in the image.
  uint32_t image_roots_address_;

  // Oat file with code for this image.
  OatFile* oat_file_;

  // Memory mapped for generating the image.
  std::unique_ptr<MemMap> image_;

  // Pointer arrays that need to be updated. Since these are plain int and long arrays, we need
  // to track them explicitly. These include vtable arrays, iftable arrays, and dex caches.
  std::unordered_map<mirror::PointerArray*, Bin> pointer_arrays_;

  // The start offsets of the dex cache arrays.
  SafeMap<const DexFile*, size_t> dex_cache_array_starts_;

  // Saved hash codes. We use these to restore lock words that were temporarily repurposed to hold
  // forwarding addresses, and to copy the hash codes over to the image copies of the objects.
  std::unordered_map<mirror::Object*, uint32_t> saved_hashcode_map_;

  // Beginning target oat address for the pointers from the output image to its oat file.
  const uint8_t* oat_data_begin_;

  // Image bitmap which lets us know where the objects inside of the image reside.
  std::unique_ptr<gc::accounting::ContinuousSpaceBitmap> image_bitmap_;

  // Offsets from oat_data_begin_ to the stubs.
  uint32_t oat_address_offsets_[kOatAddressCount];

  // Boolean flags.
  const bool compile_pic_;
  const bool compile_app_image_;

  // Cache the boot image space in this class for faster lookups.
  gc::space::ImageSpace* boot_image_space_;

  // Size of pointers on the target architecture.
  size_t target_ptr_size_;

  // Bin slot tracking for dirty object packing.
  size_t bin_slot_sizes_[kBinSize];    // Number of bytes in a bin.
  size_t bin_slot_offsets_[kBinSize];  // Number of bytes in previous bins.
  size_t bin_slot_count_[kBinSize];    // Number of objects in a bin.

  // Cached size of the intern table for when we allocate memory.
  size_t intern_table_bytes_;

  // ArtField and ArtMethod relocation map. These are allocated as arrays of structs, but for
  // convenience we keep one entry per ArtField. ArtFields are placed right after the end of the
  // image objects (aka sum of bin_slot_sizes_); ArtMethods are placed right after the ArtFields.
  struct NativeObjectRelocation {
    uintptr_t offset;
    NativeObjectRelocationType type;

    bool IsArtMethodRelocation() const {
      return type == kNativeObjectRelocationTypeArtMethodClean ||
          type == kNativeObjectRelocationTypeArtMethodDirty;
    }
  };
  std::unordered_map<void*, NativeObjectRelocation> native_object_relocations_;

  // Runtime ArtMethods which aren't reachable from any Class but need to be copied into the image.
  ArtMethod* image_methods_[ImageHeader::kImageMethodsCount];

  // Fake length prefixed array for image methods. This array does not contain the actual
  // ArtMethods. We only use it for the header and relocation addresses.
  LengthPrefixedArray<ArtMethod> image_method_array_;

  // Counters for measurements, used for logging only.
  uint64_t dirty_methods_;
  uint64_t clean_methods_;

  // Prune class memoization table to speed up ContainsBootClassLoaderNonImageClass.
  std::unordered_map<mirror::Class*, bool> prune_class_memo_;

  friend class ContainsBootClassLoaderNonImageClassVisitor;
  friend class FixupClassVisitor;
  friend class FixupRootVisitor;
  friend class FixupVisitor;
  friend class NativeLocationVisitor;
  friend class NonImageClassesVisitor;
  DISALLOW_COPY_AND_ASSIGN(ImageWriter);
};

}  // namespace art

#endif  // ART_COMPILER_IMAGE_WRITER_H_