/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_IMAGE_WRITER_H_
#define ART_COMPILER_IMAGE_WRITER_H_

#include <stdint.h>
#include "base/memory_tool.h"

#include <algorithm>
#include <cstddef>
#include <memory>
#include <ostream>
#include <set>
#include <string>
#include <unordered_map>
#include <unordered_set>

#include "base/bit_utils.h"
#include "base/macros.h"
#include "driver/compiler_driver.h"
#include "gc/space/space.h"
#include "length_prefixed_array.h"
#include "lock_word.h"
#include "mem_map.h"
#include "mirror/dex_cache.h"
#include "oat_file.h"
#include "os.h"
#include "safe_map.h"
#include "utils.h"

namespace art {
namespace gc {
namespace space {
class ImageSpace;
}  // namespace space
}  // namespace gc

static constexpr int kInvalidImageFd = -1;

// Write a Space built during compilation for use during execution.
class ImageWriter FINAL {
 public:
  ImageWriter(const CompilerDriver& compiler_driver,
              uintptr_t image_begin,
              bool compile_pic,
              bool compile_app_image)
      : compiler_driver_(compiler_driver),
        image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
        image_end_(0),
        image_objects_offset_begin_(0),
        image_roots_address_(0),
        oat_file_(nullptr),
        oat_data_begin_(nullptr),
        compile_pic_(compile_pic),
        compile_app_image_(compile_app_image),
        boot_image_space_(nullptr),
        target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
        bin_slot_sizes_(),
        bin_slot_offsets_(),
        bin_slot_count_(),
        intern_table_bytes_(0u),
        image_method_array_(ImageHeader::kImageMethodsCount),
        dirty_methods_(0u),
        clean_methods_(0u),
        class_table_bytes_(0u) {
    CHECK_NE(image_begin, 0U);
    std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
    std::fill_n(oat_address_offsets_, arraysize(oat_address_offsets_), 0);
  }

  ~ImageWriter() {}

  bool PrepareImageAddressSpace();

  bool IsImageAddressSpaceReady() const {
    return image_roots_address_ != 0u;
  }

  template <typename T>
  T* GetImageAddress(T* object) const SHARED_REQUIRES(Locks::mutator_lock_) {
    return (object == nullptr || IsInBootImage(object))
        ? object
        : reinterpret_cast<T*>(image_begin_ + GetImageOffset(object));
  }

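  // Illustrative use of GetImageAddress() above (a sketch, not part of any contract): once
  // offsets have been assigned, a fixup pass can translate a heap reference into its final
  // address in the loaded image, e.g.
  //   mirror::Object* relocated = writer->GetImageAddress(obj);
  // Objects already in the boot image are returned unchanged when compiling an app image.
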
  ArtMethod* GetImageMethodAddress(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_);

  template <typename PtrType>
  PtrType GetDexCacheArrayElementImageAddress(const DexFile* dex_file, uint32_t offset)
      const SHARED_REQUIRES(Locks::mutator_lock_) {
    auto it = dex_cache_array_starts_.find(dex_file);
    DCHECK(it != dex_cache_array_starts_.end());
    return reinterpret_cast<PtrType>(
        image_begin_ + bin_slot_offsets_[kBinDexCacheArray] + it->second + offset);
  }

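  // Sketch of the address computation done by GetDexCacheArrayElementImageAddress() above,
  // using only members declared in this class:
  //   target = image_begin_                          // image base in the target process
  //          + bin_slot_offsets_[kBinDexCacheArray]  // start of the dex cache array bin
  //          + dex_cache_array_starts_[dex_file]     // start of this dex file's arrays
  //          + offset                                // element offset within those arrays
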
  uint8_t* GetOatFileBegin() const;

  // If image_fd is not kInvalidImageFd, then we use that for the file. Otherwise we open
  // image_filename.
  bool Write(int image_fd,
             const std::string& image_filename,
             const std::string& oat_filename,
             const std::string& oat_location)
      REQUIRES(!Locks::mutator_lock_);

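  // Rough call order as driven by the caller (illustrative only; the exact sequence lives in
  // the caller, e.g. dex2oat): PrepareImageAddressSpace(), then emit the oat file, then
  // Write(kInvalidImageFd, image_filename, oat_filename, oat_location).
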
  uintptr_t GetOatDataBegin() {
    return reinterpret_cast<uintptr_t>(oat_data_begin_);
  }

 private:
  bool AllocMemory();

  // Mark the objects defined in this space in the given live bitmap.
  void RecordImageAllocations() SHARED_REQUIRES(Locks::mutator_lock_);

  // The different kinds of bins that objects get packed into during image writing.
  enum Bin {
    // Likely-clean:
    kBinString,                        // [String] Almost always immutable (except for obj header).
    // Unknown mix of clean/dirty:
    kBinRegular,
    // Likely-dirty:
    // All classes get their own bins since their fields are often dirty.
    kBinClassInitializedFinalStatics,  // Class initializers have been run, no non-final statics.
    kBinClassInitialized,              // Class initializers have been run.
    kBinClassVerified,                 // Class verified, but initializers haven't been run.
    // Add more bins here if we add more segregation code.
    // Non-mirror bins must be below.
    // ArtFields should always be clean.
    kBinArtField,
    // If the class is initialized, then the ArtMethods are probably clean.
    kBinArtMethodClean,
    // ArtMethods may be dirty if the class has native methods or a declaring class that isn't
    // initialized.
    kBinArtMethodDirty,
    // Dex cache arrays have a special slot for PC-relative addressing. Since they are huge, and
    // their dirtiness therefore matters little for the clean/dirty separation, we arbitrarily
    // keep them at the end of the native data.
    kBinDexCacheArray,                 // Arrays belonging to dex caches.
    kBinSize,
    // Number of bins which are for mirror objects.
    kBinMirrorCount = kBinArtField,
  };
  friend std::ostream& operator<<(std::ostream& stream, const Bin& bin);

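  // Rationale for the bins above (informational only): objects expected to stay clean are packed
  // onto different pages than objects likely to be written to at runtime, so that once the image
  // is mapped, clean pages can stay shared between processes while dirty ones are confined to as
  // few pages as possible.
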
  enum NativeObjectRelocationType {
    kNativeObjectRelocationTypeArtField,
    kNativeObjectRelocationTypeArtFieldArray,
    kNativeObjectRelocationTypeArtMethodClean,
    kNativeObjectRelocationTypeArtMethodArrayClean,
    kNativeObjectRelocationTypeArtMethodDirty,
    kNativeObjectRelocationTypeArtMethodArrayDirty,
    kNativeObjectRelocationTypeDexCacheArray,
  };
  friend std::ostream& operator<<(std::ostream& stream, const NativeObjectRelocationType& type);

  enum OatAddress {
    kOatAddressInterpreterToInterpreterBridge,
    kOatAddressInterpreterToCompiledCodeBridge,
    kOatAddressJNIDlsymLookup,
    kOatAddressQuickGenericJNITrampoline,
    kOatAddressQuickIMTConflictTrampoline,
    kOatAddressQuickResolutionTrampoline,
    kOatAddressQuickToInterpreterBridge,
    // Number of elements in the enum.
    kOatAddressCount,
  };
  friend std::ostream& operator<<(std::ostream& stream, const OatAddress& oat_address);

  static constexpr size_t kBinBits = MinimumBitsToStore<uint32_t>(kBinMirrorCount - 1);
  // uint32 = typeof(lockword_)
  // Subtract read barrier bits since we want these to remain 0, or else it may result in DCHECK
  // failures due to invalid read barrier bits during object field reads.
  static const size_t kBinShift =
      BitSizeOf<uint32_t>() - kBinBits - LockWord::kReadBarrierStateSize;
  // 111000.....0
  static const size_t kBinMask = ((static_cast<size_t>(1) << kBinBits) - 1) << kBinShift;

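  // Resulting temporary lock word layout while forwarding addresses are computed (an example
  // assuming kBinBits == 3 and a 1-bit read barrier state; the real widths come from the
  // constants above):
  //
  //   bit 31                                    bit 0
  //   [ RB state | bin # (3 bits) | bin index (28 bits) ]
  //
  // The top read barrier bit(s) are deliberately kept 0, the next kBinBits bits hold the bin
  // number, and the remaining low bits hold the index within that bin.
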
  // We use the lock word to store the bin # and bin index of the object in the image.
  //
  // The struct size must be exactly sizeof(LockWord), currently 32-bits, since this will end up
  // stored in the lock word bit-for-bit when object forwarding addresses are being calculated.
  struct BinSlot {
    explicit BinSlot(uint32_t lockword);
    BinSlot(Bin bin, uint32_t index);

    // The bin an object belongs to, i.e. regular, class/verified, class/initialized, etc.
    Bin GetBin() const;
    // The offset in bytes from the beginning of the bin. Aligned to object size.
    uint32_t GetIndex() const;
    // Pack into a single uint32_t, for storing into a lock word.
    uint32_t Uint32Value() const { return lockword_; }
    // Comparison operator for map support.
    bool operator<(const BinSlot& other) const { return lockword_ < other.lockword_; }

   private:
    // Must be the same size as LockWord, any larger and we would truncate the data.
    const uint32_t lockword_;
  };

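  // Illustrative round trip through BinSlot (a sketch; the index must fit below kBinShift):
  //   BinSlot slot(kBinRegular, /*index=*/ 64u);
  //   uint32_t raw = slot.Uint32Value();   // packed value stored into the object's lock word
  //   BinSlot decoded(raw);                // later recovers decoded.GetBin() / decoded.GetIndex()
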
  // We use the lock word to store the offset of the object in the image.
  void AssignImageOffset(mirror::Object* object, BinSlot bin_slot)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SetImageOffset(mirror::Object* object, size_t offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsImageOffsetAssigned(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  size_t GetImageOffset(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_);
  void UpdateImageOffset(mirror::Object* obj, uintptr_t offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void PrepareDexCacheArraySlots() SHARED_REQUIRES(Locks::mutator_lock_);
  void AssignImageBinSlot(mirror::Object* object) SHARED_REQUIRES(Locks::mutator_lock_);
  void SetImageBinSlot(mirror::Object* object, BinSlot bin_slot)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsImageBinSlotAssigned(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  BinSlot GetImageBinSlot(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_);

  void AddDexCacheArrayRelocation(void* array, size_t offset) SHARED_REQUIRES(Locks::mutator_lock_);
  void AddMethodPointerArray(mirror::PointerArray* arr) SHARED_REQUIRES(Locks::mutator_lock_);

  static void* GetImageAddressCallback(void* writer, mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return reinterpret_cast<ImageWriter*>(writer)->GetImageAddress(obj);
  }

  mirror::Object* GetLocalAddress(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    size_t offset = GetImageOffset(object);
    uint8_t* dst = image_->Begin() + offset;
    return reinterpret_cast<mirror::Object*>(dst);
  }

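  // Note the distinction between the two address helpers: GetImageAddress() yields the address
  // an object will have in the target process (image_begin_ + offset), while GetLocalAddress()
  // above yields the writer's own mapping of the same object (image_->Begin() + offset), which
  // is where the copy/fixup code actually writes.
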
  // Returns the address in the boot image if we are compiling the app image.
  const uint8_t* GetOatAddress(OatAddress type) const;

  const uint8_t* GetOatAddressForOffset(uint32_t offset) const {
    // With Quick, code is within the OatFile, as it is all in one .o ELF object.
    DCHECK_LE(offset, oat_file_->Size());
    DCHECK(oat_data_begin_ != nullptr);
    return offset == 0u ? nullptr : oat_data_begin_ + offset;
  }

  // Returns true if the class was in the original requested image classes list.
  bool KeepClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);

  // Debug aid that lists the requested image classes.
  void DumpImageClasses();

  // Preinitializes some otherwise lazy fields (such as Class name) to avoid runtime image dirtying.
  void ComputeLazyFieldsForImageClasses()
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Remove unwanted classes from various roots.
  void PruneNonImageClasses() SHARED_REQUIRES(Locks::mutator_lock_);

  // Verify that the unwanted classes have been removed.
  void CheckNonImageClassesRemoved() SHARED_REQUIRES(Locks::mutator_lock_);
  static void CheckNonImageClassesRemovedCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Lays out where the image objects will be at runtime.
  void CalculateNewObjectOffsets()
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CreateHeader(size_t oat_loaded_size, size_t oat_data_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::ObjectArray<mirror::Object>* CreateImageRoots() const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CalculateObjectBinSlots(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void UnbinObjectsIntoOffset(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void WalkInstanceFields(mirror::Object* obj, mirror::Class* klass)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void WalkFieldsInOrder(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);
  static void WalkFieldsCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);
  static void UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Creates the contiguous image in memory and adjusts pointers.
  void CopyAndFixupNativeData() SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupObjects() SHARED_REQUIRES(Locks::mutator_lock_);
  static void CopyAndFixupObjectsCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupObject(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupClass(mirror::Class* orig, mirror::Class* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupObject(mirror::Object* orig, mirror::Object* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupDexCache(mirror::DexCache* orig_dex_cache, mirror::DexCache* copy_dex_cache)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupPointerArray(mirror::Object* dst,
                         mirror::PointerArray* arr,
                         mirror::Class* klass,
                         Bin array_type)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Get the quick code for a method that is not a resolution method, an imt-conflict method, or
  // abstract.
  const uint8_t* GetQuickCode(ArtMethod* method, bool* quick_is_interpreted)
      SHARED_REQUIRES(Locks::mutator_lock_);

  const uint8_t* GetQuickEntryPoint(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Patches references in the OatFile to expect runtime addresses.
  void SetOatChecksumFromElfFile(File* elf_file);

  // Calculate the sum total of the bin slot sizes in [0, up_to). Defaults to all bins.
  size_t GetBinSizeSum(Bin up_to = kBinSize) const;

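  // For example, GetBinSizeSum(kBinMirrorCount) is the combined size in bytes of all
  // mirror-object bins, i.e. everything that precedes the native data (ArtFields, ArtMethods,
  // dex cache arrays) in the bin layout.
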
  // Return true if a method is likely to be dirtied at runtime.
  bool WillMethodBeDirty(ArtMethod* m) const SHARED_REQUIRES(Locks::mutator_lock_);

  // Assign the offset for an ArtMethod.
  void AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Return true if klass is loaded by the boot class loader but not in the boot image.
  bool IsBootClassLoaderNonImageClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);

  // Return true if klass depends on a boot class loader non-image class. We want to prune these
  // classes since we do not want any boot class loader classes in the image. This means that
  // we also cannot have any classes which refer to these boot class loader non-image classes.
  bool ContainsBootClassLoaderNonImageClass(mirror::Class* klass)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // early_exit is true if we had a cyclic dependency anywhere down the chain.
  bool ContainsBootClassLoaderNonImageClassInternal(mirror::Class* klass,
                                                    bool* early_exit,
                                                    std::unordered_set<mirror::Class*>* visited)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static Bin BinTypeForNativeRelocationType(NativeObjectRelocationType type);

  uintptr_t NativeOffsetInImage(void* obj);

  // Location of where the object will be when the image is loaded at runtime.
  template <typename T>
  T* NativeLocationInImage(T* obj);

  // Location of where the temporary copy of the object currently is.
  template <typename T>
  T* NativeCopyLocation(T* obj);

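  // Sketch of how the two locations relate (illustrative): while copying, a native object obj is
  // written to NativeCopyLocation(obj) inside the local image_ mapping, whereas any pointer to
  // obj that the runtime will later dereference is rewritten to NativeLocationInImage(obj), its
  // address once the image is mapped at image_begin_.
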
  // Return true if obj is inside the boot image space. This may only return true if we are
  // compiling an app image.
  bool IsInBootImage(const void* obj) const;

  // Return true if ptr is within the boot oat file.
  bool IsInBootOatFile(const void* ptr) const;

  const CompilerDriver& compiler_driver_;

  // Beginning target image address for the output image.
  uint8_t* image_begin_;

  // Offset to the free space in image_.
  size_t image_end_;

  // Offset from image_begin_ to where the first object is in image_.
  size_t image_objects_offset_begin_;

  // The image roots address in the image.
  uint32_t image_roots_address_;

  // The oat file with code for this image.
  OatFile* oat_file_;

  // Memory mapped for generating the image.
  std::unique_ptr<MemMap> image_;

  // Pointer arrays that need to be updated. Since these are only int and long arrays, we need to
  // keep track of them ourselves. They include vtable arrays, iftable arrays, and dex caches.
  std::unordered_map<mirror::PointerArray*, Bin> pointer_arrays_;

  // The start offsets of the dex cache arrays.
  SafeMap<const DexFile*, size_t> dex_cache_array_starts_;

  // Saved hash codes. We use these to restore lock words that were temporarily used to hold
  // forwarding addresses, as well as to copy the hash codes over to the image copies.
  std::unordered_map<mirror::Object*, uint32_t> saved_hashcode_map_;

  // Beginning target oat address for the pointers from the output image to its oat file.
  const uint8_t* oat_data_begin_;

  // Image bitmap which lets us know where the objects inside the image reside.
  std::unique_ptr<gc::accounting::ContinuousSpaceBitmap> image_bitmap_;

  // Offsets from oat_data_begin_ to the stubs.
  uint32_t oat_address_offsets_[kOatAddressCount];

  // Boolean flags.
  const bool compile_pic_;
  const bool compile_app_image_;

  // Cache the boot image space in this class for faster lookups.
  gc::space::ImageSpace* boot_image_space_;

  // Size of pointers on the target architecture.
  size_t target_ptr_size_;

  // Bin slot tracking for dirty object packing.
  size_t bin_slot_sizes_[kBinSize];    // Number of bytes in a bin.
  size_t bin_slot_offsets_[kBinSize];  // Number of bytes in previous bins.
  size_t bin_slot_count_[kBinSize];    // Number of objects in a bin.

  // Cached size of the intern table for when we allocate memory.
  size_t intern_table_bytes_;

  // ArtField/ArtMethod relocation map. The natives themselves are allocated as arrays of structs,
  // but for convenience we keep one map entry per individual ArtField/ArtMethod. ArtFields are
  // placed right after the end of the image objects (aka the sum of bin_slot_sizes_); ArtMethods
  // are placed right after the ArtFields.
  struct NativeObjectRelocation {
    uintptr_t offset;
    NativeObjectRelocationType type;

    bool IsArtMethodRelocation() const {
      return type == kNativeObjectRelocationTypeArtMethodClean ||
             type == kNativeObjectRelocationTypeArtMethodDirty;
    }
  };
  std::unordered_map<void*, NativeObjectRelocation> native_object_relocations_;

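  // Conceptual layout of the native data that these relocations describe, following the Bin enum
  // order (a sketch, not a binary-format guarantee):
  //   [ mirror object bins ][ ArtFields ][ clean ArtMethods ][ dirty ArtMethods ][ dex cache arrays ]
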
  // Runtime ArtMethods which aren't reachable from any Class but need to be copied into the image.
  ArtMethod* image_methods_[ImageHeader::kImageMethodsCount];
  // Fake length prefixed array for image methods. This array does not contain the actual
  // ArtMethods. We only use it for the header and relocation addresses.
  LengthPrefixedArray<ArtMethod> image_method_array_;

  // Counters for measurements, used for logging only.
  uint64_t dirty_methods_;
  uint64_t clean_methods_;

  // Prune class memoization table to speed up ContainsBootClassLoaderNonImageClass.
  std::unordered_map<mirror::Class*, bool> prune_class_memo_;

  // Class loaders with a class table to write out. Should only be one currently.
  std::unordered_set<mirror::ClassLoader*> class_loaders_;

  // Number of image class table bytes.
  size_t class_table_bytes_;

  friend class ContainsBootClassLoaderNonImageClassVisitor;
  friend class FixupClassVisitor;
  friend class FixupRootVisitor;
  friend class FixupVisitor;
  friend class NativeLocationVisitor;
  friend class NonImageClassesVisitor;
  DISALLOW_COPY_AND_ASSIGN(ImageWriter);
};

}  // namespace art

#endif  // ART_COMPILER_IMAGE_WRITER_H_