/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_IMAGE_WRITER_H_
#define ART_COMPILER_IMAGE_WRITER_H_

#include <stdint.h>
#include "base/memory_tool.h"

#include <cstddef>
#include <memory>
#include <set>
#include <string>
#include <ostream>

#include "base/bit_utils.h"
#include "base/macros.h"
#include "driver/compiler_driver.h"
#include "gc/space/space.h"
#include "length_prefixed_array.h"
#include "lock_word.h"
#include "mem_map.h"
#include "oat_file.h"
#include "mirror/dex_cache.h"
#include "os.h"
#include "safe_map.h"
#include "utils.h"

namespace art {

static constexpr int kInvalidImageFd = -1;

// Write a Space built during compilation for use during execution.
class ImageWriter FINAL {
 public:
  ImageWriter(const CompilerDriver& compiler_driver, uintptr_t image_begin,
              bool compile_pic)
      : compiler_driver_(compiler_driver), image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
        image_end_(0), image_objects_offset_begin_(0), image_roots_address_(0), oat_file_(nullptr),
        oat_data_begin_(nullptr), interpreter_to_interpreter_bridge_offset_(0),
        interpreter_to_compiled_code_bridge_offset_(0), jni_dlsym_lookup_offset_(0),
        quick_generic_jni_trampoline_offset_(0),
        quick_imt_conflict_trampoline_offset_(0), quick_resolution_trampoline_offset_(0),
        quick_to_interpreter_bridge_offset_(0), compile_pic_(compile_pic),
        target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
        bin_slot_sizes_(), bin_slot_offsets_(), bin_slot_count_(),
        intern_table_bytes_(0u), image_method_array_(ImageHeader::kImageMethodsCount),
        dirty_methods_(0u), clean_methods_(0u) {
    CHECK_NE(image_begin, 0U);
    std::fill(image_methods_, image_methods_ + arraysize(image_methods_), nullptr);
  }

  ~ImageWriter() {
  }

  bool PrepareImageAddressSpace();

  bool IsImageAddressSpaceReady() const {
    return image_roots_address_ != 0u;
  }

  template <typename T>
  T* GetImageAddress(T* object) const SHARED_REQUIRES(Locks::mutator_lock_) {
    return object == nullptr ? nullptr :
        reinterpret_cast<T*>(image_begin_ + GetImageOffset(object));
  }
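  // Illustrative example (not from the original header): if image_begin_ is 0x70000000 and
  // GetImageOffset(obj) is 0x1000, then GetImageAddress(obj) is the expected runtime address
  // 0x70001000; a null input simply maps back to null.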

  ArtMethod* GetImageMethodAddress(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_);

  template <typename PtrType>
  PtrType GetDexCacheArrayElementImageAddress(const DexFile* dex_file, uint32_t offset)
      const SHARED_REQUIRES(Locks::mutator_lock_) {
    auto it = dex_cache_array_starts_.find(dex_file);
    DCHECK(it != dex_cache_array_starts_.end());
    return reinterpret_cast<PtrType>(
        image_begin_ + bin_slot_offsets_[kBinDexCacheArray] + it->second + offset);
  }

  uint8_t* GetOatFileBegin() const;

  // If image_fd is not kInvalidImageFd, then we use that for the file. Otherwise we open
  // image_filename.
  bool Write(int image_fd,
             const std::string& image_filename,
             const std::string& oat_filename,
             const std::string& oat_location)
      REQUIRES(!Locks::mutator_lock_);
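  // Minimal caller-side sketch (assumed usage, not part of the original header; the surrounding
  // driver objects and the ordering relative to the oat-file write are assumptions):
  //
  //   ImageWriter writer(*driver, requested_image_base, /*compile_pic=*/false);
  //   CHECK(writer.PrepareImageAddressSpace());
  //   // ... write the oat file so that oat addresses and sizes are known ...
  //   bool ok = writer.Write(kInvalidImageFd, image_filename, oat_filename, oat_location);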

  uintptr_t GetOatDataBegin() {
    return reinterpret_cast<uintptr_t>(oat_data_begin_);
  }

 private:
  bool AllocMemory();

  // Mark the objects defined in this space in the given live bitmap.
  void RecordImageAllocations() SHARED_REQUIRES(Locks::mutator_lock_);

  // Classify different kinds of bins that objects end up getting packed into during image writing.
  enum Bin {
    // Likely-clean:
    kBinString,                        // [String] Almost always immutable (except for obj header).
    // Unknown mix of clean/dirty:
    kBinRegular,
    // Likely-dirty:
    // All classes get their own bins since their fields are often dirty.
    kBinClassInitializedFinalStatics,  // Class initializers have been run, no non-final statics.
    kBinClassInitialized,              // Class initializers have been run.
    kBinClassVerified,                 // Class verified, but initializers haven't been run.
    // Add more bins here if we add more segregation code.
    // Non-mirror fields must be below.
    // ArtFields should always be clean.
    kBinArtField,
    // If the class is initialized, then the ArtMethods are probably clean.
    kBinArtMethodClean,
    // ArtMethods may be dirty if the class has native methods or a declaring class that isn't
    // initialized.
    kBinArtMethodDirty,
    // Dex cache arrays have a special slot for PC-relative addressing. Since they are huge, and
    // their dirtiness is not important for the clean/dirty separation, we arbitrarily keep them
    // at the end of the native data.
    kBinDexCacheArray,                 // Arrays belonging to dex cache.
    kBinSize,
    // Number of bins which are for mirror objects.
    kBinMirrorCount = kBinArtField,
  };
  friend std::ostream& operator<<(std::ostream& stream, const Bin& bin);
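  // A sketch of how a Class might be routed to one of the bins above (an assumption about the
  // policy, which actually lives in image_writer.cc; AllStaticsAreFinal() is a hypothetical
  // helper shown only for illustration):
  //
  //   Bin GuessBinForClass(mirror::Class* klass) {
  //     if (!klass->IsInitialized()) {
  //       return kBinClassVerified;  // Initializers have not been run yet.
  //     }
  //     return AllStaticsAreFinal(klass) ? kBinClassInitializedFinalStatics
  //                                      : kBinClassInitialized;
  //   }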

  enum NativeObjectRelocationType {
    kNativeObjectRelocationTypeArtField,
    kNativeObjectRelocationTypeArtFieldArray,
    kNativeObjectRelocationTypeArtMethodClean,
    kNativeObjectRelocationTypeArtMethodArrayClean,
    kNativeObjectRelocationTypeArtMethodDirty,
    kNativeObjectRelocationTypeArtMethodArrayDirty,
    kNativeObjectRelocationTypeDexCacheArray,
  };
  friend std::ostream& operator<<(std::ostream& stream, const NativeObjectRelocationType& type);

  static constexpr size_t kBinBits = MinimumBitsToStore<uint32_t>(kBinMirrorCount - 1);
  // uint32 = typeof(lockword_)
  // Subtract read barrier bits since we want these to remain 0, or else it may result in DCHECK
  // failures due to invalid read barrier bits during object field reads.
  static const size_t kBinShift = BitSizeOf<uint32_t>() - kBinBits -
      LockWord::kReadBarrierStateSize;
  // 111000.....0
  static const size_t kBinMask = ((static_cast<size_t>(1) << kBinBits) - 1) << kBinShift;
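  // Resulting 32-bit layout (illustrative; assumes the bin bits sit directly below the read
  // barrier state bits, which is what the kBinShift computation above implies):
  //
  //   bit 31                                                                bit 0
  //   [ read barrier state ][ bin # (kBinBits bits) ][ index within the bin ..... ]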

  // We use the lock word to store the bin # and bin index of the object in the image.
  //
  // The struct size must be exactly sizeof(LockWord), currently 32 bits, since this will end up
  // stored in the lock word bit-for-bit when object forwarding addresses are being calculated.
  struct BinSlot {
    explicit BinSlot(uint32_t lockword);
    BinSlot(Bin bin, uint32_t index);

    // The bin an object belongs to, i.e. regular, class/verified, class/initialized, etc.
    Bin GetBin() const;
    // The offset in bytes from the beginning of the bin. Aligned to object size.
    uint32_t GetIndex() const;
    // Pack into a single uint32_t, for storing into a lock word.
    uint32_t Uint32Value() const { return lockword_; }
    // Comparison operator for map support.
    bool operator<(const BinSlot& other) const { return lockword_ < other.lockword_; }

   private:
    // Must be the same size as LockWord; anything larger would truncate the data.
    const uint32_t lockword_;
  };
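  // Round-trip sketch (assumed plumbing, based only on the comment above that the packed value is
  // stored bit-for-bit as a lock word forwarding address; the exact calls made by image_writer.cc
  // may differ):
  //
  //   BinSlot slot(kBinRegular, /*index=*/128u);
  //   obj->SetLockWord(LockWord::FromForwardingAddress(slot.Uint32Value()), false);
  //   ...
  //   BinSlot restored(obj->GetLockWord(false).ForwardingAddress());  // GetBin()/GetIndex() again.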

  // We use the lock word to store the offset of the object in the image.
  void AssignImageOffset(mirror::Object* object, BinSlot bin_slot)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SetImageOffset(mirror::Object* object, size_t offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsImageOffsetAssigned(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  size_t GetImageOffset(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_);
  void UpdateImageOffset(mirror::Object* obj, uintptr_t offset)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void PrepareDexCacheArraySlots() SHARED_REQUIRES(Locks::mutator_lock_);
  void AssignImageBinSlot(mirror::Object* object) SHARED_REQUIRES(Locks::mutator_lock_);
  void SetImageBinSlot(mirror::Object* object, BinSlot bin_slot)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsImageBinSlotAssigned(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  BinSlot GetImageBinSlot(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_);

  void AddDexCacheArrayRelocation(void* array, size_t offset) SHARED_REQUIRES(Locks::mutator_lock_);
  void AddMethodPointerArray(mirror::PointerArray* arr) SHARED_REQUIRES(Locks::mutator_lock_);

  static void* GetImageAddressCallback(void* writer, mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return reinterpret_cast<ImageWriter*>(writer)->GetImageAddress(obj);
  }

  mirror::Object* GetLocalAddress(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    size_t offset = GetImageOffset(object);
    uint8_t* dst = image_->Begin() + offset;
    return reinterpret_cast<mirror::Object*>(dst);
  }

  const uint8_t* GetOatAddress(uint32_t offset) const {
    // With Quick, code is within the OatFile, as it is all in one
    // .o ELF object.
    DCHECK_LE(offset, oat_file_->Size());
    DCHECK(oat_data_begin_ != nullptr);
    return offset == 0u ? nullptr : oat_data_begin_ + offset;
  }

  // Returns true if the class was in the original requested image classes list.
  bool IsImageClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);

  // Debug aid that lists the requested image classes.
  void DumpImageClasses();

  // Preinitializes some otherwise lazy fields (such as Class name) to avoid runtime image dirtying.
  void ComputeLazyFieldsForImageClasses()
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Remove unwanted classes from various roots.
  void PruneNonImageClasses() SHARED_REQUIRES(Locks::mutator_lock_);

  // Verify that unwanted classes have been removed.
  void CheckNonImageClassesRemoved() SHARED_REQUIRES(Locks::mutator_lock_);
  static void CheckNonImageClassesRemovedCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Lays out where the image objects will be at runtime.
  void CalculateNewObjectOffsets()
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CreateHeader(size_t oat_loaded_size, size_t oat_data_offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::ObjectArray<mirror::Object>* CreateImageRoots() const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CalculateObjectBinSlots(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void UnbinObjectsIntoOffset(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void WalkInstanceFields(mirror::Object* obj, mirror::Class* klass)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void WalkFieldsInOrder(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_);
  static void WalkFieldsCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);
  static void UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Creates the contiguous image in memory and adjusts pointers.
  void CopyAndFixupNativeData() SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupObjects() SHARED_REQUIRES(Locks::mutator_lock_);
  static void CopyAndFixupObjectsCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupObject(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_);
  void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupClass(mirror::Class* orig, mirror::Class* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupObject(mirror::Object* orig, mirror::Object* copy)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupDexCache(mirror::DexCache* orig_dex_cache, mirror::DexCache* copy_dex_cache)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr, mirror::Class* klass,
                         Bin array_type) SHARED_REQUIRES(Locks::mutator_lock_);

  // Get quick code for non-resolution/imt_conflict/abstract method.
  const uint8_t* GetQuickCode(ArtMethod* method, bool* quick_is_interpreted)
      SHARED_REQUIRES(Locks::mutator_lock_);

  const uint8_t* GetQuickEntryPoint(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Patches references in OatFile to expect runtime addresses.
  void SetOatChecksumFromElfFile(File* elf_file);

  // Calculate the sum total of the bin slot sizes in [0, up_to). Defaults to all bins.
  size_t GetBinSizeSum(Bin up_to = kBinSize) const;

  // Return true if a method is likely to be dirtied at runtime.
  bool WillMethodBeDirty(ArtMethod* m) const SHARED_REQUIRES(Locks::mutator_lock_);

  // Assign the offset for an ArtMethod.
  void AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static Bin BinTypeForNativeRelocationType(NativeObjectRelocationType type);

  uintptr_t NativeOffsetInImage(void* obj);

  template <typename T>
  T* NativeLocationInImage(T* obj);

  const CompilerDriver& compiler_driver_;

  // Beginning target image address for the output image.
  uint8_t* image_begin_;

  // Offset to the free space in image_.
  size_t image_end_;

  // Offset from image_begin_ to where the first object is in image_.
  size_t image_objects_offset_begin_;

  // The image roots address in the image.
  uint32_t image_roots_address_;

  // Oat file with code for this image.
  OatFile* oat_file_;

  // Memory mapped for generating the image.
  std::unique_ptr<MemMap> image_;

  // Pointer arrays that need to be updated. Since these are stored as plain int and long arrays,
  // we need to keep track of them explicitly. These include vtable arrays, iftable arrays, and
  // dex caches.
  std::unordered_map<mirror::PointerArray*, Bin> pointer_arrays_;

  // The start offsets of the dex cache arrays.
  SafeMap<const DexFile*, size_t> dex_cache_array_starts_;

  // Saved hash codes. We use these to restore lock words that were temporarily used to hold
  // forwarding addresses, as well as to copy the hash codes over to the image copies.
  std::unordered_map<mirror::Object*, uint32_t> saved_hashcode_map_;

  // Beginning target oat address for the pointers from the output image to its oat file.
  const uint8_t* oat_data_begin_;

  // Image bitmap which lets us know where the objects inside of the image reside.
  std::unique_ptr<gc::accounting::ContinuousSpaceBitmap> image_bitmap_;

  // Offset from oat_data_begin_ to the stubs.
  uint32_t interpreter_to_interpreter_bridge_offset_;
  uint32_t interpreter_to_compiled_code_bridge_offset_;
  uint32_t jni_dlsym_lookup_offset_;
  uint32_t quick_generic_jni_trampoline_offset_;
  uint32_t quick_imt_conflict_trampoline_offset_;
  uint32_t quick_resolution_trampoline_offset_;
  uint32_t quick_to_interpreter_bridge_offset_;
  const bool compile_pic_;

  // Size of pointers on the target architecture.
  size_t target_ptr_size_;

  // Bin slot tracking for dirty object packing.
  size_t bin_slot_sizes_[kBinSize];    // Number of bytes in a bin.
  size_t bin_slot_offsets_[kBinSize];  // Number of bytes in previous bins.
  size_t bin_slot_count_[kBinSize];    // Number of objects in a bin.
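  // Assumed relation between the arrays above (following their comments; not verified against
  // image_writer.cc): bin_slot_offsets_[b] accumulates bin_slot_sizes_ over the bins preceding b,
  // which is the same range that GetBinSizeSum(b) sums over.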

  // Cached size of the intern table for when we allocate memory.
  size_t intern_table_bytes_;

  // ArtField and ArtMethod relocation map. These are allocated as arrays of structs, but we want
  // one entry per ArtField for convenience. ArtFields are placed right after the end of the image
  // objects (i.e. the sum of bin_slot_sizes_). ArtMethods are placed right after the ArtFields.
  struct NativeObjectRelocation {
    uintptr_t offset;
    NativeObjectRelocationType type;

    bool IsArtMethodRelocation() const {
      return type == kNativeObjectRelocationTypeArtMethodClean ||
          type == kNativeObjectRelocationTypeArtMethodDirty;
    }
  };
  std::unordered_map<void*, NativeObjectRelocation> native_object_relocations_;
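  // Resulting native-data layout, as described above (illustrative only; the relative order of the
  // clean and dirty method sections and the exact placement are decided in image_writer.cc):
  //
  //   [mirror objects (bin_slot_sizes_ total)] [ArtFields] [clean ArtMethods] [dirty ArtMethods]
  //   [dex cache arrays, kept last as noted in the Bin enum]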

  // Runtime ArtMethods which aren't reachable from any Class but need to be copied into the image.
  ArtMethod* image_methods_[ImageHeader::kImageMethodsCount];
  // Fake length prefixed array for image methods. This array does not contain the actual
  // ArtMethods. We only use it for the header and relocation addresses.
  LengthPrefixedArray<ArtMethod> image_method_array_;

  // Counters for measurements, used for logging only.
  uint64_t dirty_methods_;
  uint64_t clean_methods_;

  friend class FixupClassVisitor;
  friend class FixupRootVisitor;
  friend class FixupVisitor;
  friend class NonImageClassesVisitor;
  DISALLOW_COPY_AND_ASSIGN(ImageWriter);
};

}  // namespace art

#endif  // ART_COMPILER_IMAGE_WRITER_H_