//===-- RuntimeDyldMachOAArch64.h -- MachO/AArch64 specific code. -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H
#define LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H

#include "../RuntimeDyldMachO.h"
#include "llvm/Support/Endian.h"

#define DEBUG_TYPE "dyld"

namespace llvm {

class RuntimeDyldMachOAArch64
    : public RuntimeDyldMachOCRTPBase<RuntimeDyldMachOAArch64> {
public:

  typedef uint64_t TargetPtrT;

  RuntimeDyldMachOAArch64(RuntimeDyld::MemoryManager &MM,
                          RuntimeDyld::SymbolResolver &Resolver)
      : RuntimeDyldMachOCRTPBase(MM, Resolver) {}

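  // A stub on MachO/AArch64 is a single 64-bit GOT entry (see
  // processGOTRelocation below), hence the 8-byte size and 8-byte alignment.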
  unsigned getMaxStubSize() override { return 8; }

  unsigned getStubAlignment() override { return 8; }

  /// Extract the addend encoded in the instruction / memory location.
  int64_t decodeAddend(const RelocationEntry &RE) const {
    const SectionEntry &Section = Sections[RE.SectionID];
    uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
    unsigned NumBytes = 1 << RE.Size;
    int64_t Addend = 0;
    // Verify that the relocation has the correct size and alignment.
    switch (RE.RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
      break;
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

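    // Decode the addend according to the relocation kind.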
    switch (RE.RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        Addend = *reinterpret_cast<support::ulittle32_t *>(LocalAddress);
      else
        Addend = *reinterpret_cast<support::ulittle64_t *>(LocalAddress);
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      // Verify that the relocation points to the expected branch instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0xFC000000) == 0x14000000 && "Expected branch instruction.");

      // Get the 26 bit addend encoded in the branch instruction and sign-extend
      // to 64 bit. The lower 2 bits are always zeros and are therefore implicit
      // (<< 2).
      Addend = (*p & 0x03FFFFFF) << 2;
      Addend = SignExtend64(Addend, 28);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");

      // Get the 21 bit addend encoded in the adrp instruction and sign-extend
      // to 64 bit. The lower 12 bits (4096 byte page) are always zeros and are
      // therefore implicit (<< 12).
89 Addend = ((*p & 0x60000000) >> 29) | ((*p & 0x01FFFFE0) >> 3) << 12;
      Addend = SignExtend64(Addend, 33);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      (void)p;
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
    } // fall-through
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000)) &&
             "Expected load / store or add/sub instruction.");

      // Get the 12 bit addend encoded in the instruction.
      Addend = (*p & 0x003FFC00) >> 10;

      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        if (ImplicitShift == 0) {
          // Check if this is a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000)
            ImplicitShift = 4;
        }
      }
      // Compensate for implicit shift.
      Addend <<= ImplicitShift;
      break;
    }
    }
    return Addend;
  }

  /// Encode the addend in the instruction / memory location.
  void encodeAddend(uint8_t *LocalAddress, unsigned NumBytes,
                    MachO::RelocationInfoType RelType, int64_t Addend) const {
    // Verify that the relocation has the correct alignment.
    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
      break;
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

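    // Write the addend back into the instruction / memory location according
    // to the relocation kind.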
    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        *reinterpret_cast<support::ulittle32_t *>(LocalAddress) = Addend;
      else
        *reinterpret_cast<support::ulittle64_t *>(LocalAddress) = Addend;
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      // Verify that the relocation points to the expected branch instruction.
      assert((*p & 0xFC000000) == 0x14000000 && "Expected branch instruction.");

      // Verify addend value.
      assert((Addend & 0x3) == 0 && "Branch target is not aligned");
      assert(isInt<28>(Addend) && "Branch target is out of range.");

      // Encode the addend as 26 bit immediate in the branch instruction.
      *p = (*p & 0xFC000000) | ((uint32_t)(Addend >> 2) & 0x03FFFFFF);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");

      // Check that the addend fits into 21 bits (+ 12 lower bits).
      assert((Addend & 0xFFF) == 0 && "ADRP target is not page aligned.");
      assert(isInt<33>(Addend) && "Invalid page reloc value.");

      // Encode the addend into the instruction.
      uint32_t ImmLoValue = ((uint64_t)Addend << 17) & 0x60000000;
      uint32_t ImmHiValue = ((uint64_t)Addend >> 9) & 0x00FFFFE0;
      *p = (*p & 0x9F00001F) | ImmHiValue | ImmLoValue;
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
      (void)p;
    } // fall-through
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000)) &&
             "Expected load / store or add/sub instruction.");

      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction and verify alignment.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        switch (ImplicitShift) {
        case 0:
          // Check if this is a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000) {
            ImplicitShift = 4;
            assert(((Addend & 0xF) == 0) &&
                   "128-bit LDR/STR not 16-byte aligned.");
          }
          break;
        case 1:
          assert(((Addend & 0x1) == 0) && "16-bit LDR/STR not 2-byte aligned.");
          break;
        case 2:
          assert(((Addend & 0x3) == 0) && "32-bit LDR/STR not 4-byte aligned.");
          break;
        case 3:
          assert(((Addend & 0x7) == 0) && "64-bit LDR/STR not 8-byte aligned.");
          break;
        }
      }
      // Compensate for implicit shift.
      Addend >>= ImplicitShift;
      assert(isUInt<12>(Addend) && "Addend cannot be encoded.");

      // Encode the addend into the instruction.
      *p = (*p & 0xFFC003FF) | ((uint32_t)(Addend << 10) & 0x003FFC00);
      break;
    }
    }
  }

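  /// Process a single relocation from the object: fold in any preceding
  /// ARM64_RELOC_ADDEND, hand ARM64_RELOC_SUBTRACTOR pairs off to
  /// processSubtractRelocation, decode any addend embedded at the fixup
  /// location, and record the relocation (creating a GOT entry where needed)
  /// so that resolveRelocation can patch it later.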
  Expected<relocation_iterator>
  processRelocationRef(unsigned SectionID, relocation_iterator RelI,
                       const ObjectFile &BaseObjT,
                       ObjSectionToIDMap &ObjSectionToID,
                       StubMap &Stubs) override {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile &>(BaseObjT);
    MachO::any_relocation_info RelInfo =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    if (Obj.isRelocationScattered(RelInfo))
      return make_error<RuntimeDyldError>("Scattered relocations not supported "
                                          "for MachO AArch64");

    // ARM64 has an ARM64_RELOC_ADDEND relocation type that carries an explicit
    // addend for the following relocation. If found: (1) store the associated
    // addend, (2) consume the next relocation, and (3) use the stored addend to
    // override the addend decoded from the instruction.
    int64_t ExplicitAddend = 0;
    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_ADDEND) {
      assert(!Obj.getPlainRelocationExternal(RelInfo));
      assert(!Obj.getAnyRelocationPCRel(RelInfo));
      assert(Obj.getAnyRelocationLength(RelInfo) == 2);
      int64_t RawAddend = Obj.getPlainRelocationSymbolNum(RelInfo);
      // Sign-extend the 24-bit addend to 64 bits.
      ExplicitAddend = SignExtend64(RawAddend, 24);
      ++RelI;
      RelInfo = Obj.getRelocation(RelI->getRawDataRefImpl());
    }

    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_SUBTRACTOR)
      return processSubtractRelocation(SectionID, RelI, Obj, ObjSectionToID);

    RelocationEntry RE(getRelocationEntry(SectionID, Obj, RelI));
    RE.Addend = decodeAddend(RE);

    assert((ExplicitAddend == 0 || RE.Addend == 0) &&
           "Relocation has ARM64_RELOC_ADDEND and embedded addend in the "
           "instruction.");
    if (ExplicitAddend)
      RE.Addend = ExplicitAddend;

    RelocationValueRef Value;
    if (auto ValueOrErr = getRelocationValueRef(Obj, RelI, RE, ObjSectionToID))
      Value = *ValueOrErr;
    else
      return ValueOrErr.takeError();

    bool IsExtern = Obj.getPlainRelocationExternal(RelInfo);
    if (!IsExtern && RE.IsPCRel)
      makeValueAddendPCRel(Value, RelI, 1 << RE.Size);

    RE.Addend = Value.Offset;

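    // GOT_LOAD relocations are redirected through a GOT entry; all other
    // relocations are recorded directly against their symbol or section.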
    if (RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGE21 ||
        RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12)
      processGOTRelocation(RE, Value, Stubs);
    else {
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    }

    return ++RelI;
  }

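  /// Patch the fixup described by RE now that the address of its target,
  /// Value, is known.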
  void resolveRelocation(const RelocationEntry &RE, uint64_t Value) override {
    DEBUG(dumpRelocationToResolve(RE, Value));

    const SectionEntry &Section = Sections[RE.SectionID];
    uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
    MachO::RelocationInfoType RelType =
        static_cast<MachO::RelocationInfoType>(RE.RelType);

    switch (RelType) {
    default:
      llvm_unreachable("Invalid relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED: {
      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_UNSIGNED not supported");
      // Mask in the target value a byte at a time (we don't have an alignment
      // guarantee for the target address, so this is safest).
      if (RE.Size < 2)
        llvm_unreachable("Invalid size for ARM64_RELOC_UNSIGNED");

      encodeAddend(LocalAddress, 1 << RE.Size, RelType, Value + RE.Addend);
      break;
    }
    case MachO::ARM64_RELOC_BRANCH26: {
      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_BRANCH26 not supported");
      // Check if branch is in range.
      uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
      int64_t PCRelVal = Value - FinalAddress + RE.Addend;
      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_PAGE21 not supported");
      // Adjust for PC-relative relocation and offset.
      uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
      int64_t PCRelVal =
          ((Value + RE.Addend) & (-4096)) - (FinalAddress & (-4096));
      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
    case MachO::ARM64_RELOC_PAGEOFF12: {
      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_PAGEOFF12 not supported");
      // Add the offset from the symbol.
      Value += RE.Addend;
      // Mask out the page address and only use the lower 12 bits.
      Value &= 0xFFF;
      encodeAddend(LocalAddress, /*Size=*/4, RelType, Value);
      break;
    }
    case MachO::ARM64_RELOC_SUBTRACTOR: {
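      // A SUBTRACTOR relocation encodes SectionA - SectionB (plus the addend);
      // Value is the load address of one of the two sections involved.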
      uint64_t SectionABase = Sections[RE.Sections.SectionA].getLoadAddress();
      uint64_t SectionBBase = Sections[RE.Sections.SectionB].getLoadAddress();
      assert((Value == SectionABase || Value == SectionBBase) &&
             "Unexpected SUBTRACTOR relocation value.");
      Value = SectionABase - SectionBBase + RE.Addend;
      writeBytesUnaligned(Value, LocalAddress, 1 << RE.Size);
      break;
    }
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21:
    case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12:
      llvm_unreachable("Relocation type not yet implemented!");
    case MachO::ARM64_RELOC_ADDEND:
      llvm_unreachable("ARM64_RELOC_ADDEND should have been handled by "
                       "processRelocationRef!");
    }
  }

  Error finalizeSection(const ObjectFile &Obj, unsigned SectionID,
                        const SectionRef &Section) {
    return Error::success();
  }

private:
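  /// Reserve (or reuse) an 8-byte GOT entry in the stub area for Value, add an
  /// ARM64_RELOC_UNSIGNED relocation to fill it with the target address, and
  /// retarget the original GOT_LOAD relocation at that entry.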
  void processGOTRelocation(const RelocationEntry &RE,
                            RelocationValueRef &Value, StubMap &Stubs) {
    assert(RE.Size == 2);
    SectionEntry &Section = Sections[RE.SectionID];
    StubMap::const_iterator i = Stubs.find(Value);
    int64_t Offset;
    if (i != Stubs.end())
      Offset = static_cast<int64_t>(i->second);
    else {
      // FIXME: There must be a better way to do this than to check and fix the
      // alignment every time!!!
      uintptr_t BaseAddress = uintptr_t(Section.getAddress());
      uintptr_t StubAlignment = getStubAlignment();
      uintptr_t StubAddress =
          (BaseAddress + Section.getStubOffset() + StubAlignment - 1) &
          -StubAlignment;
      unsigned StubOffset = StubAddress - BaseAddress;
      Stubs[Value] = StubOffset;
      assert(((StubAddress % getStubAlignment()) == 0) &&
             "GOT entry not aligned");
      RelocationEntry GOTRE(RE.SectionID, StubOffset,
                            MachO::ARM64_RELOC_UNSIGNED, Value.Offset,
                            /*IsPCRel=*/false, /*Size=*/3);
      if (Value.SymbolName)
        addRelocationForSymbol(GOTRE, Value.SymbolName);
      else
        addRelocationForSection(GOTRE, Value.SectionID);
      Section.advanceStubOffset(getMaxStubSize());
      Offset = static_cast<int64_t>(StubOffset);
    }
    RelocationEntry TargetRE(RE.SectionID, RE.Offset, RE.RelType, Offset,
                             RE.IsPCRel, RE.Size);
    addRelocationForSection(TargetRE, RE.SectionID);
  }

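  /// Record an ARM64_RELOC_SUBTRACTOR pair: the relocation at RelI names the
  /// subtrahend symbol and the following relocation names the minuend.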
  Expected<relocation_iterator>
  processSubtractRelocation(unsigned SectionID, relocation_iterator RelI,
                            const ObjectFile &BaseObjT,
                            ObjSectionToIDMap &ObjSectionToID) {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile&>(BaseObjT);
    MachO::any_relocation_info RE =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    unsigned Size = Obj.getAnyRelocationLength(RE);
    uint64_t Offset = RelI->getOffset();
    uint8_t *LocalAddress = Sections[SectionID].getAddressWithOffset(Offset);
    unsigned NumBytes = 1 << Size;

    Expected<StringRef> SubtrahendNameOrErr = RelI->getSymbol()->getName();
    if (!SubtrahendNameOrErr)
      return SubtrahendNameOrErr.takeError();
    auto SubtrahendI = GlobalSymbolTable.find(*SubtrahendNameOrErr);
    unsigned SectionBID = SubtrahendI->second.getSectionID();
    uint64_t SectionBOffset = SubtrahendI->second.getOffset();
    int64_t Addend =
        SignExtend64(readBytesUnaligned(LocalAddress, NumBytes), NumBytes * 8);

    ++RelI;
    Expected<StringRef> MinuendNameOrErr = RelI->getSymbol()->getName();
    if (!MinuendNameOrErr)
      return MinuendNameOrErr.takeError();
    auto MinuendI = GlobalSymbolTable.find(*MinuendNameOrErr);
    unsigned SectionAID = MinuendI->second.getSectionID();
    uint64_t SectionAOffset = MinuendI->second.getOffset();

    RelocationEntry R(SectionID, Offset, MachO::ARM64_RELOC_SUBTRACTOR,
                      (uint64_t)Addend, SectionAID, SectionAOffset, SectionBID,
                      SectionBOffset, false, Size);

    addRelocationForSection(R, SectionAID);

    return ++RelI;
  }

};
}

#undef DEBUG_TYPE

#endif