//===-- RuntimeDyldMachOAArch64.h -- MachO/AArch64 specific code. -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_RUNTIMEDYLDMACHOAARCH64_H
#define LLVM_RUNTIMEDYLDMACHOAARCH64_H

#include "../RuntimeDyldMachO.h"
#include "llvm/Support/Endian.h"

#define DEBUG_TYPE "dyld"

namespace llvm {

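/// RuntimeDyld support for MachO objects targeting AArch64 (ARM64).
///
/// This class decodes and encodes the addends embedded in AArch64
/// instructions, resolves the MachO ARM64_RELOC_* relocation types, and emits
/// 8-byte GOT entries as stubs where a relocation must go through the GOT. It
/// is typically instantiated through the RuntimeDyldMachO factory when an
/// arm64 MachO object is loaded.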
class RuntimeDyldMachOAArch64
    : public RuntimeDyldMachOCRTPBase<RuntimeDyldMachOAArch64> {
public:
  RuntimeDyldMachOAArch64(RTDyldMemoryManager *MM)
      : RuntimeDyldMachOCRTPBase(MM) {}

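  // The only stubs this target emits are 8-byte GOT entries (see
  // processGOTRelocation below), which determines both the maximum stub size
  // and the stub alignment.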
  unsigned getMaxStubSize() override { return 8; }

  unsigned getStubAlignment() override { return 8; }

  /// Extract the addend encoded in the instruction / memory location.
  int64_t decodeAddend(uint8_t *LocalAddress, unsigned NumBytes,
                       MachO::RelocationInfoType RelType) const {
    int64_t Addend = 0;
    // Verify that the relocation has the correct size and alignment.
    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
      break;
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        Addend = *reinterpret_cast<support::ulittle32_t *>(LocalAddress);
      else
        Addend = *reinterpret_cast<support::ulittle64_t *>(LocalAddress);
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      // Verify that the relocation points to the expected branch instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0xFC000000) == 0x14000000 && "Expected branch instruction.");

      // Get the 26 bit addend encoded in the branch instruction and sign-extend
      // to 64 bit. The lower 2 bits are always zeros and are therefore implicit
      // (<< 2).
      Addend = (*p & 0x03FFFFFF) << 2;
      Addend = SignExtend64(Addend, 28);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");

      // Get the 21 bit addend encoded in the adrp instruction and sign-extend
      // to 64 bit. The lower 12 bits (4096 byte page) are always zeros and are
      // therefore implicit (<< 12).
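      // (In the adrp encoding, immlo occupies bits 30:29 and immhi bits 23:5;
      // together they form the 21-bit page value, which is then shifted by 12.)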
      Addend = (((*p & 0x60000000) >> 29) | ((*p & 0x01FFFFE0) >> 3)) << 12;
      Addend = SignExtend64(Addend, 33);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      (void)p;
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
    } // fall-through
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000)) &&
             "Expected load / store or add/sub instruction.");

      // Get the 12 bit addend encoded in the instruction.
      Addend = (*p & 0x003FFC00) >> 10;

      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        if (ImplicitShift == 0) {
          // Check if this is a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000)
            ImplicitShift = 4;
        }
      }
      // Compensate for implicit shift.
      Addend <<= ImplicitShift;
      break;
    }
    }
    return Addend;
  }

  /// Encode the addend in the instruction / memory location.
  void encodeAddend(uint8_t *LocalAddress, unsigned NumBytes,
                    MachO::RelocationInfoType RelType, int64_t Addend) const {
    // Verify that the relocation has the correct size and alignment.
    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
      break;
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        *reinterpret_cast<support::ulittle32_t *>(LocalAddress) = Addend;
      else
        *reinterpret_cast<support::ulittle64_t *>(LocalAddress) = Addend;
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      // Verify that the relocation points to the expected branch instruction.
      assert((*p & 0xFC000000) == 0x14000000 && "Expected branch instruction.");

      // Verify addend value.
      assert((Addend & 0x3) == 0 && "Branch target is not aligned");
      assert(isInt<28>(Addend) && "Branch target is out of range.");

      // Encode the addend as 26 bit immediate in the branch instruction.
      *p = (*p & 0xFC000000) | ((uint32_t)(Addend >> 2) & 0x03FFFFFF);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");

      // Check that the addend fits into 21 bits (+ 12 lower bits).
      assert((Addend & 0xFFF) == 0 && "ADRP target is not page aligned.");
      assert(isInt<33>(Addend) && "Invalid page reloc value.");

      // Encode the addend into the instruction.
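      // The 21-bit page delta is split across the adrp immediate fields:
      // immlo goes into bits 30:29 and immhi into bits 23:5.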
      uint32_t ImmLoValue = (uint32_t)(Addend << 17) & 0x60000000;
      uint32_t ImmHiValue = (uint32_t)(Addend >> 9) & 0x00FFFFE0;
      *p = (*p & 0x9F00001F) | ImmHiValue | ImmLoValue;
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
      (void)p;
    } // fall-through
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000)) &&
             "Expected load / store or add/sub instruction.");

      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction and verify alignment.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        switch (ImplicitShift) {
        case 0:
          // Check if this is a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000) {
            ImplicitShift = 4;
            assert(((Addend & 0xF) == 0) &&
                   "128-bit LDR/STR not 16-byte aligned.");
          }
          break;
        case 1:
          assert(((Addend & 0x1) == 0) && "16-bit LDR/STR not 2-byte aligned.");
          break;
        case 2:
          assert(((Addend & 0x3) == 0) && "32-bit LDR/STR not 4-byte aligned.");
          break;
        case 3:
          assert(((Addend & 0x7) == 0) && "64-bit LDR/STR not 8-byte aligned.");
          break;
        }
      }
      // Compensate for implicit shift.
      Addend >>= ImplicitShift;
      assert(isUInt<12>(Addend) && "Addend cannot be encoded.");

      // Encode the addend into the instruction.
      *p = (*p & 0xFFC003FF) | ((uint32_t)(Addend << 10) & 0x003FFC00);
      break;
    }
    }
  }

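  /// Process one relocation from the object file: consume an optional leading
  /// ARM64_RELOC_ADDEND entry, build the RelocationEntry, and either record it
  /// directly or redirect it through a GOT stub.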
  relocation_iterator
  processRelocationRef(unsigned SectionID, relocation_iterator RelI,
                       ObjectImage &ObjImg, ObjSectionToIDMap &ObjSectionToID,
                       const SymbolTableMap &Symbols, StubMap &Stubs) override {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile &>(*ObjImg.getObjectFile());
    MachO::any_relocation_info RelInfo =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    assert(!Obj.isRelocationScattered(RelInfo) &&
           "Scattered relocations are not supported.");

    // ARM64 has an ARM64_RELOC_ADDEND relocation type that carries an explicit
    // addend for the following relocation. If found: (1) store the associated
    // addend, (2) consume the next relocation, and (3) use the stored addend to
    // override the addend.
    int64_t ExplicitAddend = 0;
    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_ADDEND) {
      assert(!Obj.getPlainRelocationExternal(RelInfo));
      assert(!Obj.getAnyRelocationPCRel(RelInfo));
      assert(Obj.getAnyRelocationLength(RelInfo) == 2);
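      // ARM64_RELOC_ADDEND does not reference a symbol; its r_symbolnum field
      // holds the 24-bit addend for the relocation that follows.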
      int64_t RawAddend = Obj.getPlainRelocationSymbolNum(RelInfo);
      // Sign-extend the 24-bit addend to 64 bits.
      ExplicitAddend = SignExtend64(RawAddend, 24);
      ++RelI;
      RelInfo = Obj.getRelocation(RelI->getRawDataRefImpl());
    }

    RelocationEntry RE(getBasicRelocationEntry(SectionID, ObjImg, RelI));
    RelocationValueRef Value(
        getRelocationValueRef(ObjImg, RelI, RE, ObjSectionToID, Symbols));

    assert((ExplicitAddend == 0 || RE.Addend == 0) &&
           "Relocation has ARM64_RELOC_ADDEND and embedded addend in the "
           "instruction.");
    if (ExplicitAddend) {
      RE.Addend = ExplicitAddend;
      Value.Addend = ExplicitAddend;
    }

    bool IsExtern = Obj.getPlainRelocationExternal(RelInfo);
    if (!IsExtern && RE.IsPCRel)
      makeValueAddendPCRel(Value, ObjImg, RelI, 1 << RE.Size);

    RE.Addend = Value.Addend;

    if (RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGE21 ||
        RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12)
      processGOTRelocation(RE, Value, Stubs);
    else {
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    }

    return ++RelI;
  }

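  /// Write the final, resolved target value into the instruction or memory
  /// location described by the relocation entry.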
  void resolveRelocation(const RelocationEntry &RE, uint64_t Value) {
    DEBUG(dumpRelocationToResolve(RE, Value));

    const SectionEntry &Section = Sections[RE.SectionID];
    uint8_t *LocalAddress = Section.Address + RE.Offset;
    MachO::RelocationInfoType RelType =
        static_cast<MachO::RelocationInfoType>(RE.RelType);

    switch (RelType) {
    default:
      llvm_unreachable("Invalid relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED: {
      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_UNSIGNED not supported");
      // There is no alignment guarantee for the target address, so the value
      // is written with the unaligned-safe little-endian types in encodeAddend.
      if (RE.Size < 2)
        llvm_unreachable("Invalid size for ARM64_RELOC_UNSIGNED");

      encodeAddend(LocalAddress, 1 << RE.Size, RelType, Value + RE.Addend);
      break;
    }
    case MachO::ARM64_RELOC_BRANCH26: {
      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_BRANCH26 not supported");
      // Check if branch is in range.
      uint64_t FinalAddress = Section.LoadAddress + RE.Offset;
      int64_t PCRelVal = Value - FinalAddress + RE.Addend;
      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_PAGE21 not supported");
      // Adjust for PC-relative relocation and offset.
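      // The adrp immediate encodes the distance between the 4 KiB page of the
      // target and the 4 KiB page containing the instruction.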
329 uint64_t FinalAddress = Section.LoadAddress + RE.Offset;
Juergen Ributzkaf5609282014-07-22 21:42:55 +0000330 int64_t PCRelVal =
331 ((Value + RE.Addend) & (-4096)) - (FinalAddress & (-4096));
Juergen Ributzka0e913b12014-07-29 19:57:15 +0000332 encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
Lang Hamesa5216882014-07-17 18:54:50 +0000333 break;
334 }
335 case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
336 case MachO::ARM64_RELOC_PAGEOFF12: {
      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_PAGEOFF12 not supported");
      // Add the offset from the symbol.
      Value += RE.Addend;
      // Mask out the page address and only use the lower 12 bits.
      Value &= 0xFFF;
      encodeAddend(LocalAddress, /*Size=*/4, RelType, Value);
      break;
    }
    case MachO::ARM64_RELOC_SUBTRACTOR:
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21:
    case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12:
      llvm_unreachable("Relocation type not yet implemented!");
    case MachO::ARM64_RELOC_ADDEND:
      llvm_unreachable("ARM64_RELOC_ADDEND should have been handled by "
                       "processRelocationRef!");
    }
  }

  void finalizeSection(ObjectImage &ObjImg, unsigned SectionID,
                       const SectionRef &Section) {}

private:
  void processGOTRelocation(const RelocationEntry &RE,
                            RelocationValueRef &Value, StubMap &Stubs) {
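    // GOT-based relocations are redirected through an 8-byte GOT entry in the
    // section's stub area: the entry receives the target address via an
    // ARM64_RELOC_UNSIGNED relocation, and the original instruction is then
    // resolved against the address of that entry.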
    assert(RE.Size == 2);
    SectionEntry &Section = Sections[RE.SectionID];
    StubMap::const_iterator i = Stubs.find(Value);
    uint8_t *Addr;
    if (i != Stubs.end())
      Addr = Section.Address + i->second;
    else {
      // FIXME: There must be a better way to do this than to check and fix the
      // alignment every time!!!
      uintptr_t BaseAddress = uintptr_t(Section.Address);
      uintptr_t StubAlignment = getStubAlignment();
      uintptr_t StubAddress =
          (BaseAddress + Section.StubOffset + StubAlignment - 1) &
          -StubAlignment;
      unsigned StubOffset = StubAddress - BaseAddress;
      Stubs[Value] = StubOffset;
      assert(((StubAddress % getStubAlignment()) == 0) &&
             "GOT entry not aligned");
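      // Size is the log2 of the entry size in bytes, so 3 denotes an 8-byte
      // (pointer-sized) GOT slot.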
      RelocationEntry GOTRE(RE.SectionID, StubOffset,
                            MachO::ARM64_RELOC_UNSIGNED, Value.Addend,
                            /*IsPCRel=*/false, /*Size=*/3);
      if (Value.SymbolName)
        addRelocationForSymbol(GOTRE, Value.SymbolName);
      else
        addRelocationForSection(GOTRE, Value.SectionID);
      Section.StubOffset = StubOffset + getMaxStubSize();
      Addr = (uint8_t *)StubAddress;
    }
    RelocationEntry TargetRE(RE.SectionID, RE.Offset, RE.RelType, /*Addend=*/0,
                             RE.IsPCRel, RE.Size);
    resolveRelocation(TargetRE, (uint64_t)Addr);
  }
};
}

#undef DEBUG_TYPE

#endif // LLVM_RUNTIMEDYLDMACHOAARCH64_H