//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
//
// The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

10#include "ArchHandler.h"
11#include "Atoms.h"
12#include "MachONormalizedFileBinaryUtils.h"
13
14#include "llvm/ADT/StringRef.h"
15#include "llvm/ADT/StringSwitch.h"
16#include "llvm/ADT/Triple.h"
17
18#include "llvm/Support/ErrorHandling.h"
19
20using namespace llvm::MachO;
21using namespace lld::mach_o::normalized;
22
23namespace lld {
24namespace mach_o {
25
class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  const StubInfo &stubInfo() override { return _sStubInfo; }
  bool isCallSite(const Reference &) override;
  bool isNonCallBranch(const Reference &) override {
    return false;
  }

  bool isPointer(const Reference &) override;
  bool isPairedReloc(const normalized::Relocation &) override;

  bool needsCompactUnwind() override {
    return false;
  }
  Reference::KindValue imageOffsetKind() override {
    return invalid;
  }
  Reference::KindValue imageOffsetKindIndirect() override {
    return invalid;
  }

  Reference::KindValue unwindRefToFunctionKind() override {
    return delta32;
  }

  Reference::KindValue unwindRefToEhFrameKind() override {
    return invalid;
  }

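  // The value below is UNWIND_X86_MODE_DWARF in the compact unwind encoding,
  // i.e. "no compact encoding; consult the DWARF unwind info instead".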
  uint32_t dwarfCompactUnwindType() override {
    return 0x04000000U;
  }

  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  std::error_code
      getPairReferenceInfo(const normalized::Relocation &reloc1,
                           const normalized::Relocation &reloc2,
                           const DefinedAtom *inAtom,
                           uint32_t offsetInAtom,
                           uint64_t fixupAddress, bool swap,
                           FindAtomBySectionAndAddress atomFromAddress,
                           FindAtomBySymbolIndex atomFromSymbolIndex,
                           Reference::KindValue *kind,
                           const lld::Atom **target,
                           Reference::Addend *addend) override;

  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           FindAddressForAtom findSectionAddress,
                           uint64_t imageBaseAddress,
                           uint8_t *atomContentBuffer) override;

  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

  bool isDataInCodeTransition(Reference::KindValue refKind) override {
    switch (refKind) {
    case modeCode:
    case modeData:
      return true;
    default:
      return false;
    }
  }

  Reference::KindValue dataInCodeTransitionStart(
                                        const MachODefinedAtom &atom) override {
    return modeData;
  }

  Reference::KindValue dataInCodeTransitionEnd(
                                        const MachODefinedAtom &atom) override {
    return modeCode;
  }

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo _sStubInfo;

  enum : Reference::KindValue {
    invalid,               /// for error condition

    modeCode,              /// Content starting at this offset is code.
    modeData,              /// Content starting at this offset is data.

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .
    negDelta32,            /// ex: .long . - _foo

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  static bool useExternalRelocationTo(const Atom &target);

  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

  const bool _swap;
};

//===----------------------------------------------------------------------===//
//  ArchHandler_x86
//===----------------------------------------------------------------------===//

ArchHandler_x86::ArchHandler_x86() :
  _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}

ArchHandler_x86::~ArchHandler_x86() { }

const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(modeCode),
  LLD_KIND_STRING_ENTRY(modeData),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(negDelta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};

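// How lazy binding is wired up on x86 (roughly): each call site branches to a
// synthesized stub, which jumps through a lazy pointer.  That pointer
// initially targets a per-stub helper that pushes an offset into the
// lazy-binding info and branches to the common helper, which in turn pushes
// the ImageLoader cache pointer and jumps through a pointer to
// dyld_stub_binder to bind the symbol.  The table below supplies the code
// snippets and references the stubs pass uses to build those three pieces.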
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 },       // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },
  { false, 0, 0, 0 },

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },             // jmp helper-common
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 },
  { Reference::KindArch::x86, branch32, 6, 0 },

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,         // jmp *_fast_lazy_bind
    0x90 },                                     // nop
  { Reference::KindArch::x86, abs32, 1, 0 },
  { false, 0, 0, 0 },
  { Reference::KindArch::x86, abs32, 7, 0 },
  { false, 0, 0, 0 }
};

bool ArchHandler_x86::isCallSite(const Reference &ref) {
  return (ref.kindValue() == branch32);
}

bool ArchHandler_x86::isPointer(const Reference &ref) {
  return (ref.kindValue() == pointer32);
}

bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
  if (!reloc.scattered)
    return false;
  return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
         (reloc.type == GENERIC_RELOC_SECTDIFF);
}

std::error_code
ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
                                  const DefinedAtom *inAtom,
                                  uint32_t offsetInAtom,
                                  uint64_t fixupAddress, bool swap,
                                  FindAtomBySectionAndAddress atomFromAddress,
                                  FindAtomBySymbolIndex atomFromSymbolIndex,
                                  Reference::KindValue *kind,
                                  const lld::Atom **target,
                                  Reference::Addend *addend) {
  typedef std::error_code E;
  DefinedAtom::ContentPermissions perms;
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  uint64_t targetAddress;
  switch (relocPattern(reloc)) {
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
    // ex: call _foo (and _foo undefined)
    *kind = branch32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 4 + readS32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
    // ex: call _foo (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
    // ex: call _foo+n (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
    // ex: callw _foo (and _foo undefined)
    *kind = branch16;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 2 + readS16(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
    // ex: callw _foo (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
    // ex: callw _foo+n (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rExtern | rLength4:
    // ex: movl _foo, %eax (and _foo undefined)
    // ex: .long _foo      (and _foo undefined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = readU32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rLength4:
    // ex: movl _foo, %eax (and _foo defined)
    // ex: .long _foo      (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    targetAddress = readU32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
  case GENERIC_RELOC_VANILLA | rScattered | rLength4:
    // ex: .long _foo+n    (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = readU32(swap, fixupContent) - reloc.value;
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
  return std::error_code();
}

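// SECTDIFF relocations arrive as a pair: the first reloc's value holds the
// address of the target (the minuend), and the GENERIC_RELOC_PAIR that
// follows holds the "from" address (the subtrahend).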
std::error_code
ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
                                      const normalized::Relocation &reloc2,
                                      const DefinedAtom *inAtom,
                                      uint32_t offsetInAtom,
                                      uint64_t fixupAddress, bool swap,
                                      FindAtomBySectionAndAddress atomFromAddr,
                                      FindAtomBySymbolIndex atomFromSymbolIndex,
                                      Reference::KindValue *kind,
                                      const lld::Atom **target,
                                      Reference::Addend *addend) {
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  std::error_code ec;
  DefinedAtom::ContentPermissions perms = inAtom->permissions();
  uint32_t fromAddress;
  uint32_t toAddress;
  uint32_t value;
  const lld::Atom *fromTarget;
  Reference::Addend offsetInTo;
  Reference::Addend offsetInFrom;
  switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
  case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
  case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
    toAddress = reloc1.value;
    fromAddress = reloc2.value;
    value = readS32(swap, fixupContent);
    ec = atomFromAddr(0, toAddress, target, &offsetInTo);
    if (ec)
      return ec;
    ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
    if (ec)
      return ec;
    if (fromTarget != inAtom) {
      if (*target != inAtom)
        return make_dynamic_error_code(Twine("SECTDIFF relocation where "
                                             "neither target is in atom"));
      *kind = negDelta32;
      *addend = toAddress - value - fromAddress;
      *target = fromTarget;
    } else {
      if ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) {
        // SECTDIFF relocations are used in i386 codegen where the function
        // prolog does a CALL to the next instruction, which POPs the return
        // address into EBX, which becomes the pic-base register.  The POP
        // instruction is labeled, and that label is used as the subtrahend
        // in expressions.  The funcRel32 kind represents the 32-bit delta to
        // some symbol from the start of the function (atom) containing the
        // funcRel32.
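        // A typical pic-base prolog looks roughly like this (illustrative
        // sketch only, not actual compiler output):
        //       call  L1
        //   L1: popl  %ebx                  # pic base = address of L1
        //       movl  _foo-L1(%ebx), %eax   # funcRel32 reference to _foo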
        *kind = funcRel32;
        uint32_t ta = fromAddress + value - toAddress;
        *addend = ta - offsetInFrom;
      } else {
        *kind = delta32;
        *addend = fromAddress + value - toAddress;
      }
    }
    return std::error_code();
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
}

void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
                                          bool relocatable,
                                          FindAddressForAtom findAddress,
                                          FindAddressForAtom findSectionAddress,
                                          uint64_t imageBaseAddress,
                                          uint8_t *atomContentBuffer) {
  // Copy raw bytes.
  memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
  // Apply fix-ups.
  for (const Reference *ref : atom) {
    uint32_t offset = ref->offsetInAtom();
    const Atom *target = ref->target();
    uint64_t targetAddress = 0;
    if (isa<DefinedAtom>(target))
      targetAddress = findAddress(*target);
    uint64_t atomAddress = findAddress(atom);
    uint64_t fixupAddress = atomAddress + offset;
    if (relocatable) {
      applyFixupRelocatable(*ref, &atomContentBuffer[offset],
                            fixupAddress, targetAddress,
                            atomAddress);
    } else {
      applyFixupFinal(*ref, &atomContentBuffer[offset],
                      fixupAddress, targetAddress,
                      atomAddress);
    }
  }
}

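// applyFixupFinal() writes fully resolved addresses for a final linked image.
// applyFixupRelocatable() below instead writes the values that pair with the
// section relocations emitted by appendSectionRelocations() for relocatable
// (-r style) output.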
void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
                                      uint64_t fixupAddress,
                                      uint64_t targetAddress,
                                      uint64_t inAtomAddress) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  switch (ref.kindValue()) {
  case branch32:
    write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
    break;
  case branch16:
    write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
  }
}

void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                            uint8_t *location,
                                            uint64_t fixupAddress,
                                            uint64_t targetAddress,
                                            uint64_t inAtomAddress) {
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc)
      write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
    else
      write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
    break;
  case branch16:
    if (useExternalReloc)
      write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
    else
      write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
  }
}

bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
  // Undefined symbols are referenced via external relocations.
  if (isa<UndefinedAtom>(&target))
    return true;
  if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
    switch (defAtom->merge()) {
    case DefinedAtom::mergeAsTentative:
      // Tentative definitions are referenced via external relocations.
      return true;
    case DefinedAtom::mergeAsWeak:
    case DefinedAtom::mergeAsWeakAndAddressUsed:
      // Global weak-defs are referenced via external relocations.
      return (defAtom->scope() == DefinedAtom::scopeGlobal);
    default:
      break;
    }
  }
  // Everything else is referenced via an internal relocation.
  return false;
}

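// For relocatable output, each reference is turned back into a section
// relocation: references to undefined, tentative, or global weak-def atoms
// become external relocs (symbol index); references with a non-zero addend
// become scattered relocs (target address); other vanilla references become
// plain local relocs (section index); and the delta-style kinds (funcRel32,
// delta32, negDelta32) become SECTDIFF/PAIR scattered reloc pairs.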
void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case modeCode:
  case modeData:
    break;
  case branch32:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rLength4);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rLength4);
    }
    break;
  case funcRel32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case delta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                                          ref.offsetInAtom(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case negDelta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                                          ref.offsetInAtom(),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
  default:
    llvm_unreachable("unknown x86 Reference Kind");
  }
}

std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
  return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
}

} // namespace mach_o
} // namespace lld