//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
//
// The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "ArchHandler.h"
#include "Atoms.h"
#include "MachONormalizedFileBinaryUtils.h"

#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/Triple.h"

#include "llvm/Support/ErrorHandling.h"

using namespace llvm::MachO;
using namespace lld::mach_o::normalized;

namespace lld {
namespace mach_o {

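/// ArchHandler subclass for 32-bit x86 (i386) mach-o files: it translates
/// i386 relocations to and from Reference kinds and applies the
/// corresponding fixups to atom content.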
class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  const StubInfo &stubInfo() override { return _sStubInfo; }
  bool isCallSite(const Reference &) override;
  bool isPointer(const Reference &) override;
  bool isPairedReloc(const normalized::Relocation &) override;
  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  std::error_code
      getPairReferenceInfo(const normalized::Relocation &reloc1,
                           const normalized::Relocation &reloc2,
                           const DefinedAtom *inAtom,
                           uint32_t offsetInAtom,
                           uint64_t fixupAddress, bool swap,
                           FindAtomBySectionAndAddress atomFromAddress,
                           FindAtomBySymbolIndex atomFromSymbolIndex,
                           Reference::KindValue *kind,
                           const lld::Atom **target,
                           Reference::Addend *addend) override;

  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           uint8_t *atomContentBuffer) override;

  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo _sStubInfo;

  enum : Reference::KindValue {
    invalid,               /// for error condition

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .
    negDelta32,            /// ex: .long . - _foo

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  static bool useExternalRelocationTo(const Atom &target);

  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

  const bool _swap;
};

//===----------------------------------------------------------------------===//
//  ArchHandler_x86
//===----------------------------------------------------------------------===//

ArchHandler_x86::ArchHandler_x86() :
  _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}

ArchHandler_x86::~ArchHandler_x86() { }

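// Table mapping each Reference kind value above to a printable name, so
// kinds can be reported and looked up by name through the Registry.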
const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(negDelta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};

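// Byte templates and reference kinds used when synthesizing i386 lazy-binding
// stubs: the stub itself, its helper, and the common helper that jumps
// through the binder pointer to dyld_stub_binder.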
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 },       // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },             // jmp helperhelper
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 },
  { Reference::KindArch::x86, branch32, 6, 0 },

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,         // jmp *_fast_lazy_bind
    0x90 },                                     // nop
  { Reference::KindArch::x86, abs32, 1, 0 },
  { Reference::KindArch::x86, abs32, 7, 0 }
};

bool ArchHandler_x86::isCallSite(const Reference &ref) {
  return (ref.kindValue() == branch32);
}

bool ArchHandler_x86::isPointer(const Reference &ref) {
  return (ref.kindValue() == pointer32);
}

bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
  if (!reloc.scattered)
    return false;
  return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
         (reloc.type == GENERIC_RELOC_SECTDIFF);
}

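// Decode one (non-paired) i386 relocation record into a Reference: choose the
// kind from the relocation pattern, resolve the target atom either by symbol
// index or by address, and recover the addend from the instruction bytes.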
std::error_code
ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
                                  const DefinedAtom *inAtom,
                                  uint32_t offsetInAtom,
                                  uint64_t fixupAddress, bool swap,
                                  FindAtomBySectionAndAddress atomFromAddress,
                                  FindAtomBySymbolIndex atomFromSymbolIndex,
                                  Reference::KindValue *kind,
                                  const lld::Atom **target,
                                  Reference::Addend *addend) {
  typedef std::error_code E;
  DefinedAtom::ContentPermissions perms;
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  uint64_t targetAddress;
  switch (relocPattern(reloc)) {
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
    // ex: call _foo (and _foo undefined)
    *kind = branch32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 4 + readS32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
    // ex: call _foo (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
    // ex: call _foo+n (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
    // ex: callw _foo (and _foo undefined)
    *kind = branch16;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 2 + readS16(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
    // ex: callw _foo (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
    // ex: callw _foo+n (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rExtern | rLength4:
    // ex: movl _foo, %eax (and _foo undefined)
    // ex: .long _foo (and _foo undefined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = readU32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rLength4:
    // ex: movl _foo, %eax (and _foo defined)
    // ex: .long _foo (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    targetAddress = readU32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rLength4:
    // ex: .long _foo+n (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = readU32(swap, fixupContent) - reloc.value;
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
  return std::error_code();
}

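// Decode a scattered SECTDIFF/PAIR relocation pair, which encodes the
// difference between two addresses. Depending on which side lies inside this
// atom and whether the atom is code, the pair becomes a funcRel32, delta32,
// or negDelta32 Reference.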
std::error_code
ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
                                      const normalized::Relocation &reloc2,
                                      const DefinedAtom *inAtom,
                                      uint32_t offsetInAtom,
                                      uint64_t fixupAddress, bool swap,
                                      FindAtomBySectionAndAddress atomFromAddr,
                                      FindAtomBySymbolIndex atomFromSymbolIndex,
                                      Reference::KindValue *kind,
                                      const lld::Atom **target,
                                      Reference::Addend *addend) {
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  std::error_code ec;
  DefinedAtom::ContentPermissions perms = inAtom->permissions();
  uint32_t fromAddress;
  uint32_t toAddress;
  uint32_t value;
  const lld::Atom *fromTarget;
  Reference::Addend offsetInTo;
  Reference::Addend offsetInFrom;
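  // Pack both relocation patterns into one 32-bit key so the pair can be
  // matched with a single switch.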
  switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
  case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
  case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
    toAddress = reloc1.value;
    fromAddress = reloc2.value;
    value = readS32(swap, fixupContent);
    ec = atomFromAddr(0, toAddress, target, &offsetInTo);
    if (ec)
      return ec;
    ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
    if (ec)
      return ec;
    if (fromTarget != inAtom) {
      if (*target != inAtom)
        return make_dynamic_error_code(Twine("SECTDIFF relocation where "
                                             "neither target is in atom"));
      *kind = negDelta32;
      *addend = toAddress - value - fromAddress;
      *target = fromTarget;
    } else {
      if ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) {
        // SECTDIFF relocations are used in i386 codegen where the function
        // prolog does a CALL to the next instruction which POPs the return
        // address into EBX, which becomes the pic-base register. The POP
        // instruction is labeled, and that label is used as the subtrahend
        // in expressions. The funcRel32 kind represents the 32-bit delta to
        // some symbol from the start of the function (atom) containing the
        // funcRel32.
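        // Illustrative sketch of the idiom (not taken from any particular
        // object file):
        //        call  L1
        //   L1:  popl  %ebx                 ; L1 becomes the pic base
        //        movl  _foo-L1(%ebx), %eax  ; fixed up as funcRel32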
        *kind = funcRel32;
        uint32_t ta = fromAddress + value - toAddress;
        *addend = ta - offsetInFrom;
      } else {
        *kind = delta32;
        *addend = fromAddress + value - toAddress;
      }
    }
    return std::error_code();
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
}

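// Produce the atom's bytes for the output file: copy the raw content, then
// rewrite each fixup location, using the relocatable form when emitting an
// object file and the final form when emitting a linked image.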
void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
                                          bool relocatable,
                                          FindAddressForAtom findAddress,
                                          uint8_t *atomContentBuffer) {
  // Copy raw bytes.
  memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
  // Apply fix-ups.
  for (const Reference *ref : atom) {
    uint32_t offset = ref->offsetInAtom();
    const Atom *target = ref->target();
    uint64_t targetAddress = 0;
    if (isa<DefinedAtom>(target))
      targetAddress = findAddress(*target);
    uint64_t atomAddress = findAddress(atom);
    uint64_t fixupAddress = atomAddress + offset;
    if (relocatable) {
      applyFixupRelocatable(*ref, &atomContentBuffer[offset],
                            fixupAddress, targetAddress,
                            atomAddress);
    } else {
      applyFixupFinal(*ref, &atomContentBuffer[offset],
                      fixupAddress, targetAddress,
                      atomAddress);
    }
  }
}

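// Patch one fixup location for a final (fully linked) image: every target
// address is known, so each Reference kind is written as its resolved value.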
void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
                                      uint64_t fixupAddress,
                                      uint64_t targetAddress,
                                      uint64_t inAtomAddress) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  switch (ref.kindValue()) {
  case branch32:
    write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
    break;
  case branch16:
    write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

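// Patch one fixup location for relocatable (-r) output: write only the part
// of the value that the matching relocation record does not carry. For
// external relocations the target's address is left out, since the
// relocation names the symbol instead.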
void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                            uint8_t *location,
                                            uint64_t fixupAddress,
                                            uint64_t targetAddress,
                                            uint64_t inAtomAddress) {
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc)
      write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
    else
      write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
    break;
  case branch16:
    if (useExternalReloc)
      write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
    else
      write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

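// Decide whether a reference to 'target' must be emitted as an external
// (symbol-indexed) relocation rather than a section-relative one.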
bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
  // Undefined symbols are referenced via external relocations.
  if (isa<UndefinedAtom>(&target))
    return true;
  if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
    switch (defAtom->merge()) {
    case DefinedAtom::mergeAsTentative:
      // Tentative definitions are referenced via external relocations.
      return true;
    case DefinedAtom::mergeAsWeak:
    case DefinedAtom::mergeAsWeakAndAddressUsed:
      // Global weak-defs are referenced via external relocations.
      return (defAtom->scope() == DefinedAtom::scopeGlobal);
    default:
      break;
    }
  }
  // Everything else is referenced via an internal relocation.
  return false;
}

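// Translate one Reference back into normalized relocation records for the
// object-file writer. The SECTDIFF-style kinds (funcRel32, delta32,
// negDelta32) emit a scattered SECTDIFF/PAIR pair; the branch and pointer
// kinds emit a single GENERIC_RELOC_VANILLA record, external, scattered, or
// section-based as appropriate.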
void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc)
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rLength4);
    else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rLength4);
    }
    break;
  case funcRel32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case delta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                                                          ref.offsetInAtom(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case negDelta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                                                          ref.offsetInAtom(),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
    break;
  default:
    llvm_unreachable("unknown x86 Reference Kind");
    break;
  }
}

std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
  return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
}

} // namespace mach_o
} // namespace lld