blob: b54c00eb409ad6c6d8bd9d8f3a4935e11e3cac5a [file] [log] [blame]
Nick Kledzik2458bec2014-07-16 19:49:02 +00001//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
2//
3// The LLVM Linker
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9
10#include "ArchHandler.h"
11#include "Atoms.h"
12#include "MachONormalizedFileBinaryUtils.h"
13
14#include "llvm/ADT/StringRef.h"
15#include "llvm/ADT/StringSwitch.h"
16#include "llvm/ADT/Triple.h"
17
18#include "llvm/Support/ErrorHandling.h"
19
20using namespace llvm::MachO;
21using namespace lld::mach_o::normalized;
22
23namespace lld {
24namespace mach_o {
25
/// ArchHandler subclass for 32-bit x86 (i386) mach-o files.  Translates
/// between normalized mach-o relocations and lld Reference kinds, and
/// applies fixups when atom content is generated.
class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  /// Name table for this arch's Reference kind values.
  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  /// Byte templates and reference kinds used to synthesize i386 stubs.
  const StubInfo &stubInfo() override { return _sStubInfo; }
  /// True if the reference is a 32-bit pc-relative branch (branch32).
  bool isCallSite(const Reference &) override;
  /// True if the reference is a 32-bit absolute pointer (pointer32).
  bool isPointer(const Reference &) override;
  /// True if this relocation is the first of a SECTDIFF/PAIR pair.
  bool isPairedReloc(const normalized::Relocation &) override;
  /// Decode one (unpaired) relocation into kind/target/addend.
  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  /// Decode a two-relocation pair (SECTDIFF + PAIR) into kind/target/addend.
  std::error_code
  getPairReferenceInfo(const normalized::Relocation &reloc1,
                       const normalized::Relocation &reloc2,
                       const DefinedAtom *inAtom,
                       uint32_t offsetInAtom,
                       uint64_t fixupAddress, bool swap,
                       FindAtomBySectionAndAddress atomFromAddress,
                       FindAtomBySymbolIndex atomFromSymbolIndex,
                       Reference::KindValue *kind,
                       const lld::Atom **target,
                       Reference::Addend *addend) override;

  /// Copy the atom's raw bytes into \p atomContentBuffer and apply all
  /// fixups (final-image form or relocatable-object form).
  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           uint8_t *atomContentBuffer) override;

  /// Emit the normalized relocation(s) describing one Reference when
  /// writing a relocatable (.o) output file.
  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo _sStubInfo;

  /// Reference kind values for Reference::KindArch::x86.
  enum : Reference::KindValue {
    invalid,               /// for error condition

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  /// True if references to \p target must be encoded as external (symbol
  /// based) relocations rather than internal (section based) ones.
  static bool useExternalRelocationTo(const Atom &target);

  /// Write the fixup bytes for a final linked image.
  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  /// Write the fixup bytes for a relocatable (.o) output.
  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

  /// True when host endianness differs from x86's, so byte values must be
  /// swapped on read/write.
  const bool _swap;
};
105
106//===----------------------------------------------------------------------===//
107// ArchHandler_x86
108//===----------------------------------------------------------------------===//
109
110ArchHandler_x86::ArchHandler_x86() :
111 _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}
112
113ArchHandler_x86::~ArchHandler_x86() { }
114
// Human-readable name for every x86 Reference kind value, exposed through
// kindStrings().  Entry order matches the enum declaration above.
const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};
127
// Templates the Stubs pass uses to synthesize i386 stubs, lazy pointers,
// and the stub-helper trampolines.  Each { arch, kind, offset, addend }
// tuple describes a Reference to be placed at the given offset inside the
// preceding byte template.  (Exact field layout is defined by
// ArchHandler::StubInfo — NOTE(review): confirm against ArchHandler.h.)
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 }, // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },       // fixup at disp32 of jmp

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,         // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },       // jmp helperhelper
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 }, // pushl imm32
  { Reference::KindArch::x86, branch32, 6, 0 },              // jmp rel32

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,         // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,   // jmp *_fast_lazy_bind
    0x90 },                               // nop
  { Reference::KindArch::x86, abs32, 1, 0 },       // pushl imm32
  { Reference::KindArch::x86, abs32, 7, 0 }        // jmp-indirect address
};
161
162bool ArchHandler_x86::isCallSite(const Reference &ref) {
163 return (ref.kindValue() == branch32);
164}
165
166bool ArchHandler_x86::isPointer(const Reference &ref) {
167 return (ref.kindValue() == pointer32);
168}
169
170bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
171 if (!reloc.scattered)
172 return false;
173 return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
174 (reloc.type == GENERIC_RELOC_SECTDIFF);
175}
176
177std::error_code
178ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
179 const DefinedAtom *inAtom,
180 uint32_t offsetInAtom,
181 uint64_t fixupAddress, bool swap,
182 FindAtomBySectionAndAddress atomFromAddress,
183 FindAtomBySymbolIndex atomFromSymbolIndex,
184 Reference::KindValue *kind,
185 const lld::Atom **target,
186 Reference::Addend *addend) {
187 typedef std::error_code E;
188 DefinedAtom::ContentPermissions perms;
189 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
190 uint64_t targetAddress;
191 switch (relocPattern(reloc)) {
192 case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
193 // ex: call _foo (and _foo undefined)
194 *kind = branch32;
195 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
196 return ec;
197 *addend = fixupAddress + 4 + readS32(swap, fixupContent);
198 break;
199 case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
200 // ex: call _foo (and _foo defined)
201 *kind = branch32;
202 targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
203 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
204 break;
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000205 case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
206 // ex: call _foo+n (and _foo defined)
207 *kind = branch32;
208 targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
209 if (E ec = atomFromAddress(0, reloc.value, target, addend))
210 return ec;
211 *addend = targetAddress - reloc.value;
212 break;
Nick Kledzik2458bec2014-07-16 19:49:02 +0000213 case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
214 // ex: callw _foo (and _foo undefined)
215 *kind = branch16;
216 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
217 return ec;
218 *addend = fixupAddress + 2 + readS16(swap, fixupContent);
219 break;
220 case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
221 // ex: callw _foo (and _foo defined)
222 *kind = branch16;
223 targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
224 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
225 break;
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000226 case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
227 // ex: callw _foo+n (and _foo defined)
228 *kind = branch16;
229 targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
230 if (E ec = atomFromAddress(0, reloc.value, target, addend))
231 return ec;
232 *addend = targetAddress - reloc.value;
233 break;
Nick Kledzik2458bec2014-07-16 19:49:02 +0000234 case GENERIC_RELOC_VANILLA | rExtern | rLength4:
235 // ex: movl _foo, %eax (and _foo undefined)
236 // ex: .long _foo (and _foo undefined)
237 perms = inAtom->permissions();
238 *kind =
239 ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
240 : pointer32;
241 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
242 return ec;
243 *addend = readU32(swap, fixupContent);
244 break;
245 case GENERIC_RELOC_VANILLA | rLength4:
246 // ex: movl _foo, %eax (and _foo defined)
247 // ex: .long _foo (and _foo defined)
248 perms = inAtom->permissions();
249 *kind =
250 ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
251 : pointer32;
252 targetAddress = readU32(swap, fixupContent);
253 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
254 break;
Nick Kledzik7e246a42014-07-18 01:05:35 +0000255 case GENERIC_RELOC_VANILLA | rScattered | rLength4:
256 // ex: .long _foo+n (and _foo defined)
257 perms = inAtom->permissions();
258 *kind =
259 ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
260 : pointer32;
261 if (E ec = atomFromAddress(0, reloc.value, target, addend))
262 return ec;
263 *addend = readU32(swap, fixupContent) - reloc.value;
264 break;
Nick Kledzik2458bec2014-07-16 19:49:02 +0000265 default:
266 return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
267 }
268 return std::error_code();
269}
270
271std::error_code
272ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
273 const normalized::Relocation &reloc2,
274 const DefinedAtom *inAtom,
275 uint32_t offsetInAtom,
276 uint64_t fixupAddress, bool swap,
277 FindAtomBySectionAndAddress atomFromAddr,
278 FindAtomBySymbolIndex atomFromSymbolIndex,
279 Reference::KindValue *kind,
280 const lld::Atom **target,
281 Reference::Addend *addend) {
282 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
283 std::error_code ec;
284 DefinedAtom::ContentPermissions perms = inAtom->permissions();
285 uint32_t fromAddress;
286 uint32_t toAddress;
287 uint32_t value;
288 const lld::Atom *fromTarget;
289 Reference::Addend offsetInTo;
290 Reference::Addend offsetInFrom;
291 switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
Nick Kledzik2d432352014-07-17 23:16:21 +0000292 case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
293 GENERIC_RELOC_PAIR | rScattered | rLength4):
294 case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
295 GENERIC_RELOC_PAIR | rScattered | rLength4):
Nick Kledzik2458bec2014-07-16 19:49:02 +0000296 toAddress = reloc1.value;
297 fromAddress = reloc2.value;
298 value = readS32(swap, fixupContent);
299 ec = atomFromAddr(0, toAddress, target, &offsetInTo);
300 if (ec)
301 return ec;
302 ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
303 if (ec)
304 return ec;
305 if (fromTarget != inAtom)
306 return make_dynamic_error_code(Twine("SECTDIFF relocation where "
307 "subtrahend label is not in atom"));
308 *kind = ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? funcRel32
309 : delta32;
310 if (*kind == funcRel32) {
311 // SECTDIFF relocations are used in i386 codegen where the function
312 // prolog does a CALL to the next instruction which POPs the return
313 // address into EBX which becomes the pic-base register. The POP
314 // instruction is label the used for the subtrahend in expressions.
315 // The funcRel32 kind represents the 32-bit delta to some symbol from
316 // the start of the function (atom) containing the funcRel32.
317 uint32_t ta = fromAddress + value - toAddress;
318 *addend = ta - offsetInFrom;
319 } else {
320 *addend = fromAddress + value - toAddress;
321 }
322 return std::error_code();
323 break;
324 default:
325 return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
326 }
327}
328
Nick Kledzik2d432352014-07-17 23:16:21 +0000329void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
330 bool relocatable,
331 FindAddressForAtom findAddress,
332 uint8_t *atomContentBuffer) {
333 // Copy raw bytes.
334 memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
335 // Apply fix-ups.
336 for (const Reference *ref : atom) {
337 uint32_t offset = ref->offsetInAtom();
338 const Atom *target = ref->target();
339 uint64_t targetAddress = 0;
340 if (isa<DefinedAtom>(target))
341 targetAddress = findAddress(*target);
342 uint64_t atomAddress = findAddress(atom);
343 uint64_t fixupAddress = atomAddress + offset;
344 if (relocatable) {
345 applyFixupRelocatable(*ref, &atomContentBuffer[offset],
346 fixupAddress, targetAddress,
347 atomAddress);
348 } else {
349 applyFixupFinal(*ref, &atomContentBuffer[offset],
350 fixupAddress, targetAddress,
351 atomAddress);
352 }
353 }
354}
355
/// Write the bytes for one fixup in a final linked image.
/// \p location points into the output buffer, \p fixupAddress is the
/// runtime address of those bytes, \p targetAddress / \p inAtomAddress are
/// the runtime addresses of the reference's target and enclosing atom.
/// Non-mach-o reference kinds (e.g. layout references) are ignored.
void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
                                      uint64_t fixupAddress,
                                      uint64_t targetAddress,
                                      uint64_t inAtomAddress) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  switch (ref.kindValue()) {
  case branch32:
    // i386 pc-relative displacements are measured from the end of the
    // 4-byte immediate, hence fixupAddress + 4.
    write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
    break;
  case branch16:
    // 16-bit branch: displacement measured from end of 2-byte immediate.
    write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    // Absolute 32-bit address of the target.
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    // Delta from the start of the enclosing atom (pic-base idiom).
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
    break;
  case delta32:
    // Delta from the fixup location itself.
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    // Filled in elsewhere (lazy-binding machinery); nothing to write here.
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}
391
/// Write the bytes for one fixup when emitting a relocatable (.o) file.
/// Differs from applyFixupFinal for branches that will carry an external
/// relocation: there the stored displacement encodes only the addend
/// (the linker consuming the .o resolves the symbol), so targetAddress is
/// not baked in.
void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                            uint8_t *location,
                                            uint64_t fixupAddress,
                                            uint64_t targetAddress,
                                            uint64_t inAtomAddress) {
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc)
      // External reloc: encode addend only; symbol resolved later.
      write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
    else
      // Internal reloc: encode full pc-relative displacement.
      write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
    break;
  case branch16:
    if (useExternalReloc)
      write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
    else
      write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    // NOTE(review): does not special-case external relocations like the
    // branch kinds above — the pre-existing FIXME below flags this.
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    // Lazy kinds only exist after the Stubs pass; no bytes to write in .o.
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}
432
433bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
434 // Undefined symbols are referenced via external relocations.
435 if (isa<UndefinedAtom>(&target))
436 return true;
437 if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
438 switch (defAtom->merge()) {
439 case DefinedAtom::mergeAsTentative:
440 // Tentative definitions are referenced via external relocations.
441 return true;
442 case DefinedAtom::mergeAsWeak:
443 case DefinedAtom::mergeAsWeakAndAddressUsed:
444 // Global weak-defs are referenced via external relocations.
445 return (defAtom->scope() == DefinedAtom::scopeGlobal);
446 default:
447 break;
448 }
449 }
450 // Everything else is reference via an internal relocation.
451 return false;
452}
453
454
/// Append the normalized relocation(s) describing \p ref when writing a
/// relocatable (.o) output file.  Each Reference maps to one vanilla
/// relocation (external, scattered, or section based) except the
/// SECTDIFF-style kinds, which emit a SECTDIFF + PAIR pair.
void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc) {
      // Symbol-based relocation.
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      if (ref.addend() != 0)
        // Non-zero addend requires a scattered reloc carrying the
        // target's address.
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4);
      else
        // Plain section-based relocation.
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc)
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rLength4);
    else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rLength4);
    }
    break;
  case funcRel32:
    // Pair: SECTDIFF names the minuend (target), PAIR names the
    // subtrahend.  The subtrahend is the atom address minus the addend —
    // presumably the pic-base label; see getPairReferenceInfo.
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case delta32:
    // Pair where the subtrahend is the fixup location itself.
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                ref.offsetInAtom(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
    break;
  default:
    llvm_unreachable("unknown x86 Reference Kind");
    break;

  }
}
532
533
Nick Kledzik2458bec2014-07-16 19:49:02 +0000534std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
535 return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
536}
537
538} // namespace mach_o
539} // namespace lld