//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
//
//                             The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "ArchHandler.h"
#include "Atoms.h"
#include "MachONormalizedFileBinaryUtils.h"

#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/Triple.h"

#include "llvm/Support/ErrorHandling.h"

using namespace llvm::MachO;
using namespace lld::mach_o::normalized;

namespace lld {
namespace mach_o {

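/// ArchHandler_x86 implements the x86 (i386) specific logic: it translates
/// normalized mach-o relocations to and from lld References, applies fixups
/// to atom content, and supplies the templates used to synthesize
/// lazy-binding stubs.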
class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  const StubInfo &stubInfo() override { return _sStubInfo; }
  bool isCallSite(const Reference &) override;
  bool isNonCallBranch(const Reference &) override {
    return false;
  }

  bool isPointer(const Reference &) override;
  bool isPairedReloc(const normalized::Relocation &) override;

  bool needsCompactUnwind() override {
    return false;
  }
  Reference::KindValue imageOffsetKind() override {
    return invalid;
  }
  Reference::KindValue imageOffsetKindIndirect() override {
    return invalid;
  }

  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  std::error_code
      getPairReferenceInfo(const normalized::Relocation &reloc1,
                           const normalized::Relocation &reloc2,
                           const DefinedAtom *inAtom,
                           uint32_t offsetInAtom,
                           uint64_t fixupAddress, bool swap,
                           FindAtomBySectionAndAddress atomFromAddress,
                           FindAtomBySymbolIndex atomFromSymbolIndex,
                           Reference::KindValue *kind,
                           const lld::Atom **target,
                           Reference::Addend *addend) override;

  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           uint64_t imageBaseAddress,
                           uint8_t *atomContentBuffer) override;

  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

  bool isDataInCodeTransition(Reference::KindValue refKind) override {
    switch (refKind) {
    case modeCode:
    case modeData:
      return true;
    default:
      return false;
    }
  }

  Reference::KindValue dataInCodeTransitionStart(
                                       const MachODefinedAtom &atom) override {
    return modeData;
  }

  Reference::KindValue dataInCodeTransitionEnd(
                                       const MachODefinedAtom &atom) override {
    return modeCode;
  }

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo              _sStubInfo;

  enum : Reference::KindValue {
    invalid,               /// for error condition

    modeCode,              /// Content starting at this offset is code.
    modeData,              /// Content starting at this offset is data.

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .
    negDelta32,            /// ex: .long . - _foo

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  static bool useExternalRelocationTo(const Atom &target);

  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

  const bool _swap;
};

//===----------------------------------------------------------------------===//
//  ArchHandler_x86
//===----------------------------------------------------------------------===//

ArchHandler_x86::ArchHandler_x86() :
  _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}

ArchHandler_x86::~ArchHandler_x86() { }

const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(modeCode),
  LLD_KIND_STRING_ENTRY(modeData),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(negDelta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};

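// Templates for the i386 lazy-binding machinery: the stub, the stub helper,
// and the stub-helper-common code, together with the references that tie
// those snippets to the lazy pointer and to dyld_stub_binder.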
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 },       // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },
  { false, 0, 0, 0 },

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },             // jmp helperhelper
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 },
  { Reference::KindArch::x86, branch32, 6, 0 },

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,         // jmp *_fast_lazy_bind
    0x90 },                                     // nop
  { Reference::KindArch::x86, abs32, 1, 0 },
  { false, 0, 0, 0 },
  { Reference::KindArch::x86, abs32, 7, 0 },
  { false, 0, 0, 0 }
};

bool ArchHandler_x86::isCallSite(const Reference &ref) {
  return (ref.kindValue() == branch32);
}

bool ArchHandler_x86::isPointer(const Reference &ref) {
  return (ref.kindValue() == pointer32);
}

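// Scattered SECTDIFF relocations arrive as a pair (SECTDIFF followed by PAIR),
// so they are handled by getPairReferenceInfo() below.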
bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
  if (!reloc.scattered)
    return false;
  return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
         (reloc.type == GENERIC_RELOC_SECTDIFF);
}

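// Convert one normalized mach-o relocation into an lld Reference: pick the
// kind from the relocation pattern and the section permissions, then resolve
// the target atom and addend.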
std::error_code
ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
                                  const DefinedAtom *inAtom,
                                  uint32_t offsetInAtom,
                                  uint64_t fixupAddress, bool swap,
                                  FindAtomBySectionAndAddress atomFromAddress,
                                  FindAtomBySymbolIndex atomFromSymbolIndex,
                                  Reference::KindValue *kind,
                                  const lld::Atom **target,
                                  Reference::Addend *addend) {
  typedef std::error_code E;
  DefinedAtom::ContentPermissions perms;
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  uint64_t targetAddress;
  switch (relocPattern(reloc)) {
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
    // ex: call _foo (and _foo undefined)
    *kind = branch32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 4 + readS32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
    // ex: call _foo (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
    // ex: call _foo+n (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
    // ex: callw _foo (and _foo undefined)
    *kind = branch16;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 2 + readS16(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
    // ex: callw _foo (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
    // ex: callw _foo+n (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rExtern | rLength4:
    // ex: movl _foo, %eax (and _foo undefined)
    // ex: .long _foo (and _foo undefined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = readU32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rLength4:
    // ex: movl _foo, %eax (and _foo defined)
    // ex: .long _foo (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    targetAddress = readU32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rLength4:
    // ex: .long _foo+n (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = readU32(swap, fixupContent) - reloc.value;
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
  return std::error_code();
}

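// Convert a scattered SECTDIFF/PAIR relocation pair into a single Reference.
// The fixup encodes the difference between two addresses; depending on which
// side refers to the containing atom and whether the section is executable,
// the result is a funcRel32, delta32, or negDelta32 reference.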
std::error_code
ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
                                      const normalized::Relocation &reloc2,
                                      const DefinedAtom *inAtom,
                                      uint32_t offsetInAtom,
                                      uint64_t fixupAddress, bool swap,
                                      FindAtomBySectionAndAddress atomFromAddr,
                                      FindAtomBySymbolIndex atomFromSymbolIndex,
                                      Reference::KindValue *kind,
                                      const lld::Atom **target,
                                      Reference::Addend *addend) {
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  std::error_code ec;
  DefinedAtom::ContentPermissions perms = inAtom->permissions();
  uint32_t fromAddress;
  uint32_t toAddress;
  uint32_t value;
  const lld::Atom *fromTarget;
  Reference::Addend offsetInTo;
  Reference::Addend offsetInFrom;
  switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
  case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
  case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
    toAddress = reloc1.value;
    fromAddress = reloc2.value;
    value = readS32(swap, fixupContent);
    ec = atomFromAddr(0, toAddress, target, &offsetInTo);
    if (ec)
      return ec;
    ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
    if (ec)
      return ec;
    if (fromTarget != inAtom) {
      if (*target != inAtom)
        return make_dynamic_error_code(Twine("SECTDIFF relocation where "
                                             "neither target is in atom"));
      *kind = negDelta32;
      *addend = toAddress - value - fromAddress;
      *target = fromTarget;
    } else {
      if ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) {
        // SECTDIFF relocations are used in i386 codegen where the function
        // prologue does a CALL to the next instruction, which POPs the return
        // address into EBX to establish the pic-base register. The label on
        // that POP instruction is then used as the subtrahend in expressions.
        // The funcRel32 kind represents the 32-bit delta to some symbol from
        // the start of the function (atom) containing the funcRel32.
        *kind = funcRel32;
        uint32_t ta = fromAddress + value - toAddress;
        *addend = ta - offsetInFrom;
      } else {
        *kind = delta32;
        *addend = fromAddress + value - toAddress;
      }
    }
    return std::error_code();
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
}

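// Copy an atom's raw bytes into the output buffer, then apply every fixup,
// using the relocatable form for -r output and the final form otherwise.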
void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
                                          bool relocatable,
                                          FindAddressForAtom findAddress,
                                          uint64_t imageBaseAddress,
                                          uint8_t *atomContentBuffer) {
  // Copy raw bytes.
  memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
  // Apply fix-ups.
  for (const Reference *ref : atom) {
    uint32_t offset = ref->offsetInAtom();
    const Atom *target = ref->target();
    uint64_t targetAddress = 0;
    if (isa<DefinedAtom>(target))
      targetAddress = findAddress(*target);
    uint64_t atomAddress = findAddress(atom);
    uint64_t fixupAddress = atomAddress + offset;
    if (relocatable) {
      applyFixupRelocatable(*ref, &atomContentBuffer[offset],
                            fixupAddress, targetAddress,
                            atomAddress);
    } else {
      applyFixupFinal(*ref, &atomContentBuffer[offset],
                      fixupAddress, targetAddress,
                      atomAddress);
    }
  }
}

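// Patch one location for a final linked image, where all target addresses are
// fully resolved.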
void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
                                      uint64_t fixupAddress,
                                      uint64_t targetAddress,
                                      uint64_t inAtomAddress) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  switch (ref.kindValue()) {
  case branch32:
    write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
    break;
  case branch16:
    write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

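// Patch one location for -r (relocatable) output. References that will be
// expressed as external relocations write only the addend into the
// instruction; the target is carried by the relocation record instead.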
void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                            uint8_t *location,
                                            uint64_t fixupAddress,
                                            uint64_t targetAddress,
                                            uint64_t inAtomAddress) {
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc)
      write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
    else
      write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
    break;
  case branch16:
    if (useExternalReloc)
      write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
    else
      write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
  // Undefined symbols are referenced via external relocations.
  if (isa<UndefinedAtom>(&target))
    return true;
  if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
    switch (defAtom->merge()) {
    case DefinedAtom::mergeAsTentative:
      // Tentative definitions are referenced via external relocations.
      return true;
    case DefinedAtom::mergeAsWeak:
    case DefinedAtom::mergeAsWeakAndAddressUsed:
      // Global weak-defs are referenced via external relocations.
      return (defAtom->scope() == DefinedAtom::scopeGlobal);
    default:
      break;
    }
  }
  // Everything else is referenced via an internal relocation.
  return false;
}

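// Emit the normalized relocation records for one Reference when writing a
// relocatable (.o) output file.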
void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case modeCode:
  case modeData:
    break;
  case branch32:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc)
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rLength4);
    else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rLength4);
    }
    break;
  case funcRel32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case delta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0,
                addressForAtom(atom) + ref.offsetInAtom(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case negDelta32:
    appendReloc(relocs, sectionOffset, 0,
                addressForAtom(atom) + ref.offsetInAtom(),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
    break;
  default:
    llvm_unreachable("unknown x86 Reference Kind");
    break;
  }
}

std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
  return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
}

} // namespace mach_o
} // namespace lld