//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
//
//                             The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "ArchHandler.h"
#include "Atoms.h"
#include "MachONormalizedFileBinaryUtils.h"

#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/Triple.h"

#include "llvm/Support/ErrorHandling.h"

using namespace llvm::MachO;
using namespace lld::mach_o::normalized;

namespace lld {
namespace mach_o {

class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  const StubInfo &stubInfo() override { return _sStubInfo; }
  bool isCallSite(const Reference &) override;
  bool isPointer(const Reference &) override;
  bool isPairedReloc(const normalized::Relocation &) override;

  bool needsCompactUnwind() override {
    return false;
  }
  Reference::KindValue imageOffsetKind() override {
    return invalid;
  }
  Reference::KindValue imageOffsetKindIndirect() override {
    return invalid;
  }

  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  std::error_code
  getPairReferenceInfo(const normalized::Relocation &reloc1,
                       const normalized::Relocation &reloc2,
                       const DefinedAtom *inAtom,
                       uint32_t offsetInAtom,
                       uint64_t fixupAddress, bool swap,
                       FindAtomBySectionAndAddress atomFromAddress,
                       FindAtomBySymbolIndex atomFromSymbolIndex,
                       Reference::KindValue *kind,
                       const lld::Atom **target,
                       Reference::Addend *addend) override;

  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           uint64_t imageBaseAddress,
                           uint8_t *atomContentBuffer) override;

  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

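  // modeCode/modeData references mark points where an atom's content switches
  // between instructions and embedded data. The hooks below let callers (for
  // example, the normalized-file writer) walk those transitions when
  // reconstructing data-in-code ranges.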
  bool isDataInCodeTransition(Reference::KindValue refKind) override {
    switch (refKind) {
    case modeCode:
    case modeData:
      return true;
    default:
      return false;
      break;
    }
  }

  Reference::KindValue dataInCodeTransitionStart(
      const MachODefinedAtom &atom) override {
    return modeData;
  }

  Reference::KindValue dataInCodeTransitionEnd(
      const MachODefinedAtom &atom) override {
    return modeCode;
  }

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo _sStubInfo;

  enum : Reference::KindValue {
    invalid,               /// for error condition

    modeCode,              /// Content starting at this offset is code.
    modeData,              /// Content starting at this offset is data.

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .
    negDelta32,            /// ex: .long . - _foo

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  static bool useExternalRelocationTo(const Atom &target);

  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

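  // True when the host endianness differs from x86's little-endian layout,
  // in which case fixup values must be byte-swapped when read or written.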
  const bool _swap;
};

//===----------------------------------------------------------------------===//
//  ArchHandler_x86
//===----------------------------------------------------------------------===//

ArchHandler_x86::ArchHandler_x86() :
  _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}

ArchHandler_x86::~ArchHandler_x86() { }

const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(modeCode),
  LLD_KIND_STRING_ENTRY(modeData),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(negDelta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};

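// Layout consumed by the stubs pass when it synthesizes x86 lazy-binding
// stubs: the dyld binder symbol, the reference kinds used for the lazy and
// non-lazy pointers, and the machine-code templates for the stub, stub
// helper, and common stub helper. Each template is followed by the fixups
// (reference kind plus byte offset within the template) that must be applied
// to it.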
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 },       // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },
  { false, 0, 0, 0 },

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },             // jmp helperhelper
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 },
  { Reference::KindArch::x86, branch32, 6, 0 },

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,         // jmp *_fast_lazy_bind
    0x90 },                                     // nop
  { Reference::KindArch::x86, abs32, 1, 0 },
  { false, 0, 0, 0 },
  { Reference::KindArch::x86, abs32, 7, 0 },
  { false, 0, 0, 0 }
};

bool ArchHandler_x86::isCallSite(const Reference &ref) {
  return (ref.kindValue() == branch32);
}

bool ArchHandler_x86::isPointer(const Reference &ref) {
  return (ref.kindValue() == pointer32);
}

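// SECTDIFF and LOCAL_SECTDIFF relocations always arrive as a scattered pair:
// the first entry carries the minuend address in its value field and the
// GENERIC_RELOC_PAIR that follows carries the subtrahend.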
bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
  if (!reloc.scattered)
    return false;
  return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
         (reloc.type == GENERIC_RELOC_SECTDIFF);
}

std::error_code
ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
                                  const DefinedAtom *inAtom,
                                  uint32_t offsetInAtom,
                                  uint64_t fixupAddress, bool swap,
                                  FindAtomBySectionAndAddress atomFromAddress,
                                  FindAtomBySymbolIndex atomFromSymbolIndex,
                                  Reference::KindValue *kind,
                                  const lld::Atom **target,
                                  Reference::Addend *addend) {
  typedef std::error_code E;
  DefinedAtom::ContentPermissions perms;
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  uint64_t targetAddress;
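  // relocPattern() folds the relocation type together with its scattered,
  // pc-rel, extern, and length attributes, so each case below matches one
  // exact relocation shape.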
  switch (relocPattern(reloc)) {
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
    // ex: call _foo (and _foo undefined)
    *kind = branch32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 4 + readS32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
    // ex: call _foo (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
    // ex: call _foo+n (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
    // ex: callw _foo (and _foo undefined)
    *kind = branch16;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 2 + readS16(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
    // ex: callw _foo (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
    // ex: callw _foo+n (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rExtern | rLength4:
    // ex: movl _foo, %eax (and _foo undefined)
    // ex: .long _foo (and _foo undefined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = readU32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rLength4:
    // ex: movl _foo, %eax (and _foo defined)
    // ex: .long _foo (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    targetAddress = readU32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rLength4:
    // ex: .long _foo+n (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                 : pointer32;
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = readU32(swap, fixupContent) - reloc.value;
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
  return std::error_code();
}

std::error_code
ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
                                      const normalized::Relocation &reloc2,
                                      const DefinedAtom *inAtom,
                                      uint32_t offsetInAtom,
                                      uint64_t fixupAddress, bool swap,
                                      FindAtomBySectionAndAddress atomFromAddr,
                                      FindAtomBySymbolIndex atomFromSymbolIndex,
                                      Reference::KindValue *kind,
                                      const lld::Atom **target,
                                      Reference::Addend *addend) {
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  std::error_code ec;
  DefinedAtom::ContentPermissions perms = inAtom->permissions();
  uint32_t fromAddress;
  uint32_t toAddress;
  uint32_t value;
  const lld::Atom *fromTarget;
  Reference::Addend offsetInTo;
  Reference::Addend offsetInFrom;
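  // Match on both relocations at once: the first pattern is shifted into the
  // high 16 bits so a single switch can recognize the SECTDIFF/PAIR combos.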
  switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
  case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
  case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
    toAddress = reloc1.value;
    fromAddress = reloc2.value;
    value = readS32(swap, fixupContent);
    ec = atomFromAddr(0, toAddress, target, &offsetInTo);
    if (ec)
      return ec;
    ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
    if (ec)
      return ec;
    if (fromTarget != inAtom) {
      if (*target != inAtom)
        return make_dynamic_error_code(Twine("SECTDIFF relocation where "
                                             "neither target is in atom"));
      *kind = negDelta32;
      *addend = toAddress - value - fromAddress;
      *target = fromTarget;
    } else {
      if ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) {
        // SECTDIFF relocations are used in i386 codegen where the function
        // prolog does a CALL to the next instruction which POPs the return
        // address into EBX, which becomes the pic-base register. The label on
        // that POP instruction is then used as the subtrahend in expressions.
        // The funcRel32 kind represents the 32-bit delta to some symbol from
        // the start of the function (atom) containing the funcRel32.
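        // For example, a sketch of the usual i386 PIC prolog (label names are
        // illustrative only):
        //       call  L1
        //   L1: popl  %ebx                 # %ebx = address of L1 (pic base)
        //       movl  _foo-L1(%ebx), %eax  # emitted as SECTDIFF(_foo, L1)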
        *kind = funcRel32;
        uint32_t ta = fromAddress + value - toAddress;
        *addend = ta - offsetInFrom;
      } else {
        *kind = delta32;
        *addend = fromAddress + value - toAddress;
      }
    }
    return std::error_code();
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
}

void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
                                          bool relocatable,
                                          FindAddressForAtom findAddress,
                                          uint64_t imageBaseAddress,
                                          uint8_t *atomContentBuffer) {
  // Copy raw bytes.
  memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
  // Apply fix-ups.
  for (const Reference *ref : atom) {
    uint32_t offset = ref->offsetInAtom();
    const Atom *target = ref->target();
    uint64_t targetAddress = 0;
    if (isa<DefinedAtom>(target))
      targetAddress = findAddress(*target);
    uint64_t atomAddress = findAddress(atom);
    uint64_t fixupAddress = atomAddress + offset;
    if (relocatable) {
      applyFixupRelocatable(*ref, &atomContentBuffer[offset],
                            fixupAddress, targetAddress,
                            atomAddress);
    } else {
      applyFixupFinal(*ref, &atomContentBuffer[offset],
                      fixupAddress, targetAddress,
                      atomAddress);
    }
  }
}

void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
                                      uint64_t fixupAddress,
                                      uint64_t targetAddress,
                                      uint64_t inAtomAddress) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  switch (ref.kindValue()) {
  case branch32:
    write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
    break;
  case branch16:
    write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

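// Used when writing relocatable object output: if the target will be named
// by an external relocation, only the addend (biased by the pc-relative
// displacement where applicable) is stored in the section contents; otherwise
// the fixup is resolved against the target address, as in the final case.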
void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                            uint8_t *location,
                                            uint64_t fixupAddress,
                                            uint64_t targetAddress,
                                            uint64_t inAtomAddress) {
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc)
      write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
    else
      write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
    break;
  case branch16:
    if (useExternalReloc)
      write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
    else
      write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
  // Undefined symbols are referenced via external relocations.
  if (isa<UndefinedAtom>(&target))
    return true;
  if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
    switch (defAtom->merge()) {
    case DefinedAtom::mergeAsTentative:
      // Tentative definitions are referenced via external relocations.
      return true;
    case DefinedAtom::mergeAsWeak:
    case DefinedAtom::mergeAsWeakAndAddressUsed:
      // Global weak-defs are referenced via external relocations.
      return (defAtom->scope() == DefinedAtom::scopeGlobal);
    default:
      break;
    }
  }
  // Everything else is referenced via an internal relocation.
  return false;
}


void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case modeCode:
  case modeData:
    break;
  case branch32:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc)
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rLength4);
    else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rLength4);
    }
    break;
  case funcRel32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case delta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0,
                addressForAtom(atom) + ref.offsetInAtom(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
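  // negDelta32 emits the same SECTDIFF/PAIR pair as delta32, but with the
  // roles reversed: the fixup location is the minuend and the target of the
  // reference is the subtrahend.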
  case negDelta32:
    appendReloc(relocs, sectionOffset, 0,
                addressForAtom(atom) + ref.offsetInAtom(),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
    break;
  default:
    llvm_unreachable("unknown x86 Reference Kind");
    break;
  }
}


std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
  return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
}

} // namespace mach_o
} // namespace lld