//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
//
//                             The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "ArchHandler.h"
#include "Atoms.h"
#include "MachONormalizedFileBinaryUtils.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Support/ErrorHandling.h"

using namespace llvm::MachO;
using namespace lld::mach_o::normalized;

namespace lld {
namespace mach_o {

class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  const StubInfo &stubInfo() override { return _sStubInfo; }
  bool isCallSite(const Reference &) override;
  bool isNonCallBranch(const Reference &) override {
    return false;
  }

  bool isPointer(const Reference &) override;
  bool isPairedReloc(const normalized::Relocation &) override;

  bool needsCompactUnwind() override {
    return false;
  }
  Reference::KindValue imageOffsetKind() override {
    return invalid;
  }
  Reference::KindValue imageOffsetKindIndirect() override {
    return invalid;
  }

  Reference::KindValue unwindRefToCIEKind() override {
    return negDelta32;
  }

  Reference::KindValue unwindRefToFunctionKind() override {
    return delta32;
  }

  Reference::KindValue unwindRefToEhFrameKind() override {
    return invalid;
  }

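  // 0x04000000 corresponds to UNWIND_X86_MODE_DWARF in the compact-unwind
  // encoding, i.e. "no compact encoding; fall back to the DWARF unwind info."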
  uint32_t dwarfCompactUnwindType() override {
    return 0x04000000U;
  }

  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  std::error_code
      getPairReferenceInfo(const normalized::Relocation &reloc1,
                           const normalized::Relocation &reloc2,
                           const DefinedAtom *inAtom,
                           uint32_t offsetInAtom,
                           uint64_t fixupAddress, bool swap, bool scatterable,
                           FindAtomBySectionAndAddress atomFromAddress,
                           FindAtomBySymbolIndex atomFromSymbolIndex,
                           Reference::KindValue *kind,
                           const lld::Atom **target,
                           Reference::Addend *addend) override;

  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           FindAddressForAtom findSectionAddress,
                           uint64_t imageBaseAddress,
                           uint8_t *atomContentBuffer) override;

  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

  bool isDataInCodeTransition(Reference::KindValue refKind) override {
    switch (refKind) {
    case modeCode:
    case modeData:
      return true;
    default:
      return false;
      break;
    }
  }

  Reference::KindValue dataInCodeTransitionStart(
                                       const MachODefinedAtom &atom) override {
    return modeData;
  }

  Reference::KindValue dataInCodeTransitionEnd(
                                       const MachODefinedAtom &atom) override {
    return modeCode;
  }

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo _sStubInfo;

  enum : Reference::KindValue {
    invalid,               /// for error condition

    modeCode,              /// Content starting at this offset is code.
    modeData,              /// Content starting at this offset is data.

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .
    negDelta32,            /// ex: .long . - _foo

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  static bool useExternalRelocationTo(const Atom &target);

  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

  const bool _swap;
};

//===----------------------------------------------------------------------===//
//  ArchHandler_x86
//===----------------------------------------------------------------------===//

ArchHandler_x86::ArchHandler_x86() :
  _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}

ArchHandler_x86::~ArchHandler_x86() { }

const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(modeCode),
  LLD_KIND_STRING_ENTRY(modeData),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(negDelta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};

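// This table drives the StubsPass when it synthesizes lazy-binding stubs.
// Roughly: each stub is an indirect jmp through its lazy pointer; that pointer
// initially targets the per-symbol stub helper, which pushes an offset into
// the lazy-binding info and branches to the common helper, which in turn
// invokes dyld_stub_binder to bind the symbol and overwrite the lazy pointer.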
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 },       // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },
  { false, 0, 0, 0 },

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },             // jmp to stub-helper-common
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 },
  { Reference::KindArch::x86, branch32, 6, 0 },

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,         // jmp *_fast_lazy_bind
    0x90 },                                     // nop
  { Reference::KindArch::x86, abs32, 1, 0 },
  { false, 0, 0, 0 },
  { Reference::KindArch::x86, abs32, 7, 0 },
  { false, 0, 0, 0 }
};

bool ArchHandler_x86::isCallSite(const Reference &ref) {
  return (ref.kindValue() == branch32);
}

bool ArchHandler_x86::isPointer(const Reference &ref) {
  return (ref.kindValue() == pointer32);
}

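// SECTDIFF-style relocations always come in pairs: the first record supplies
// the minuend and the following GENERIC_RELOC_PAIR record supplies the
// subtrahend, so both must be consumed together (see getPairReferenceInfo).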
bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
  if (!reloc.scattered)
    return false;
  return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
         (reloc.type == GENERIC_RELOC_SECTDIFF);
}

std::error_code
ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
                                  const DefinedAtom *inAtom,
                                  uint32_t offsetInAtom,
                                  uint64_t fixupAddress, bool swap,
                                  FindAtomBySectionAndAddress atomFromAddress,
                                  FindAtomBySymbolIndex atomFromSymbolIndex,
                                  Reference::KindValue *kind,
                                  const lld::Atom **target,
                                  Reference::Addend *addend) {
  typedef std::error_code E;
  DefinedAtom::ContentPermissions perms;
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  uint64_t targetAddress;
  switch (relocPattern(reloc)) {
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
    // ex: call _foo (and _foo undefined)
    *kind = branch32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 4 + readS32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
    // ex: call _foo (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
    // ex: call _foo+n (and _foo defined)
    *kind = branch32;
    targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
    // ex: callw _foo (and _foo undefined)
    *kind = branch16;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = fixupAddress + 2 + readS16(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
    // ex: callw _foo (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
    // ex: callw _foo+n (and _foo defined)
    *kind = branch16;
    targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = targetAddress - reloc.value;
    break;
  case GENERIC_RELOC_VANILLA | rExtern | rLength4:
    // ex: movl _foo, %eax (and _foo undefined)
    // ex: .long _foo (and _foo undefined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                  : pointer32;
    if (E ec = atomFromSymbolIndex(reloc.symbol, target))
      return ec;
    *addend = readU32(swap, fixupContent);
    break;
  case GENERIC_RELOC_VANILLA | rLength4:
    // ex: movl _foo, %eax (and _foo defined)
    // ex: .long _foo (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                  : pointer32;
    targetAddress = readU32(swap, fixupContent);
    return atomFromAddress(reloc.symbol, targetAddress, target, addend);
    break;
  case GENERIC_RELOC_VANILLA | rScattered | rLength4:
    // ex: .long _foo+n (and _foo defined)
    perms = inAtom->permissions();
    *kind =
        ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
                                                                  : pointer32;
    if (E ec = atomFromAddress(0, reloc.value, target, addend))
      return ec;
    *addend = readU32(swap, fixupContent) - reloc.value;
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
  return std::error_code();
}

std::error_code
ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
                                      const normalized::Relocation &reloc2,
                                      const DefinedAtom *inAtom,
                                      uint32_t offsetInAtom,
                                      uint64_t fixupAddress, bool swap,
                                      bool scatterable,
                                      FindAtomBySectionAndAddress atomFromAddr,
                                      FindAtomBySymbolIndex atomFromSymbolIndex,
                                      Reference::KindValue *kind,
                                      const lld::Atom **target,
                                      Reference::Addend *addend) {
  const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
  std::error_code ec;
  DefinedAtom::ContentPermissions perms = inAtom->permissions();
  uint32_t fromAddress;
  uint32_t toAddress;
  uint32_t value;
  const lld::Atom *fromTarget;
  Reference::Addend offsetInTo;
  Reference::Addend offsetInFrom;
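  // Fold both relocation patterns into a single 32-bit key so each
  // (SECTDIFF, PAIR) combination can be matched with one switch case.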
  switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
  case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
  case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
         GENERIC_RELOC_PAIR | rScattered | rLength4):
    toAddress = reloc1.value;
    fromAddress = reloc2.value;
    value = readS32(swap, fixupContent);
    ec = atomFromAddr(0, toAddress, target, &offsetInTo);
    if (ec)
      return ec;
    ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
    if (ec)
      return ec;
    if (fromTarget != inAtom) {
      if (*target != inAtom)
        return make_dynamic_error_code(Twine("SECTDIFF relocation where "
                                             "neither target is in atom"));
      *kind = negDelta32;
      *addend = toAddress - value - fromAddress;
      *target = fromTarget;
    } else {
      if ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) {
        // SECTDIFF relocations are used in i386 codegen where the function
        // prolog does a CALL to the next instruction, which POPs the return
        // address into EBX; EBX then becomes the pic-base register. The POP
        // instruction is labeled, and that label is used as the subtrahend
        // in such expressions. The funcRel32 kind represents the 32-bit delta
        // to some symbol from the start of the function (atom) containing
        // the funcRel32.
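        // Illustrative prolog (not taken from any particular object file):
        //       call  L1                 ; pushes address of L1
        //   L1: popl  %ebx               ; %ebx = pic base
        //       movl  _foo-L1(%ebx), %eax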
        *kind = funcRel32;
        uint32_t ta = fromAddress + value - toAddress;
        *addend = ta - offsetInFrom;
      } else {
        *kind = delta32;
        *addend = fromAddress + value - toAddress;
      }
    }
    return std::error_code();
    break;
  default:
    return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
  }
}

void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
                                          bool relocatable,
                                          FindAddressForAtom findAddress,
                                          FindAddressForAtom findSectionAddress,
                                          uint64_t imageBaseAddress,
                                          uint8_t *atomContentBuffer) {
  // Copy raw bytes.
  memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
  // Apply fix-ups.
  for (const Reference *ref : atom) {
    uint32_t offset = ref->offsetInAtom();
    const Atom *target = ref->target();
    uint64_t targetAddress = 0;
    if (isa<DefinedAtom>(target))
      targetAddress = findAddress(*target);
    uint64_t atomAddress = findAddress(atom);
    uint64_t fixupAddress = atomAddress + offset;
    if (relocatable) {
      applyFixupRelocatable(*ref, &atomContentBuffer[offset],
                            fixupAddress, targetAddress,
                            atomAddress);
    } else {
      applyFixupFinal(*ref, &atomContentBuffer[offset],
                      fixupAddress, targetAddress,
                      atomAddress);
    }
  }
}

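// Used when emitting a final linked image: all target addresses are known, so
// every reference is fully resolved into the atom content here.
// applyFixupRelocatable() below handles relocatable (-r) output instead, where
// external targets are left for the relocations emitted later.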
void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
                                      uint64_t fixupAddress,
                                      uint64_t targetAddress,
                                      uint64_t inAtomAddress) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  switch (ref.kindValue()) {
  case branch32:
    write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
    break;
  case branch16:
    write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                            uint8_t *location,
                                            uint64_t fixupAddress,
                                            uint64_t targetAddress,
                                            uint64_t inAtomAddress) {
  int32_t *loc32 = reinterpret_cast<int32_t *>(location);
  int16_t *loc16 = reinterpret_cast<int16_t *>(location);
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc)
      write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
    else
      write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
    break;
  case branch16:
    if (useExternalReloc)
      write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
    else
      write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
    break;
  case pointer32:
  case abs32:
    write32(*loc32, _swap, targetAddress + ref.addend());
    break;
  case funcRel32:
    write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
    break;
  case delta32:
    write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
    break;
  case negDelta32:
    write32(*loc32, _swap, fixupAddress - targetAddress + ref.addend());
    break;
  case modeCode:
  case modeData:
  case lazyPointer:
  case lazyImmediateLocation:
    // do nothing
    break;
  default:
    llvm_unreachable("invalid x86 Reference Kind");
    break;
  }
}

bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
  // Undefined symbols are referenced via external relocations.
  if (isa<UndefinedAtom>(&target))
    return true;
  if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
    switch (defAtom->merge()) {
    case DefinedAtom::mergeAsTentative:
      // Tentative definitions are referenced via external relocations.
      return true;
    case DefinedAtom::mergeAsWeak:
    case DefinedAtom::mergeAsWeakAndAddressUsed:
      // Global weak-defs are referenced via external relocations.
      return (defAtom->scope() == DefinedAtom::scopeGlobal);
    default:
      break;
    }
  }
  // Everything else is referenced via an internal relocation.
  return false;
}

void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case modeCode:
  case modeData:
    break;
  case branch32:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc)
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rLength4);
    else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                    GENERIC_RELOC_VANILLA | rScattered | rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                    GENERIC_RELOC_VANILLA | rLength4);
    }
    break;
  case funcRel32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case delta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                                                           ref.offsetInAtom(),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
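  // negDelta32 swaps the roles used for delta32 above: the SECTDIFF record
  // names this atom's fixup location (the minuend) and the PAIR record names
  // the target (the subtrahend).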
  case negDelta32:
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
                                                           ref.offsetInAtom(),
                GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                GENERIC_RELOC_PAIR | rScattered | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
    break;
  default:
    llvm_unreachable("unknown x86 Reference Kind");
    break;
  }
}

std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
  return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
}

} // namespace mach_o
} // namespace lld