blob: ae59a958fb6c015b59b102194895c5ac103b16f1 [file] [log] [blame]
//===- lib/FileFormat/MachO/ArchHandler_x86.cpp ---------------------------===//
//
// The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
9
10#include "ArchHandler.h"
11#include "Atoms.h"
12#include "MachONormalizedFileBinaryUtils.h"
13
14#include "llvm/ADT/StringRef.h"
15#include "llvm/ADT/StringSwitch.h"
16#include "llvm/ADT/Triple.h"
17
18#include "llvm/Support/ErrorHandling.h"
19
20using namespace llvm::MachO;
21using namespace lld::mach_o::normalized;
22
23namespace lld {
24namespace mach_o {
25
/// ArchHandler concrete subclass for 32-bit x86 (i386) Mach-O.
/// Translates between normalized mach-o relocations and lld Reference kinds,
/// applies fix-ups to atom content, and emits section relocations when
/// writing relocatable (.o) output.
class ArchHandler_x86 : public ArchHandler {
public:
  ArchHandler_x86();
  virtual ~ArchHandler_x86();

  // Table mapping Reference::KindValue enumerators to printable names.
  const Registry::KindStrings *kindStrings() override { return _sKindStrings; }

  Reference::KindArch kindArch() override { return Reference::KindArch::x86; }

  // Byte templates and reference kinds used by the Stubs pass.
  const StubInfo &stubInfo() override { return _sStubInfo; }
  bool isCallSite(const Reference &) override;
  bool isPointer(const Reference &) override;
  bool isPairedReloc(const normalized::Relocation &) override;
  // Decodes a single (unpaired) relocation into kind/target/addend.
  std::error_code getReferenceInfo(const normalized::Relocation &reloc,
                                   const DefinedAtom *inAtom,
                                   uint32_t offsetInAtom,
                                   uint64_t fixupAddress, bool swap,
                                   FindAtomBySectionAndAddress atomFromAddress,
                                   FindAtomBySymbolIndex atomFromSymbolIndex,
                                   Reference::KindValue *kind,
                                   const lld::Atom **target,
                                   Reference::Addend *addend) override;
  // Decodes a SECTDIFF/PAIR relocation pair into kind/target/addend.
  std::error_code
      getPairReferenceInfo(const normalized::Relocation &reloc1,
                           const normalized::Relocation &reloc2,
                           const DefinedAtom *inAtom,
                           uint32_t offsetInAtom,
                           uint64_t fixupAddress, bool swap,
                           FindAtomBySectionAndAddress atomFromAddress,
                           FindAtomBySymbolIndex atomFromSymbolIndex,
                           Reference::KindValue *kind,
                           const lld::Atom **target,
                           Reference::Addend *addend) override;

  // Copies an atom's raw bytes and applies either final or relocatable
  // fix-ups, depending on the output type.
  void generateAtomContent(const DefinedAtom &atom, bool relocatable,
                           FindAddressForAtom findAddress,
                           uint8_t *atomContentBuffer) override;

  // Emits normalized relocation records for one Reference when writing a
  // relocatable object file.
  void appendSectionRelocations(const DefinedAtom &atom,
                                uint64_t atomSectionOffset,
                                const Reference &ref,
                                FindSymbolIndexForAtom symbolIndexForAtom,
                                FindSectionIndexForAtom sectionIndexForAtom,
                                FindAddressForAtom addressForAtom,
                                normalized::Relocations &relocs) override;

private:
  static const Registry::KindStrings _sKindStrings[];
  static const StubInfo _sStubInfo;

  // x86-specific Reference kinds.
  enum : Reference::KindValue {
    invalid,               /// for error condition

    // Kinds found in mach-o .o files:
    branch32,              /// ex: call _foo
    branch16,              /// ex: callw _foo
    abs32,                 /// ex: movl _foo, %eax
    funcRel32,             /// ex: movl _foo-L1(%eax), %eax
    pointer32,             /// ex: .long _foo
    delta32,               /// ex: .long _foo - .

    // Kinds introduced by Passes:
    lazyPointer,           /// Location contains a lazy pointer.
    lazyImmediateLocation, /// Location contains immediate value used in stub.
  };

  // True if references to \p target must be encoded as external (symbol-based)
  // relocations rather than section-based ones.
  static bool useExternalRelocationTo(const Atom &target);

  void applyFixupFinal(const Reference &ref, uint8_t *location,
                       uint64_t fixupAddress, uint64_t targetAddress,
                       uint64_t inAtomAddress);

  void applyFixupRelocatable(const Reference &ref, uint8_t *location,
                             uint64_t fixupAddress,
                             uint64_t targetAddress,
                             uint64_t inAtomAddress);

  // True when the host endianness differs from x86's little-endian order,
  // so multi-byte values must be byte-swapped on read/write.
  const bool _swap;
};
105
106//===----------------------------------------------------------------------===//
107// ArchHandler_x86
108//===----------------------------------------------------------------------===//
109
// Cache whether values need byte-swapping: true when the host's endianness
// is not x86's (little-endian).
ArchHandler_x86::ArchHandler_x86() :
  _swap(!MachOLinkingContext::isHostEndian(MachOLinkingContext::arch_x86)) {}
112
113ArchHandler_x86::~ArchHandler_x86() { }
114
// Printable names for the x86 Reference kinds (used by the Registry for
// diagnostics and YAML round-tripping).  Must cover every enumerator above.
const Registry::KindStrings ArchHandler_x86::_sKindStrings[] = {
  LLD_KIND_STRING_ENTRY(invalid),
  LLD_KIND_STRING_ENTRY(branch32),
  LLD_KIND_STRING_ENTRY(branch16),
  LLD_KIND_STRING_ENTRY(abs32),
  LLD_KIND_STRING_ENTRY(funcRel32),
  LLD_KIND_STRING_ENTRY(pointer32),
  LLD_KIND_STRING_ENTRY(delta32),
  LLD_KIND_STRING_ENTRY(lazyPointer),
  LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
  LLD_KIND_STRING_END
};
127
// Byte templates and reference descriptions the Stubs pass uses to
// synthesize i386 stubs, stub helpers, and lazy pointers.  The numeric
// fields in each Reference tuple are (arch, kind, offset-in-template,
// addend) — offsets index into the code byte arrays below.
const ArchHandler::StubInfo ArchHandler_x86::_sStubInfo = {
  "dyld_stub_binder",

  // Lazy pointer references
  { Reference::KindArch::x86, pointer32, 0, 0 },
  { Reference::KindArch::x86, lazyPointer, 0, 0 },

  // GOT pointer to dyld_stub_binder
  { Reference::KindArch::x86, pointer32, 0, 0 },

  // x86 code alignment
  1,

  // Stub size and code
  6,
  { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 },       // jmp *lazyPointer
  { Reference::KindArch::x86, abs32, 2, 0 },

  // Stub Helper size and code
  10,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $lazy-info-offset
    0xE9, 0x00, 0x00, 0x00, 0x00 },             // jmp helperhelper
  { Reference::KindArch::x86, lazyImmediateLocation, 1, 0 },
  { Reference::KindArch::x86, branch32, 6, 0 },

  // Stub Helper-Common size and code
  12,
  { 0x68, 0x00, 0x00, 0x00, 0x00,               // pushl $dyld_ImageLoaderCache
    0xFF, 0x25, 0x00, 0x00, 0x00, 0x00,         // jmp *_fast_lazy_bind
    0x90 },                                     // nop
  { Reference::KindArch::x86, abs32, 1, 0 },
  { Reference::KindArch::x86, abs32, 7, 0 }
};
161
162bool ArchHandler_x86::isCallSite(const Reference &ref) {
163 return (ref.kindValue() == branch32);
164}
165
166bool ArchHandler_x86::isPointer(const Reference &ref) {
167 return (ref.kindValue() == pointer32);
168}
169
170bool ArchHandler_x86::isPairedReloc(const Relocation &reloc) {
171 if (!reloc.scattered)
172 return false;
173 return (reloc.type == GENERIC_RELOC_LOCAL_SECTDIFF) ||
174 (reloc.type == GENERIC_RELOC_SECTDIFF);
175}
176
177std::error_code
178ArchHandler_x86::getReferenceInfo(const Relocation &reloc,
179 const DefinedAtom *inAtom,
180 uint32_t offsetInAtom,
181 uint64_t fixupAddress, bool swap,
182 FindAtomBySectionAndAddress atomFromAddress,
183 FindAtomBySymbolIndex atomFromSymbolIndex,
184 Reference::KindValue *kind,
185 const lld::Atom **target,
186 Reference::Addend *addend) {
187 typedef std::error_code E;
188 DefinedAtom::ContentPermissions perms;
189 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
190 uint64_t targetAddress;
191 switch (relocPattern(reloc)) {
192 case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength4:
193 // ex: call _foo (and _foo undefined)
194 *kind = branch32;
195 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
196 return ec;
197 *addend = fixupAddress + 4 + readS32(swap, fixupContent);
198 break;
199 case GENERIC_RELOC_VANILLA | rPcRel | rLength4:
200 // ex: call _foo (and _foo defined)
201 *kind = branch32;
202 targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
203 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
204 break;
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000205 case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength4:
206 // ex: call _foo+n (and _foo defined)
207 *kind = branch32;
208 targetAddress = fixupAddress + 4 + readS32(swap, fixupContent);
209 if (E ec = atomFromAddress(0, reloc.value, target, addend))
210 return ec;
211 *addend = targetAddress - reloc.value;
212 break;
Nick Kledzik2458bec2014-07-16 19:49:02 +0000213 case GENERIC_RELOC_VANILLA | rPcRel | rExtern | rLength2:
214 // ex: callw _foo (and _foo undefined)
215 *kind = branch16;
216 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
217 return ec;
218 *addend = fixupAddress + 2 + readS16(swap, fixupContent);
219 break;
220 case GENERIC_RELOC_VANILLA | rPcRel | rLength2:
221 // ex: callw _foo (and _foo defined)
222 *kind = branch16;
223 targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
224 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
225 break;
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000226 case GENERIC_RELOC_VANILLA | rScattered | rPcRel | rLength2:
227 // ex: callw _foo+n (and _foo defined)
228 *kind = branch16;
229 targetAddress = fixupAddress + 2 + readS16(swap, fixupContent);
230 if (E ec = atomFromAddress(0, reloc.value, target, addend))
231 return ec;
232 *addend = targetAddress - reloc.value;
233 break;
Nick Kledzik2458bec2014-07-16 19:49:02 +0000234 case GENERIC_RELOC_VANILLA | rExtern | rLength4:
235 // ex: movl _foo, %eax (and _foo undefined)
236 // ex: .long _foo (and _foo undefined)
237 perms = inAtom->permissions();
238 *kind =
239 ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
240 : pointer32;
241 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
242 return ec;
243 *addend = readU32(swap, fixupContent);
244 break;
245 case GENERIC_RELOC_VANILLA | rLength4:
246 // ex: movl _foo, %eax (and _foo defined)
247 // ex: .long _foo (and _foo defined)
248 perms = inAtom->permissions();
249 *kind =
250 ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? abs32
251 : pointer32;
252 targetAddress = readU32(swap, fixupContent);
253 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
254 break;
255 default:
256 return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
257 }
258 return std::error_code();
259}
260
261std::error_code
262ArchHandler_x86::getPairReferenceInfo(const normalized::Relocation &reloc1,
263 const normalized::Relocation &reloc2,
264 const DefinedAtom *inAtom,
265 uint32_t offsetInAtom,
266 uint64_t fixupAddress, bool swap,
267 FindAtomBySectionAndAddress atomFromAddr,
268 FindAtomBySymbolIndex atomFromSymbolIndex,
269 Reference::KindValue *kind,
270 const lld::Atom **target,
271 Reference::Addend *addend) {
272 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
273 std::error_code ec;
274 DefinedAtom::ContentPermissions perms = inAtom->permissions();
275 uint32_t fromAddress;
276 uint32_t toAddress;
277 uint32_t value;
278 const lld::Atom *fromTarget;
279 Reference::Addend offsetInTo;
280 Reference::Addend offsetInFrom;
281 switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
Nick Kledzik2d432352014-07-17 23:16:21 +0000282 case ((GENERIC_RELOC_SECTDIFF | rScattered | rLength4) << 16 |
283 GENERIC_RELOC_PAIR | rScattered | rLength4):
284 case ((GENERIC_RELOC_LOCAL_SECTDIFF | rScattered | rLength4) << 16 |
285 GENERIC_RELOC_PAIR | rScattered | rLength4):
Nick Kledzik2458bec2014-07-16 19:49:02 +0000286 toAddress = reloc1.value;
287 fromAddress = reloc2.value;
288 value = readS32(swap, fixupContent);
289 ec = atomFromAddr(0, toAddress, target, &offsetInTo);
290 if (ec)
291 return ec;
292 ec = atomFromAddr(0, fromAddress, &fromTarget, &offsetInFrom);
293 if (ec)
294 return ec;
295 if (fromTarget != inAtom)
296 return make_dynamic_error_code(Twine("SECTDIFF relocation where "
297 "subtrahend label is not in atom"));
298 *kind = ((perms & DefinedAtom::permR_X) == DefinedAtom::permR_X) ? funcRel32
299 : delta32;
300 if (*kind == funcRel32) {
301 // SECTDIFF relocations are used in i386 codegen where the function
302 // prolog does a CALL to the next instruction which POPs the return
303 // address into EBX which becomes the pic-base register. The POP
304 // instruction is label the used for the subtrahend in expressions.
305 // The funcRel32 kind represents the 32-bit delta to some symbol from
306 // the start of the function (atom) containing the funcRel32.
307 uint32_t ta = fromAddress + value - toAddress;
308 *addend = ta - offsetInFrom;
309 } else {
310 *addend = fromAddress + value - toAddress;
311 }
312 return std::error_code();
313 break;
314 default:
315 return make_dynamic_error_code(Twine("unsupported i386 relocation type"));
316 }
317}
318
Nick Kledzik2d432352014-07-17 23:16:21 +0000319void ArchHandler_x86::generateAtomContent(const DefinedAtom &atom,
320 bool relocatable,
321 FindAddressForAtom findAddress,
322 uint8_t *atomContentBuffer) {
323 // Copy raw bytes.
324 memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
325 // Apply fix-ups.
326 for (const Reference *ref : atom) {
327 uint32_t offset = ref->offsetInAtom();
328 const Atom *target = ref->target();
329 uint64_t targetAddress = 0;
330 if (isa<DefinedAtom>(target))
331 targetAddress = findAddress(*target);
332 uint64_t atomAddress = findAddress(atom);
333 uint64_t fixupAddress = atomAddress + offset;
334 if (relocatable) {
335 applyFixupRelocatable(*ref, &atomContentBuffer[offset],
336 fixupAddress, targetAddress,
337 atomAddress);
338 } else {
339 applyFixupFinal(*ref, &atomContentBuffer[offset],
340 fixupAddress, targetAddress,
341 atomAddress);
342 }
343 }
344}
345
346void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *location,
347 uint64_t fixupAddress,
348 uint64_t targetAddress,
349 uint64_t inAtomAddress) {
350 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
Nick Kledzik2458bec2014-07-16 19:49:02 +0000351 return;
Nick Kledzik2d432352014-07-17 23:16:21 +0000352 assert(ref.kindArch() == Reference::KindArch::x86);
Nick Kledzik2458bec2014-07-16 19:49:02 +0000353 int32_t *loc32 = reinterpret_cast<int32_t *>(location);
354 int16_t *loc16 = reinterpret_cast<int16_t *>(location);
Nick Kledzik2d432352014-07-17 23:16:21 +0000355 switch (ref.kindValue()) {
Nick Kledzik2458bec2014-07-16 19:49:02 +0000356 case branch32:
Nick Kledzik2d432352014-07-17 23:16:21 +0000357 write32(*loc32, _swap, (targetAddress - (fixupAddress + 4)) + ref.addend());
Nick Kledzik2458bec2014-07-16 19:49:02 +0000358 break;
359 case branch16:
Nick Kledzik2d432352014-07-17 23:16:21 +0000360 write16(*loc16, _swap, (targetAddress - (fixupAddress + 2)) + ref.addend());
Nick Kledzik2458bec2014-07-16 19:49:02 +0000361 break;
362 case pointer32:
363 case abs32:
Nick Kledzik2d432352014-07-17 23:16:21 +0000364 write32(*loc32, _swap, targetAddress + ref.addend());
Nick Kledzik2458bec2014-07-16 19:49:02 +0000365 break;
366 case funcRel32:
Nick Kledzik2d432352014-07-17 23:16:21 +0000367 write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend());
Nick Kledzik2458bec2014-07-16 19:49:02 +0000368 break;
369 case delta32:
Nick Kledzik2d432352014-07-17 23:16:21 +0000370 write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
Nick Kledzik2458bec2014-07-16 19:49:02 +0000371 break;
372 case lazyPointer:
373 case lazyImmediateLocation:
374 // do nothing
375 break;
376 default:
377 llvm_unreachable("invalid x86 Reference Kind");
378 break;
379 }
380}
381
Nick Kledzik2d432352014-07-17 23:16:21 +0000382void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
383 uint8_t *location,
384 uint64_t fixupAddress,
385 uint64_t targetAddress,
386 uint64_t inAtomAddress) {
387 int32_t *loc32 = reinterpret_cast<int32_t *>(location);
388 int16_t *loc16 = reinterpret_cast<int16_t *>(location);
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000389 bool useExternalReloc = useExternalRelocationTo(*ref.target());
Nick Kledzik2d432352014-07-17 23:16:21 +0000390 switch (ref.kindValue()) {
391 case branch32:
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000392 if (useExternalReloc)
393 write32(*loc32, _swap, ref.addend() - (fixupAddress + 4));
394 else
395 write32(*loc32, _swap, (targetAddress - (fixupAddress+4)) + ref.addend());
Nick Kledzik2d432352014-07-17 23:16:21 +0000396 break;
397 case branch16:
Nick Kledzik68a1abd2014-07-18 00:37:52 +0000398 if (useExternalReloc)
399 write16(*loc16, _swap, ref.addend() - (fixupAddress + 2));
400 else
401 write16(*loc16, _swap, (targetAddress - (fixupAddress+2)) + ref.addend());
Nick Kledzik2d432352014-07-17 23:16:21 +0000402 break;
403 case pointer32:
404 case abs32:
405 write32(*loc32, _swap, targetAddress + ref.addend());
406 break;
407 case funcRel32:
408 write32(*loc32, _swap, targetAddress - inAtomAddress + ref.addend()); // FIXME
409 break;
410 case delta32:
411 write32(*loc32, _swap, targetAddress - fixupAddress + ref.addend());
412 break;
413 case lazyPointer:
414 case lazyImmediateLocation:
415 // do nothing
416 break;
417 default:
418 llvm_unreachable("invalid x86 Reference Kind");
419 break;
420 }
421}
422
423bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
424 // Undefined symbols are referenced via external relocations.
425 if (isa<UndefinedAtom>(&target))
426 return true;
427 if (const DefinedAtom *defAtom = dyn_cast<DefinedAtom>(&target)) {
428 switch (defAtom->merge()) {
429 case DefinedAtom::mergeAsTentative:
430 // Tentative definitions are referenced via external relocations.
431 return true;
432 case DefinedAtom::mergeAsWeak:
433 case DefinedAtom::mergeAsWeakAndAddressUsed:
434 // Global weak-defs are referenced via external relocations.
435 return (defAtom->scope() == DefinedAtom::scopeGlobal);
436 default:
437 break;
438 }
439 }
440 // Everything else is reference via an internal relocation.
441 return false;
442}
443
444
/// Emits the normalized relocation record(s) describing one Reference when
/// writing a relocatable object file.  The patterns emitted here must be the
/// exact inverses of what getReferenceInfo()/getPairReferenceInfo() accept.
void ArchHandler_x86::appendSectionRelocations(
                                   const DefinedAtom &atom,
                                   uint64_t atomSectionOffset,
                                   const Reference &ref,
                                   FindSymbolIndexForAtom symbolIndexForAtom,
                                   FindSectionIndexForAtom sectionIndexForAtom,
                                   FindAddressForAtom addressForAtom,
                                   normalized::Relocations &relocs) {
  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
    return;
  assert(ref.kindArch() == Reference::KindArch::x86);
  uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
  bool useExternalReloc = useExternalRelocationTo(*ref.target());
  switch (ref.kindValue()) {
  case branch32:
    if (useExternalReloc) {
      // Undefined/tentative/weak-global target: symbol-indexed reloc.
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength4);
    } else {
      // Non-zero addend requires a scattered reloc to record the target
      // address; otherwise a plain section-indexed reloc suffices.
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                  GENERIC_RELOC_VANILLA | rScattered | rPcRel |  rLength4);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
                  GENERIC_RELOC_VANILLA | rPcRel | rLength4);
    }
    break;
  case branch16:
    if (useExternalReloc) {
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
                  GENERIC_RELOC_VANILLA | rExtern | rPcRel | rLength2);
    } else {
      if (ref.addend() != 0)
        appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
                  GENERIC_RELOC_VANILLA | rScattered | rPcRel |  rLength2);
      else
        appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
                  GENERIC_RELOC_VANILLA | rPcRel | rLength2);
    }
    break;
  case pointer32:
  case abs32:
    if (useExternalReloc)
      appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()),  0,
                GENERIC_RELOC_VANILLA |    rExtern     |  rLength4);
    else
      appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
                GENERIC_RELOC_VANILLA |    rLength4);
    break;
  case funcRel32:
    // SECTDIFF pair: minuend is the target address; subtrahend is the
    // pic-base label inside this atom (atom start minus the addend).
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
              GENERIC_RELOC_SECTDIFF | rScattered    | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
              GENERIC_RELOC_PAIR     | rScattered    | rLength4);
    break;
  case delta32:
    // SECTDIFF pair: subtrahend is the fixup location itself.
    appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
              GENERIC_RELOC_SECTDIFF | rScattered    | rLength4);
    appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) + ref.offsetInAtom(),
              GENERIC_RELOC_PAIR     | rScattered    | rLength4);
    break;
  case lazyPointer:
  case lazyImmediateLocation:
    llvm_unreachable("lazy reference kind implies Stubs pass was run");
    break;
  default:
    llvm_unreachable("unknown x86 Reference Kind");
    break;

  }
}
516
517
Nick Kledzik2458bec2014-07-16 19:49:02 +0000518std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86() {
519 return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86());
520}
521
522} // namespace mach_o
523} // namespace lld