//===-- Target.cpp ----------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include "../Target.h"

#include "../Latency.h"
#include "../Uops.h"
#include "MCTargetDesc/X86BaseInfo.h"
#include "MCTargetDesc/X86MCTargetDesc.h"
#include "X86.h"
#include "X86RegisterInfo.h"
#include "X86Subtarget.h"
#include "llvm/MC/MCInstBuilder.h"

namespace exegesis {

namespace {

static llvm::Error IsInvalidOpcode(const Instruction &Instr) {
  const auto OpcodeName = Instr.Name;
  if (OpcodeName.startswith("POPF") || OpcodeName.startswith("PUSHF") ||
      OpcodeName.startswith("ADJCALLSTACK"))
    return llvm::make_error<BenchmarkFailure>(
        "unsupported opcode: Push/Pop/AdjCallStack");
  // We do not handle second-form X87 instructions. We only handle first-form
  // ones (_Fp), see comment in X86InstrFPStack.td.
  for (const Operand &Op : Instr.Operands)
    if (Op.isReg() && Op.isExplicit() &&
        Op.getExplicitOperandInfo().RegClass == llvm::X86::RSTRegClassID)
      return llvm::make_error<BenchmarkFailure>(
          "unsupported second-form X87 instruction");
  return llvm::Error::success();
}

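// Extracts the x87 floating-point form (NotFP, OneArgFPRW, TwoArgFP, ...) from
// the instruction's TSFlags; the snippet generators below dispatch on it.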
static unsigned GetX86FPFlags(const Instruction &Instr) {
  return Instr.Description->TSFlags & llvm::X86II::FPTypeMask;
}

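// Latency snippet generation: x87 instructions that only rewrite ST(0)
// (OneArgFPRW) or combine ST(0) with ST(i) (TwoArgFP) are measured with
// self-aliasing snippets; other x87 forms are rejected, and non-x87
// instructions fall back to the generic LatencySnippetGenerator.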
class X86LatencySnippetGenerator : public LatencySnippetGenerator {
public:
  using LatencySnippetGenerator::LatencySnippetGenerator;

  llvm::Expected<std::vector<CodeTemplate>>
  generateCodeTemplates(const Instruction &Instr) const override {
    if (auto E = IsInvalidOpcode(Instr))
      return std::move(E);

    switch (GetX86FPFlags(Instr)) {
    case llvm::X86II::NotFP:
      return LatencySnippetGenerator::generateCodeTemplates(Instr);
    case llvm::X86II::ZeroArgFP:
    case llvm::X86II::OneArgFP:
    case llvm::X86II::SpecialFP:
    case llvm::X86II::CompareFP:
    case llvm::X86II::CondMovFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 Instruction");
    case llvm::X86II::OneArgFPRW:
    case llvm::X86II::TwoArgFP:
      // These are instructions like
      // - `ST(0) = fsqrt(ST(0))` (OneArgFPRW)
      // - `ST(0) = ST(0) + ST(i)` (TwoArgFP)
      // They are intrinsically serial and do not modify the state of the stack.
      return generateSelfAliasingCodeTemplates(Instr);
    default:
      llvm_unreachable("Unknown FP Type!");
    }
  }
};

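// Uops snippet generation: same x87 handling as for latency, except that
// CompareFP/CondMovFP instructions, which do not change the FP stack depth,
// can additionally be measured with unconstrained templates.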
class X86UopsSnippetGenerator : public UopsSnippetGenerator {
public:
  using UopsSnippetGenerator::UopsSnippetGenerator;

  llvm::Expected<std::vector<CodeTemplate>>
  generateCodeTemplates(const Instruction &Instr) const override {
    if (auto E = IsInvalidOpcode(Instr))
      return std::move(E);

    switch (GetX86FPFlags(Instr)) {
    case llvm::X86II::NotFP:
      return UopsSnippetGenerator::generateCodeTemplates(Instr);
    case llvm::X86II::ZeroArgFP:
    case llvm::X86II::OneArgFP:
    case llvm::X86II::SpecialFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 Instruction");
    case llvm::X86II::OneArgFPRW:
    case llvm::X86II::TwoArgFP:
      // These are instructions like
      // - `ST(0) = fsqrt(ST(0))` (OneArgFPRW)
      // - `ST(0) = ST(0) + ST(i)` (TwoArgFP)
      // They are intrinsically serial and do not modify the state of the stack.
      // We generate the same code for latency and uops.
      return generateSelfAliasingCodeTemplates(Instr);
    case llvm::X86II::CompareFP:
    case llvm::X86II::CondMovFP:
      // We can compute uops for any FP instruction that does not grow or
      // shrink the stack (i.e. that either does not touch the stack or pushes
      // as much as it pops).
      return generateUnconstrainedCodeTemplates(
          Instr, "instruction does not grow/shrink the FP stack");
    default:
      llvm_unreachable("Unknown FP Type!");
    }
  }
};

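// Returns the `mov $imm, %reg` opcode matching the register bit width.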
static unsigned GetLoadImmediateOpcode(unsigned RegBitWidth) {
  switch (RegBitWidth) {
  case 8:
    return llvm::X86::MOV8ri;
  case 16:
    return llvm::X86::MOV16ri;
  case 32:
    return llvm::X86::MOV32ri;
  case 64:
    return llvm::X86::MOV64ri;
  }
  llvm_unreachable("Invalid Value Width");
}

// Generates an instruction that loads an immediate value into a register.
static llvm::MCInst loadImmediate(unsigned Reg, unsigned RegBitWidth,
                                  const llvm::APInt &Value) {
  if (Value.getBitWidth() > RegBitWidth)
    llvm_unreachable("Value must fit in the Register");
  return llvm::MCInstBuilder(GetLoadImmediateOpcode(RegBitWidth))
      .addReg(Reg)
      .addImm(Value.getZExtValue());
}

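// The helpers below emit the scratch-stack sequence that ConstantInliner uses
// to materialize a constant; for a 16-byte constant loaded into %xmm0 with
// MOVDQUrm this is roughly:
//   subq   $16, %rsp           ; allocateStackSpace(16)
//   movl   $imm, 0(%rsp)       ; fillStackSpace(MOV32mi, 0, imm), 4 bytes at
//   ...                        ; a time until the buffer is filled
//   movdqu (%rsp), %xmm0       ; loadToReg(XMM0, MOVDQUrm)
//   addq   $16, %rsp           ; releaseStackSpace(16)
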
// Allocates scratch memory on the stack.
static llvm::MCInst allocateStackSpace(unsigned Bytes) {
  return llvm::MCInstBuilder(llvm::X86::SUB64ri8)
      .addReg(llvm::X86::RSP)
      .addReg(llvm::X86::RSP)
      .addImm(Bytes);
}

// Fills scratch memory at offset `OffsetBytes` with value `Imm`.
static llvm::MCInst fillStackSpace(unsigned MovOpcode, unsigned OffsetBytes,
                                   uint64_t Imm) {
  return llvm::MCInstBuilder(MovOpcode)
      // Address = ESP
      .addReg(llvm::X86::RSP) // BaseReg
      .addImm(1)              // ScaleAmt
      .addReg(0)              // IndexReg
      .addImm(OffsetBytes)    // Disp
      .addReg(0)              // Segment
      // Immediate.
      .addImm(Imm);
}

// Loads scratch memory into register `Reg` using opcode `RMOpcode`.
static llvm::MCInst loadToReg(unsigned Reg, unsigned RMOpcode) {
  return llvm::MCInstBuilder(RMOpcode)
      .addReg(Reg)
      // Address = ESP
      .addReg(llvm::X86::RSP) // BaseReg
      .addImm(1)              // ScaleAmt
      .addReg(0)              // IndexReg
      .addImm(0)              // Disp
      .addReg(0);             // Segment
}

// Releases scratch memory.
static llvm::MCInst releaseStackSpace(unsigned Bytes) {
  return llvm::MCInstBuilder(llvm::X86::ADD64ri8)
      .addReg(llvm::X86::RSP)
      .addReg(llvm::X86::RSP)
      .addImm(Bytes);
}

// Reserves some space on the stack, fills it with the content of the provided
// constant, and provides methods to load the stack value into a register.
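//
// Typical use (see setRegTo() below), e.g. for a 128-bit value on an AVX
// machine:
//   ConstantInliner CI(Value);
//   std::vector<llvm::MCInst> Code =
//       CI.loadAndFinalize(llvm::X86::XMM0, 128, llvm::X86::VMOVDQUrm);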
struct ConstantInliner {
  explicit ConstantInliner(const llvm::APInt &Constant) : Constant_(Constant) {}

  std::vector<llvm::MCInst> loadAndFinalize(unsigned Reg, unsigned RegBitWidth,
                                            unsigned Opcode) {
    assert((RegBitWidth & 7) == 0 &&
           "RegBitWidth must be a multiple of 8 bits");
    initStack(RegBitWidth / 8);
    add(loadToReg(Reg, Opcode));
    add(releaseStackSpace(RegBitWidth / 8));
    return std::move(Instructions);
  }

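  // Loads the 80-bit constant into the x87 stack register `Reg`: `fld` pushes
  // it as ST(0) and, when the target is not ST(0), a non-popping `fst %st(i)`
  // copies it there.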
  std::vector<llvm::MCInst> loadX87STAndFinalize(unsigned Reg) {
    initStack(kF80Bytes);
    add(llvm::MCInstBuilder(llvm::X86::LD_F80m)
            // Address = ESP
            .addReg(llvm::X86::RSP) // BaseReg
            .addImm(1)              // ScaleAmt
            .addReg(0)              // IndexReg
            .addImm(0)              // Disp
            .addReg(0));            // Segment
    if (Reg != llvm::X86::ST0)
      add(llvm::MCInstBuilder(llvm::X86::ST_Frr).addReg(Reg));
    add(releaseStackSpace(kF80Bytes));
    return std::move(Instructions);
  }

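  // Same idea for the RFP32/64/80 pseudo registers used before FP
  // stackification: `LD_Fp80m` loads the 80-bit constant directly into `Reg`.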
  std::vector<llvm::MCInst> loadX87FPAndFinalize(unsigned Reg) {
    initStack(kF80Bytes);
    add(llvm::MCInstBuilder(llvm::X86::LD_Fp80m)
            .addReg(Reg)
            // Address = ESP
            .addReg(llvm::X86::RSP) // BaseReg
            .addImm(1)              // ScaleAmt
            .addReg(0)              // IndexReg
            .addImm(0)              // Disp
            .addReg(0));            // Segment
    add(releaseStackSpace(kF80Bytes));
    return std::move(Instructions);
  }

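  // Spills the constant to the stack and pops it into EFLAGS with `popfq`.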
  std::vector<llvm::MCInst> popFlagAndFinalize() {
    initStack(8);
    add(llvm::MCInstBuilder(llvm::X86::POPF64));
    return std::move(Instructions);
  }

private:
  static constexpr const unsigned kF80Bytes = 10; // 80 bits.

  ConstantInliner &add(const llvm::MCInst &Inst) {
    Instructions.push_back(Inst);
    return *this;
  }

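  // Reserves `Bytes` of stack space and copies the (sign-extended) constant
  // into it, 4 bytes at a time, then 2 and 1 for any remainder.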
  void initStack(unsigned Bytes) {
    assert(Constant_.getBitWidth() <= Bytes * 8 &&
           "Value does not have the correct size");
    const llvm::APInt WideConstant = Constant_.getBitWidth() < Bytes * 8
                                         ? Constant_.sext(Bytes * 8)
                                         : Constant_;
    add(allocateStackSpace(Bytes));
    size_t ByteOffset = 0;
    for (; Bytes - ByteOffset >= 4; ByteOffset += 4)
      add(fillStackSpace(
          llvm::X86::MOV32mi, ByteOffset,
          WideConstant.extractBits(32, ByteOffset * 8).getZExtValue()));
    if (Bytes - ByteOffset >= 2) {
      add(fillStackSpace(
          llvm::X86::MOV16mi, ByteOffset,
          WideConstant.extractBits(16, ByteOffset * 8).getZExtValue()));
      ByteOffset += 2;
    }
    if (Bytes - ByteOffset >= 1)
      add(fillStackSpace(
          llvm::X86::MOV8mi, ByteOffset,
          WideConstant.extractBits(8, ByteOffset * 8).getZExtValue()));
  }

  llvm::APInt Constant_;
  std::vector<llvm::MCInst> Instructions;
};

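// X86 implementation of ExegesisTarget: provides the x86-specific snippet
// generators above and knows how to set up registers and scratch memory for a
// benchmark run.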
class ExegesisX86Target : public ExegesisTarget {
  void addTargetSpecificPasses(llvm::PassManagerBase &PM) const override {
    // Lowers FP pseudo-instructions, e.g. ABS_Fp32 -> ABS_F.
    PM.add(llvm::createX86FloatingPointStackifierPass());
  }

  unsigned getScratchMemoryRegister(const llvm::Triple &TT) const override {
    if (!TT.isArch64Bit()) {
      // FIXME: This would require popping from the stack, so we would have to
      // add some additional setup code.
      return 0;
    }
    return TT.isOSWindows() ? llvm::X86::RCX : llvm::X86::RDI;
  }

  unsigned getMaxMemoryAccessSize() const override { return 64; }

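  // Fills the 5 explicit memory operands (BaseReg, ScaleAmt, IndexReg, Disp,
  // Segment) so that the instruction addresses [Reg + Offset], i.e. the
  // equivalent of `Offset(%Reg)` with no index register and segment 0.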
  void fillMemoryOperands(InstructionTemplate &IT, unsigned Reg,
                          unsigned Offset) const override {
    // FIXME: For instructions that read AND write to memory, we use the same
    // value for input and output.
    for (size_t I = 0, E = IT.Instr.Operands.size(); I < E; ++I) {
      const Operand *Op = &IT.Instr.Operands[I];
      if (Op->isExplicit() && Op->isMemory()) {
        // Case 1: 5-op memory.
        assert((I + 5 <= E) && "x86 memory references are always 5 ops");
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(Reg); // BaseReg
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createImm(1); // ScaleAmt
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(0); // IndexReg
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createImm(Offset); // Disp
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(0); // Segment
        // Case 2: segment:index addressing. We assume that ES is 0.
      }
    }
  }

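  // Returns code that sets `Reg` to `Value`: general-purpose registers get a
  // direct mov-immediate; vector, x87 and flags registers go through a
  // ConstantInliner stack spill followed by the appropriate load.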
  std::vector<llvm::MCInst> setRegTo(const llvm::MCSubtargetInfo &STI,
                                     unsigned Reg,
                                     const llvm::APInt &Value) const override {
    if (llvm::X86::GR8RegClass.contains(Reg))
      return {loadImmediate(Reg, 8, Value)};
    if (llvm::X86::GR16RegClass.contains(Reg))
      return {loadImmediate(Reg, 16, Value)};
    if (llvm::X86::GR32RegClass.contains(Reg))
      return {loadImmediate(Reg, 32, Value)};
    if (llvm::X86::GR64RegClass.contains(Reg))
      return {loadImmediate(Reg, 64, Value)};
    ConstantInliner CI(Value);
    if (llvm::X86::VR64RegClass.contains(Reg))
      return CI.loadAndFinalize(Reg, 64, llvm::X86::MMX_MOVQ64rm);
    if (llvm::X86::VR128XRegClass.contains(Reg)) {
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQU32Z128rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQUrm);
      return CI.loadAndFinalize(Reg, 128, llvm::X86::MOVDQUrm);
    }
    if (llvm::X86::VR256XRegClass.contains(Reg)) {
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQU32Z256rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQUYrm);
    }
    if (llvm::X86::VR512RegClass.contains(Reg))
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 512, llvm::X86::VMOVDQU32Zrm);
    if (llvm::X86::RSTRegClass.contains(Reg)) {
      return CI.loadX87STAndFinalize(Reg);
    }
    if (llvm::X86::RFP32RegClass.contains(Reg) ||
        llvm::X86::RFP64RegClass.contains(Reg) ||
        llvm::X86::RFP80RegClass.contains(Reg)) {
      return CI.loadX87FPAndFinalize(Reg);
    }
    if (Reg == llvm::X86::EFLAGS)
      return CI.popFlagAndFinalize();
    return {}; // Not yet implemented.
  }

  std::unique_ptr<SnippetGenerator>
  createLatencySnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86LatencySnippetGenerator>(State);
  }

  std::unique_ptr<SnippetGenerator>
  createUopsSnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86UopsSnippetGenerator>(State);
  }

  bool matchesArch(llvm::Triple::ArchType Arch) const override {
    return Arch == llvm::Triple::x86_64 || Arch == llvm::Triple::x86;
  }
};

} // namespace

static ExegesisTarget *getTheExegesisX86Target() {
  static ExegesisX86Target Target;
  return &Target;
}

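// Registers the X86 ExegesisTarget in the global registry (see
// ExegesisTarget::registerTarget).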
void InitializeX86ExegesisTarget() {
  ExegesisTarget::registerTarget(getTheExegesisX86Target());
}

} // namespace exegesis