//===-- Target.cpp ----------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include "../Target.h"

#include "../Latency.h"
#include "../Uops.h"
#include "MCTargetDesc/X86BaseInfo.h"
#include "MCTargetDesc/X86MCTargetDesc.h"
#include "X86.h"
#include "X86RegisterInfo.h"
#include "X86Subtarget.h"
#include "llvm/MC/MCInstBuilder.h"

namespace llvm {
namespace exegesis {

namespace {

// A chunk of an instruction's operands that represents a single memory access.
struct MemoryOperandRange {
  MemoryOperandRange(llvm::ArrayRef<Operand> Operands) : Ops(Operands) {}

  // Sets up the InstructionTemplate so that the memory access represented by
  // this object points to [Reg] + Offset.
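  // For example (illustrative only): with Reg = llvm::X86::RDI and Offset = 16,
  // the five operands become RDI (base), 1 (scale), 0 (no index), 16
  // (displacement) and 0 (no segment), i.e. the address [RDI + 16].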
  void fillOrDie(InstructionTemplate &IT, unsigned Reg, unsigned Offset) {
    switch (Ops.size()) {
    case 5:
      IT.getValueFor(Ops[0]) = llvm::MCOperand::createReg(Reg);    // BaseReg
      IT.getValueFor(Ops[1]) = llvm::MCOperand::createImm(1);      // ScaleAmt
      IT.getValueFor(Ops[2]) = llvm::MCOperand::createReg(0);      // IndexReg
      IT.getValueFor(Ops[3]) = llvm::MCOperand::createImm(Offset); // Disp
      IT.getValueFor(Ops[4]) = llvm::MCOperand::createReg(0);      // Segment
      break;
    default:
      llvm::errs() << Ops.size() << "-op are not handled right now ("
                   << IT.Instr.Name << ")\n";
      llvm_unreachable("Invalid memory configuration");
    }
  }

  // Returns whether Range can be filled.
  static bool isValid(const MemoryOperandRange &Range) {
    return Range.Ops.size() == 5;
  }

  // Returns whether Op is a valid memory operand.
  static bool isMemoryOperand(const Operand &Op) {
    return Op.isMemory() && Op.isExplicit();
  }

  llvm::ArrayRef<Operand> Ops;
};

// X86 memory accesses involve a non-constant number of operands; this function
// extracts contiguous memory operands into MemoryOperandRange objects so they
// are easier to check and fill.
static std::vector<MemoryOperandRange>
getMemoryOperandRanges(llvm::ArrayRef<Operand> Operands) {
  std::vector<MemoryOperandRange> Result;
  while (!Operands.empty()) {
    Operands = Operands.drop_until(MemoryOperandRange::isMemoryOperand);
    auto MemoryOps = Operands.take_while(MemoryOperandRange::isMemoryOperand);
    if (!MemoryOps.empty())
      Result.push_back(MemoryOps);
    Operands = Operands.drop_front(MemoryOps.size());
  }
  return Result;
}
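// For instance (illustrative only): an instruction such as ADD64mr has one
// contiguous run of five explicit memory operands (base, scale, index,
// displacement, segment), so it yields a single MemoryOperandRange of size 5;
// an instruction with no memory operands yields an empty vector.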

static llvm::Error IsInvalidOpcode(const Instruction &Instr) {
  const auto OpcodeName = Instr.Name;
  if (OpcodeName.startswith("POPF") || OpcodeName.startswith("PUSHF") ||
      OpcodeName.startswith("ADJCALLSTACK"))
    return llvm::make_error<BenchmarkFailure>(
        "unsupported opcode: Push/Pop/AdjCallStack");
  const bool ValidMemoryOperands = llvm::all_of(
      getMemoryOperandRanges(Instr.Operands), MemoryOperandRange::isValid);
  if (!ValidMemoryOperands)
    return llvm::make_error<BenchmarkFailure>(
        "unsupported opcode: non-uniform memory access");
  // We do not handle instructions with OPERAND_PCREL.
  for (const Operand &Op : Instr.Operands)
    if (Op.isExplicit() &&
        Op.getExplicitOperandInfo().OperandType == llvm::MCOI::OPERAND_PCREL)
      return llvm::make_error<BenchmarkFailure>(
          "unsupported opcode: PC relative operand");
  for (const Operand &Op : Instr.Operands)
    if (Op.isReg() && Op.isExplicit() &&
        Op.getExplicitOperandInfo().RegClass ==
            llvm::X86::SEGMENT_REGRegClassID)
      return llvm::make_error<BenchmarkFailure>(
          "unsupported opcode: access segment memory");
  // We do not handle second-form X87 instructions. We only handle first-form
  // ones (_Fp); see the comment in X86InstrFPStack.td.
  for (const Operand &Op : Instr.Operands)
    if (Op.isReg() && Op.isExplicit() &&
        Op.getExplicitOperandInfo().RegClass == llvm::X86::RSTRegClassID)
      return llvm::make_error<BenchmarkFailure>(
          "unsupported second-form X87 instruction");
  return llvm::Error::success();
}

static unsigned GetX86FPFlags(const Instruction &Instr) {
  return Instr.Description->TSFlags & llvm::X86II::FPTypeMask;
}
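// The FP type extracted above routes x87 stack instructions in the snippet
// generators below: NotFP covers ordinary (non-x87) instructions, while
// categories such as OneArgFPRW (e.g. `ST(0) = fsqrt(ST(0))`) and TwoArgFP
// (e.g. `ST(0) = ST(0) + ST(i)`) identify x87 forms needing special handling.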

class X86LatencySnippetGenerator : public LatencySnippetGenerator {
public:
  using LatencySnippetGenerator::LatencySnippetGenerator;

  llvm::Expected<std::vector<CodeTemplate>>
  generateCodeTemplates(const Instruction &Instr) const override {
    if (auto E = IsInvalidOpcode(Instr))
      return std::move(E);

    switch (GetX86FPFlags(Instr)) {
    case llvm::X86II::NotFP:
      return LatencySnippetGenerator::generateCodeTemplates(Instr);
    case llvm::X86II::ZeroArgFP:
    case llvm::X86II::OneArgFP:
    case llvm::X86II::SpecialFP:
    case llvm::X86II::CompareFP:
    case llvm::X86II::CondMovFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 Instruction");
    case llvm::X86II::OneArgFPRW:
    case llvm::X86II::TwoArgFP:
      // These are instructions like
      // - `ST(0) = fsqrt(ST(0))` (OneArgFPRW)
      // - `ST(0) = ST(0) + ST(i)` (TwoArgFP)
      // They are intrinsically serial and do not modify the state of the stack.
      return generateSelfAliasingCodeTemplates(Instr);
    default:
      llvm_unreachable("Unknown FP Type!");
    }
  }
};

class X86UopsSnippetGenerator : public UopsSnippetGenerator {
public:
  using UopsSnippetGenerator::UopsSnippetGenerator;

  llvm::Expected<std::vector<CodeTemplate>>
  generateCodeTemplates(const Instruction &Instr) const override {
    if (auto E = IsInvalidOpcode(Instr))
      return std::move(E);

    switch (GetX86FPFlags(Instr)) {
    case llvm::X86II::NotFP:
      return UopsSnippetGenerator::generateCodeTemplates(Instr);
    case llvm::X86II::ZeroArgFP:
    case llvm::X86II::OneArgFP:
    case llvm::X86II::SpecialFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 Instruction");
    case llvm::X86II::OneArgFPRW:
    case llvm::X86II::TwoArgFP:
      // These are instructions like
      // - `ST(0) = fsqrt(ST(0))` (OneArgFPRW)
      // - `ST(0) = ST(0) + ST(i)` (TwoArgFP)
      // They are intrinsically serial and do not modify the state of the stack.
      // We generate the same code for latency and uops.
      return generateSelfAliasingCodeTemplates(Instr);
    case llvm::X86II::CompareFP:
    case llvm::X86II::CondMovFP:
      // We can compute uops for any FP instruction that does not grow or
      // shrink the stack (i.e. it either does not touch the stack or pushes
      // as much as it pops).
      return generateUnconstrainedCodeTemplates(
          Instr, "instruction does not grow/shrink the FP stack");
    default:
      llvm_unreachable("Unknown FP Type!");
    }
  }
};

static unsigned GetLoadImmediateOpcode(unsigned RegBitWidth) {
  switch (RegBitWidth) {
  case 8:
    return llvm::X86::MOV8ri;
  case 16:
    return llvm::X86::MOV16ri;
  case 32:
    return llvm::X86::MOV32ri;
  case 64:
    return llvm::X86::MOV64ri;
  }
  llvm_unreachable("Invalid Value Width");
}

// Generates an instruction to load an immediate value into a register.
static llvm::MCInst loadImmediate(unsigned Reg, unsigned RegBitWidth,
                                  const llvm::APInt &Value) {
  if (Value.getBitWidth() > RegBitWidth)
    llvm_unreachable("Value must fit in the Register");
  return llvm::MCInstBuilder(GetLoadImmediateOpcode(RegBitWidth))
      .addReg(Reg)
      .addImm(Value.getZExtValue());
}
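// For example (illustrative only): loadImmediate(llvm::X86::RAX, 64,
// llvm::APInt(64, 42)) builds the MCInst `MOV64ri RAX, 42`.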

// Allocates scratch memory on the stack.
static llvm::MCInst allocateStackSpace(unsigned Bytes) {
  return llvm::MCInstBuilder(llvm::X86::SUB64ri8)
      .addReg(llvm::X86::RSP)
      .addReg(llvm::X86::RSP)
      .addImm(Bytes);
}

// Fills scratch memory at offset `OffsetBytes` with value `Imm`.
static llvm::MCInst fillStackSpace(unsigned MovOpcode, unsigned OffsetBytes,
                                   uint64_t Imm) {
  return llvm::MCInstBuilder(MovOpcode)
      // Address = RSP
      .addReg(llvm::X86::RSP) // BaseReg
      .addImm(1)              // ScaleAmt
      .addReg(0)              // IndexReg
      .addImm(OffsetBytes)    // Disp
      .addReg(0)              // Segment
      // Immediate.
      .addImm(Imm);
}
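// For example (illustrative only): fillStackSpace(llvm::X86::MOV32mi, 4,
// 0xdeadbeef) builds `MOV32mi [RSP + 4], 0xdeadbeef`, a 32-bit store of the
// immediate into the scratch stack area.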

// Loads scratch memory into register `Reg` using opcode `RMOpcode`.
static llvm::MCInst loadToReg(unsigned Reg, unsigned RMOpcode) {
  return llvm::MCInstBuilder(RMOpcode)
      .addReg(Reg)
      // Address = RSP
      .addReg(llvm::X86::RSP) // BaseReg
      .addImm(1)              // ScaleAmt
      .addReg(0)              // IndexReg
      .addImm(0)              // Disp
      .addReg(0);             // Segment
}
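// For example (illustrative only): loadToReg(llvm::X86::XMM0,
// llvm::X86::MOVDQUrm) builds `MOVDQUrm XMM0, [RSP]`, loading 16 bytes from
// the top of the stack into XMM0.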

// Releases scratch memory.
static llvm::MCInst releaseStackSpace(unsigned Bytes) {
  return llvm::MCInstBuilder(llvm::X86::ADD64ri8)
      .addReg(llvm::X86::RSP)
      .addReg(llvm::X86::RSP)
      .addImm(Bytes);
}

// Reserves some space on the stack, fills it with the content of the provided
// constant, and provides methods to load the stack value into a register.
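// Typical use (illustrative sketch, not verbatim from this file):
//   ConstantInliner CI(llvm::APInt(128, 0));
//   std::vector<llvm::MCInst> Code =
//       CI.loadAndFinalize(llvm::X86::XMM0, 128, llvm::X86::MOVDQUrm);
// which emits SUB64ri8 RSP, 16; four MOV32mi stores of the constant; a
// MOVDQUrm load into XMM0; and ADD64ri8 RSP, 16.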
struct ConstantInliner {
  explicit ConstantInliner(const llvm::APInt &Constant) : Constant_(Constant) {}

  std::vector<llvm::MCInst> loadAndFinalize(unsigned Reg, unsigned RegBitWidth,
                                            unsigned Opcode) {
    assert((RegBitWidth & 7) == 0 &&
           "RegBitWidth must be a multiple of 8 bits");
    initStack(RegBitWidth / 8);
    add(loadToReg(Reg, Opcode));
    add(releaseStackSpace(RegBitWidth / 8));
    return std::move(Instructions);
  }

  std::vector<llvm::MCInst> loadX87STAndFinalize(unsigned Reg) {
    initStack(kF80Bytes);
    add(llvm::MCInstBuilder(llvm::X86::LD_F80m)
            // Address = RSP
            .addReg(llvm::X86::RSP) // BaseReg
            .addImm(1)              // ScaleAmt
            .addReg(0)              // IndexReg
            .addImm(0)              // Disp
            .addReg(0));            // Segment
    if (Reg != llvm::X86::ST0)
      add(llvm::MCInstBuilder(llvm::X86::ST_Frr).addReg(Reg));
    add(releaseStackSpace(kF80Bytes));
    return std::move(Instructions);
  }

  std::vector<llvm::MCInst> loadX87FPAndFinalize(unsigned Reg) {
    initStack(kF80Bytes);
    add(llvm::MCInstBuilder(llvm::X86::LD_Fp80m)
            .addReg(Reg)
            // Address = RSP
            .addReg(llvm::X86::RSP) // BaseReg
            .addImm(1)              // ScaleAmt
            .addReg(0)              // IndexReg
            .addImm(0)              // Disp
            .addReg(0));            // Segment
    add(releaseStackSpace(kF80Bytes));
    return std::move(Instructions);
  }

  std::vector<llvm::MCInst> popFlagAndFinalize() {
    initStack(8);
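    // No releaseStackSpace() is needed here: POPF64 itself pops the 8 bytes
    // off the stack as it loads RFLAGS.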
    add(llvm::MCInstBuilder(llvm::X86::POPF64));
    return std::move(Instructions);
  }

private:
  static constexpr const unsigned kF80Bytes = 10; // 80 bits.

  ConstantInliner &add(const llvm::MCInst &Inst) {
    Instructions.push_back(Inst);
    return *this;
  }

  void initStack(unsigned Bytes) {
    assert(Constant_.getBitWidth() <= Bytes * 8 &&
           "Value does not have the correct size");
    const llvm::APInt WideConstant = Constant_.getBitWidth() < Bytes * 8
                                         ? Constant_.sext(Bytes * 8)
                                         : Constant_;
    add(allocateStackSpace(Bytes));
    size_t ByteOffset = 0;
    for (; Bytes - ByteOffset >= 4; ByteOffset += 4)
      add(fillStackSpace(
          llvm::X86::MOV32mi, ByteOffset,
          WideConstant.extractBits(32, ByteOffset * 8).getZExtValue()));
    if (Bytes - ByteOffset >= 2) {
      add(fillStackSpace(
          llvm::X86::MOV16mi, ByteOffset,
          WideConstant.extractBits(16, ByteOffset * 8).getZExtValue()));
      ByteOffset += 2;
    }
    if (Bytes - ByteOffset >= 1)
      add(fillStackSpace(
          llvm::X86::MOV8mi, ByteOffset,
          WideConstant.extractBits(8, ByteOffset * 8).getZExtValue()));
  }
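  // For example (illustrative only): initStack(10), used for an 80-bit x87
  // value, emits SUB64ri8 RSP, 10, then two MOV32mi stores (offsets 0 and 4)
  // and one MOV16mi store (offset 8) to spell out the constant on the stack.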

  llvm::APInt Constant_;
  std::vector<llvm::MCInst> Instructions;
};

class ExegesisX86Target : public ExegesisTarget {
  void addTargetSpecificPasses(llvm::PassManagerBase &PM) const override {
    // Lowers FP pseudo-instructions, e.g. ABS_Fp32 -> ABS_F.
    PM.add(llvm::createX86FloatingPointStackifierPass());
  }

  unsigned getScratchMemoryRegister(const llvm::Triple &TT) const override {
    if (!TT.isArch64Bit()) {
      // FIXME: This would require popping from the stack, so we would have to
      // add some additional setup code.
      return 0;
    }
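    // RCX and RDI are the first integer argument registers of the Win64 and
    // SysV calling conventions, respectively.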
    return TT.isOSWindows() ? llvm::X86::RCX : llvm::X86::RDI;
  }

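  // 64 bytes is enough for the widest access generated here (a 512-bit ZMM
  // load or store).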
  unsigned getMaxMemoryAccessSize() const override { return 64; }

  void fillMemoryOperands(InstructionTemplate &IT, unsigned Reg,
                          unsigned Offset) const override {
    // FIXME: For instructions that read AND write to memory, we use the same
    // value for input and output.
    for (auto &MemoryRange : getMemoryOperandRanges(IT.Instr.Operands))
      MemoryRange.fillOrDie(IT, Reg, Offset);
  }

  std::vector<llvm::MCInst> setRegTo(const llvm::MCSubtargetInfo &STI,
                                     unsigned Reg,
                                     const llvm::APInt &Value) const override {
    if (llvm::X86::GR8RegClass.contains(Reg))
      return {loadImmediate(Reg, 8, Value)};
    if (llvm::X86::GR16RegClass.contains(Reg))
      return {loadImmediate(Reg, 16, Value)};
    if (llvm::X86::GR32RegClass.contains(Reg))
      return {loadImmediate(Reg, 32, Value)};
    if (llvm::X86::GR64RegClass.contains(Reg))
      return {loadImmediate(Reg, 64, Value)};
    ConstantInliner CI(Value);
    if (llvm::X86::VR64RegClass.contains(Reg))
      return CI.loadAndFinalize(Reg, 64, llvm::X86::MMX_MOVQ64rm);
    if (llvm::X86::VR128XRegClass.contains(Reg)) {
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQU32Z128rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQUrm);
      return CI.loadAndFinalize(Reg, 128, llvm::X86::MOVDQUrm);
    }
    if (llvm::X86::VR256XRegClass.contains(Reg)) {
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQU32Z256rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQUYrm);
    }
    if (llvm::X86::VR512RegClass.contains(Reg))
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 512, llvm::X86::VMOVDQU32Zrm);
    if (llvm::X86::RSTRegClass.contains(Reg)) {
      return CI.loadX87STAndFinalize(Reg);
    }
    if (llvm::X86::RFP32RegClass.contains(Reg) ||
        llvm::X86::RFP64RegClass.contains(Reg) ||
        llvm::X86::RFP80RegClass.contains(Reg)) {
      return CI.loadX87FPAndFinalize(Reg);
    }
    if (Reg == llvm::X86::EFLAGS)
      return CI.popFlagAndFinalize();
    return {}; // Not yet implemented.
  }

  std::unique_ptr<SnippetGenerator>
  createLatencySnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86LatencySnippetGenerator>(State);
  }

  std::unique_ptr<SnippetGenerator>
  createUopsSnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86UopsSnippetGenerator>(State);
  }

  bool matchesArch(llvm::Triple::ArchType Arch) const override {
    return Arch == llvm::Triple::x86_64 || Arch == llvm::Triple::x86;
  }
};

} // namespace

static ExegesisTarget *getTheExegesisX86Target() {
  static ExegesisX86Target Target;
  return &Target;
}

void InitializeX86ExegesisTarget() {
  ExegesisTarget::registerTarget(getTheExegesisX86Target());
}

} // namespace exegesis
} // namespace llvm