//===-- Target.cpp ----------------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include "../Target.h"

#include "../Latency.h"
#include "../Uops.h"
#include "MCTargetDesc/X86BaseInfo.h"
#include "MCTargetDesc/X86MCTargetDesc.h"
#include "X86.h"
#include "X86RegisterInfo.h"
#include "X86Subtarget.h"
#include "llvm/MC/MCInstBuilder.h"

namespace exegesis {

namespace {

// Common code for the X86 Uops and Latency snippet generators.
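// It first rejects opcodes that cannot be benchmarked (stack adjustments,
// PUSHF/POPF), then dispatches x87 instructions on their FPTypeMask class,
// and finally falls back to the generic code template generation provided by
// `Impl::Base`.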
template <typename Impl> class X86SnippetGenerator : public Impl {
  using Impl::Impl;

  llvm::Expected<CodeTemplate>
  generateCodeTemplate(unsigned Opcode) const override {
    // Test whether we can generate a snippet for this instruction.
    const auto &InstrInfo = this->State.getInstrInfo();
    const auto OpcodeName = InstrInfo.getName(Opcode);
    if (OpcodeName.startswith("POPF") || OpcodeName.startswith("PUSHF") ||
        OpcodeName.startswith("ADJCALLSTACK")) {
      return llvm::make_error<BenchmarkFailure>(
          "Unsupported opcode: Push/Pop/AdjCallStack");
    }

    // Handle X87.
    const auto &InstrDesc = InstrInfo.get(Opcode);
    const unsigned FPInstClass = InstrDesc.TSFlags & llvm::X86II::FPTypeMask;
    const Instruction Instr(InstrDesc, this->RATC);
    switch (FPInstClass) {
    case llvm::X86II::NotFP:
      break;
    case llvm::X86II::ZeroArgFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 ZeroArgFP");
    case llvm::X86II::OneArgFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 OneArgFP");
    case llvm::X86II::OneArgFPRW:
    case llvm::X86II::TwoArgFP: {
      // These are instructions like
      // - `ST(0) = fsqrt(ST(0))` (OneArgFPRW)
      // - `ST(0) = ST(0) + ST(i)` (TwoArgFP)
      // They are intrinsically serial and do not modify the state of the stack.
      // We generate the same code for latency and uops.
      return this->generateSelfAliasingCodeTemplate(Instr);
    }
    case llvm::X86II::CompareFP:
      return Impl::handleCompareFP(Instr);
    case llvm::X86II::CondMovFP:
      return Impl::handleCondMovFP(Instr);
    case llvm::X86II::SpecialFP:
      return llvm::make_error<BenchmarkFailure>("Unsupported x87 SpecialFP");
    default:
      llvm_unreachable("Unknown FP Type!");
    }

    // Fallback to generic implementation.
    return Impl::Base::generateCodeTemplate(Opcode);
  }
};

class X86LatencyImpl : public LatencySnippetGenerator {
protected:
  using Base = LatencySnippetGenerator;
  using Base::Base;
  llvm::Expected<CodeTemplate> handleCompareFP(const Instruction &Instr) const {
    return llvm::make_error<SnippetGeneratorFailure>(
        "Unsupported x87 CompareFP");
  }
  llvm::Expected<CodeTemplate> handleCondMovFP(const Instruction &Instr) const {
    return llvm::make_error<SnippetGeneratorFailure>(
        "Unsupported x87 CondMovFP");
  }
};

class X86UopsImpl : public UopsSnippetGenerator {
protected:
  using Base = UopsSnippetGenerator;
  using Base::Base;
  // We can compute uops for any FP instruction that does not grow or shrink
  // the stack (it either does not touch the stack or pushes as much as it
  // pops).
  llvm::Expected<CodeTemplate> handleCompareFP(const Instruction &Instr) const {
    return generateUnconstrainedCodeTemplate(
        Instr, "instruction does not grow/shrink the FP stack");
  }
  llvm::Expected<CodeTemplate> handleCondMovFP(const Instruction &Instr) const {
    return generateUnconstrainedCodeTemplate(
        Instr, "instruction does not grow/shrink the FP stack");
  }
};

static unsigned GetLoadImmediateOpcode(unsigned RegBitWidth) {
  switch (RegBitWidth) {
  case 8:
    return llvm::X86::MOV8ri;
  case 16:
    return llvm::X86::MOV16ri;
  case 32:
    return llvm::X86::MOV32ri;
  case 64:
    return llvm::X86::MOV64ri;
  }
  llvm_unreachable("Invalid Value Width");
}

// Generates an instruction to load an immediate value into a register.
static llvm::MCInst loadImmediate(unsigned Reg, unsigned RegBitWidth,
                                  const llvm::APInt &Value) {
  if (Value.getBitWidth() > RegBitWidth)
    llvm_unreachable("Value must fit in the Register");
  return llvm::MCInstBuilder(GetLoadImmediateOpcode(RegBitWidth))
      .addReg(Reg)
      .addImm(Value.getZExtValue());
}

// Allocates scratch memory on the stack.
static llvm::MCInst allocateStackSpace(unsigned Bytes) {
  return llvm::MCInstBuilder(llvm::X86::SUB64ri8)
      .addReg(llvm::X86::RSP)
      .addReg(llvm::X86::RSP)
      .addImm(Bytes);
}

// Fills scratch memory at offset `OffsetBytes` with value `Imm`.
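// For example, with `MovOpcode` == llvm::X86::MOV32mi this encodes
// `movl $Imm, OffsetBytes(%rsp)`.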
static llvm::MCInst fillStackSpace(unsigned MovOpcode, unsigned OffsetBytes,
                                   uint64_t Imm) {
  return llvm::MCInstBuilder(MovOpcode)
      // Address = ESP
      .addReg(llvm::X86::RSP) // BaseReg
      .addImm(1)              // ScaleAmt
      .addReg(0)              // IndexReg
      .addImm(OffsetBytes)    // Disp
      .addReg(0)              // Segment
      // Immediate.
      .addImm(Imm);
}

// Loads scratch memory into register `Reg` using opcode `RMOpcode`.
static llvm::MCInst loadToReg(unsigned Reg, unsigned RMOpcode) {
  return llvm::MCInstBuilder(RMOpcode)
      .addReg(Reg)
      // Address = ESP
      .addReg(llvm::X86::RSP) // BaseReg
      .addImm(1)              // ScaleAmt
      .addReg(0)              // IndexReg
      .addImm(0)              // Disp
      .addReg(0);             // Segment
}

// Releases scratch memory.
static llvm::MCInst releaseStackSpace(unsigned Bytes) {
  return llvm::MCInstBuilder(llvm::X86::ADD64ri8)
      .addReg(llvm::X86::RSP)
      .addReg(llvm::X86::RSP)
      .addImm(Bytes);
}

// Reserves some space on the stack, fills it with the content of the provided
// constant, and provides methods to load the stack value into a register.
struct ConstantInliner {
  explicit ConstantInliner(const llvm::APInt &Constant) : Constant_(Constant) {}

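  // Stores the constant into freshly allocated stack space, loads it back
  // into `Reg` with the given memory-to-register opcode, and then releases
  // the stack space.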
  std::vector<llvm::MCInst> loadAndFinalize(unsigned Reg, unsigned RegBitWidth,
                                            unsigned Opcode) {
    assert((RegBitWidth & 7) == 0 &&
           "RegBitWidth must be a multiple of 8 bits");
    initStack(RegBitWidth / 8);
    add(loadToReg(Reg, Opcode));
    add(releaseStackSpace(RegBitWidth / 8));
    return std::move(Instructions);
  }

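  // Loads the constant from the allocated stack slot onto the x87 stack with
  // the given x87 load opcode; if the destination is not ST(0), the value is
  // then copied to `Reg` with ST_Frr.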
  std::vector<llvm::MCInst>
  loadX87AndFinalize(unsigned Reg, unsigned RegBitWidth, unsigned Opcode) {
    assert((RegBitWidth & 7) == 0 &&
           "RegBitWidth must be a multiple of 8 bits");
    initStack(RegBitWidth / 8);
    add(llvm::MCInstBuilder(Opcode)
            .addReg(llvm::X86::RSP) // BaseReg
            .addImm(1)              // ScaleAmt
            .addReg(0)              // IndexReg
            .addImm(0)              // Disp
            .addReg(0));            // Segment
    if (Reg != llvm::X86::ST0)
      add(llvm::MCInstBuilder(llvm::X86::ST_Frr).addReg(Reg));
    add(releaseStackSpace(RegBitWidth / 8));
    return std::move(Instructions);
  }

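  // Materializes the constant on the stack and pops it directly into EFLAGS
  // with POPF64.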
  std::vector<llvm::MCInst> popFlagAndFinalize() {
    initStack(8);
    add(llvm::MCInstBuilder(llvm::X86::POPF64));
    return std::move(Instructions);
  }

private:
  ConstantInliner &add(const llvm::MCInst &Inst) {
    Instructions.push_back(Inst);
    return *this;
  }

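  // Allocates `Bytes` of stack space and copies the (sign-extended) constant
  // into it in 4-byte chunks, followed by a 2-byte and/or 1-byte store for
  // any remainder.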
  void initStack(unsigned Bytes) {
    assert(Constant_.getBitWidth() <= Bytes * 8 &&
           "Value does not have the correct size");
    const llvm::APInt WideConstant = Constant_.getBitWidth() < Bytes * 8
                                         ? Constant_.sext(Bytes * 8)
                                         : Constant_;
    add(allocateStackSpace(Bytes));
    size_t ByteOffset = 0;
    for (; Bytes - ByteOffset >= 4; ByteOffset += 4)
      add(fillStackSpace(
          llvm::X86::MOV32mi, ByteOffset,
          WideConstant.extractBits(32, ByteOffset * 8).getZExtValue()));
    if (Bytes - ByteOffset >= 2) {
      add(fillStackSpace(
          llvm::X86::MOV16mi, ByteOffset,
          WideConstant.extractBits(16, ByteOffset * 8).getZExtValue()));
      ByteOffset += 2;
    }
    if (Bytes - ByteOffset >= 1)
      add(fillStackSpace(
          llvm::X86::MOV8mi, ByteOffset,
          WideConstant.extractBits(8, ByteOffset * 8).getZExtValue()));
  }

  llvm::APInt Constant_;
  std::vector<llvm::MCInst> Instructions;
};

class ExegesisX86Target : public ExegesisTarget {
  void addTargetSpecificPasses(llvm::PassManagerBase &PM) const override {
    // Lowers FP pseudo-instructions, e.g. ABS_Fp32 -> ABS_F.
    PM.add(llvm::createX86FloatingPointStackifierPass());
  }

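  // Scratch memory is addressed through the first integer argument register
  // of the calling convention (RDI for SysV, RCX on Windows).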
  unsigned getScratchMemoryRegister(const llvm::Triple &TT) const override {
    if (!TT.isArch64Bit()) {
      // FIXME: This would require popping from the stack, so we would have to
      // add some additional setup code.
      return 0;
    }
    return TT.isOSWindows() ? llvm::X86::RCX : llvm::X86::RDI;
  }

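  // 64 bytes is enough for the widest access generated here (a 512-bit ZMM
  // load).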
  unsigned getMaxMemoryAccessSize() const override { return 64; }

  void fillMemoryOperands(InstructionTemplate &IT, unsigned Reg,
                          unsigned Offset) const override {
    // FIXME: For instructions that read AND write to memory, we use the same
    // value for input and output.
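    // An explicit memory reference on x86 expands to 5 consecutive operands
    // (BaseReg, ScaleAmt, IndexReg, Disp, Segment); they are set so that the
    // access becomes `Offset(Reg)`.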
    for (size_t I = 0, E = IT.Instr.Operands.size(); I < E; ++I) {
      const Operand *Op = &IT.Instr.Operands[I];
      if (Op->IsExplicit && Op->IsMem) {
        // Case 1: 5-op memory.
        assert((I + 5 <= E) && "x86 memory references are always 5 ops");
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(Reg); // BaseReg
        Op = &IT.Instr.Operands[++I];
        assert(Op->IsMem);
        assert(Op->IsExplicit);
        IT.getValueFor(*Op) = llvm::MCOperand::createImm(1); // ScaleAmt
        Op = &IT.Instr.Operands[++I];
        assert(Op->IsMem);
        assert(Op->IsExplicit);
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(0); // IndexReg
        Op = &IT.Instr.Operands[++I];
        assert(Op->IsMem);
        assert(Op->IsExplicit);
        IT.getValueFor(*Op) = llvm::MCOperand::createImm(Offset); // Disp
        Op = &IT.Instr.Operands[++I];
        assert(Op->IsMem);
        assert(Op->IsExplicit);
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(0); // Segment
        // Case 2: segment:index addressing. We assume that ES is 0.
      }
    }
  }

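  // Sets a register to a value. General-purpose registers get a direct `mov`
  // immediate; vector, x87 and EFLAGS registers are set by staging the
  // constant in stack memory (via ConstantInliner) and loading or popping it
  // from there.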
  std::vector<llvm::MCInst> setRegTo(const llvm::MCSubtargetInfo &STI,
                                     unsigned Reg,
                                     const llvm::APInt &Value) const override {
    if (llvm::X86::GR8RegClass.contains(Reg))
      return {loadImmediate(Reg, 8, Value)};
    if (llvm::X86::GR16RegClass.contains(Reg))
      return {loadImmediate(Reg, 16, Value)};
    if (llvm::X86::GR32RegClass.contains(Reg))
      return {loadImmediate(Reg, 32, Value)};
    if (llvm::X86::GR64RegClass.contains(Reg))
      return {loadImmediate(Reg, 64, Value)};
    ConstantInliner CI(Value);
    if (llvm::X86::VR64RegClass.contains(Reg))
      return CI.loadAndFinalize(Reg, 64, llvm::X86::MMX_MOVQ64rm);
    if (llvm::X86::VR128XRegClass.contains(Reg)) {
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQU32Z128rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQUrm);
      return CI.loadAndFinalize(Reg, 128, llvm::X86::MOVDQUrm);
    }
    if (llvm::X86::VR256XRegClass.contains(Reg)) {
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQU32Z256rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQUYrm);
    }
    if (llvm::X86::VR512RegClass.contains(Reg))
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 512, llvm::X86::VMOVDQU32Zrm);
    if (llvm::X86::RSTRegClass.contains(Reg)) {
      if (Value.getBitWidth() == 32)
        return CI.loadX87AndFinalize(Reg, 32, llvm::X86::LD_F32m);
      if (Value.getBitWidth() == 64)
        return CI.loadX87AndFinalize(Reg, 64, llvm::X86::LD_F64m);
      if (Value.getBitWidth() == 80)
        return CI.loadX87AndFinalize(Reg, 80, llvm::X86::LD_F80m);
    }
    if (Reg == llvm::X86::EFLAGS)
      return CI.popFlagAndFinalize();
    return {}; // Not yet implemented.
  }

  std::unique_ptr<SnippetGenerator>
  createLatencySnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86SnippetGenerator<X86LatencyImpl>>(State);
  }

  std::unique_ptr<SnippetGenerator>
  createUopsSnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86SnippetGenerator<X86UopsImpl>>(State);
  }

  bool matchesArch(llvm::Triple::ArchType Arch) const override {
    return Arch == llvm::Triple::x86_64 || Arch == llvm::Triple::x86;
  }
};

} // namespace

static ExegesisTarget *getTheExegesisX86Target() {
  static ExegesisX86Target Target;
  return &Target;
}

void InitializeX86ExegesisTarget() {
  ExegesisTarget::registerTarget(getTheExegesisX86Target());
}

} // namespace exegesis