//===- llvm/CodeGen/GlobalISel/Utils.cpp -------------------------*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file This file implements the utility functions used by the GlobalISel
/// pipeline.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/ADT/APFloat.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/GlobalISel/RegisterBankInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/StackProtector.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/IR/Constants.h"

#define DEBUG_TYPE "globalisel-utils"

using namespace llvm;

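/// Constrain \p Reg to \p RegClass if possible; otherwise copy it into a new
/// virtual register of that class right before \p InsertPt and return the
/// copy. The returned register is the one that satisfies the class constraint.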
unsigned llvm::constrainRegToClass(MachineRegisterInfo &MRI,
                                   const TargetInstrInfo &TII,
                                   const RegisterBankInfo &RBI,
                                   MachineInstr &InsertPt, unsigned Reg,
                                   const TargetRegisterClass &RegClass) {
  if (!RBI.constrainGenericRegister(Reg, RegClass, MRI)) {
    unsigned NewReg = MRI.createVirtualRegister(&RegClass);
    BuildMI(*InsertPt.getParent(), InsertPt, InsertPt.getDebugLoc(),
            TII.get(TargetOpcode::COPY), NewReg)
        .addReg(Reg);
    return NewReg;
  }

  return Reg;
}

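/// Constrain the register used by operand \p OpIdx (described by \p II and
/// given as \p RegMO) to the class that operand requires, inserting a COPY via
/// constrainRegToClass when direct constraining fails. Returns the register to
/// use for the operand.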
unsigned llvm::constrainOperandRegClass(
    const MachineFunction &MF, const TargetRegisterInfo &TRI,
    MachineRegisterInfo &MRI, const TargetInstrInfo &TII,
    const RegisterBankInfo &RBI, MachineInstr &InsertPt, const MCInstrDesc &II,
    const MachineOperand &RegMO, unsigned OpIdx) {
  unsigned Reg = RegMO.getReg();
  // Assume physical registers are properly constrained.
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "PhysReg not implemented");

  const TargetRegisterClass *RegClass = TII.getRegClass(II, OpIdx, &TRI, MF);
  // Some of the target-independent instructions, like COPY, may not impose any
  // register class constraints on some of their operands: if it's a use, we
  // can skip constraining, as the instruction defining the register will
  // constrain it.

  // We can't constrain unallocatable register classes, because we can't create
  // virtual registers for these classes, so we need to let targets handle this
  // case.
  if (RegClass && !RegClass->isAllocatable())
    RegClass = TRI.getConstrainedRegClassForOperand(RegMO, MRI);

  if (!RegClass) {
    assert((!isTargetSpecificOpcode(II.getOpcode()) || RegMO.isUse()) &&
           "Register class constraint is required unless either the "
           "instruction is target independent or the operand is a use");
    // FIXME: Just bailing out like this here may not be enough, unless we
    // expect the users of this function to do the right thing for PHIs and
    // COPY:
    //   v1 = COPY v0
    //   v2 = COPY v1
    // v1 here may end up not being constrained at all. Note that to reproduce
    // the issue we likely need a destination pattern of a selection rule
    // producing such extra copies, not just an input GMIR with them, as every
    // existing target using selectImpl handles copies before calling it, so
    // they never reach this function.
    return Reg;
  }
  return constrainRegToClass(MRI, TII, RBI, InsertPt, Reg, *RegClass);
}

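/// Constrain every explicit virtual-register operand of the already selected
/// instruction \p I to the class its MCInstrDesc requires, and tie uses to
/// defs where the descriptor says so. Always returns true.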
bool llvm::constrainSelectedInstRegOperands(MachineInstr &I,
                                            const TargetInstrInfo &TII,
                                            const TargetRegisterInfo &TRI,
                                            const RegisterBankInfo &RBI) {
  assert(!isPreISelGenericOpcode(I.getOpcode()) &&
         "A selected instruction is expected");
  MachineBasicBlock &MBB = *I.getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();

  for (unsigned OpI = 0, OpE = I.getNumExplicitOperands(); OpI != OpE; ++OpI) {
    MachineOperand &MO = I.getOperand(OpI);

    // There's nothing to be done on non-register operands.
    if (!MO.isReg())
      continue;

    LLVM_DEBUG(dbgs() << "Converting operand: " << MO << '\n');
    assert(MO.isReg() && "Unsupported non-reg operand");

    unsigned Reg = MO.getReg();
    // Physical registers don't need to be constrained.
    if (TRI.isPhysicalRegister(Reg))
      continue;

    // Register operands with a value of 0 (e.g. predicate operands) don't need
    // to be constrained.
    if (Reg == 0)
      continue;

    // If the operand is a vreg, we should constrain its regclass, and only
    // insert COPYs if that's impossible.
    // constrainOperandRegClass does that for us.
    MO.setReg(constrainOperandRegClass(MF, TRI, MRI, TII, RBI, I, I.getDesc(),
                                       MO, OpI));

    // Tie uses to defs as indicated in MCInstrDesc if this hasn't already been
    // done.
    if (MO.isUse()) {
      int DefIdx = I.getDesc().getOperandConstraint(OpI, MCOI::TIED_TO);
      if (DefIdx != -1 && !I.isRegTiedToUseOperand(DefIdx))
        I.tieOperands(DefIdx, OpI);
    }
  }
  return true;
}

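/// An instruction is trivially dead if it has no side effects (or is a PHI)
/// and none of the registers it defines is a physical register or a virtual
/// register with remaining non-debug uses.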
bool llvm::isTriviallyDead(const MachineInstr &MI,
                           const MachineRegisterInfo &MRI) {
  // If we can move an instruction, we can remove it. Otherwise, it has
  // a side-effect of some sort.
  bool SawStore = false;
  if (!MI.isSafeToMove(/*AA=*/nullptr, SawStore) && !MI.isPHI())
    return false;

  // Instructions without side-effects are dead iff they only define dead vregs.
  for (auto &MO : MI.operands()) {
    if (!MO.isReg() || !MO.isDef())
      continue;

    unsigned Reg = MO.getReg();
    if (TargetRegisterInfo::isPhysicalRegister(Reg) ||
        !MRI.use_nodbg_empty(Reg))
      return false;
  }
  return true;
}

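/// Report a GlobalISel failure: mark \p MF as having failed instruction
/// selection, then either turn \p R into a fatal error or emit it as a missed
/// optimization remark, depending on whether aborting on failure is enabled in
/// \p TPC.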
void llvm::reportGISelFailure(MachineFunction &MF, const TargetPassConfig &TPC,
                              MachineOptimizationRemarkEmitter &MORE,
                              MachineOptimizationRemarkMissed &R) {
  MF.getProperties().set(MachineFunctionProperties::Property::FailedISel);

  // Print the function name explicitly if we don't have a debug location
  // (which makes the diagnostic less useful) or if we're going to emit a raw
  // error.
  if (!R.getLocation().isValid() || TPC.isGlobalISelAbortEnabled())
    R << (" (in function: " + MF.getName() + ")").str();

  if (TPC.isGlobalISelAbortEnabled())
    report_fatal_error(R.getMsg());
  else
    MORE.emit(R);
}

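/// Convenience overload that builds the missed-optimization remark from
/// \p PassName, \p Msg and the offending \p MI before dispatching to the
/// variant above.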
void llvm::reportGISelFailure(MachineFunction &MF, const TargetPassConfig &TPC,
                              MachineOptimizationRemarkEmitter &MORE,
                              const char *PassName, StringRef Msg,
                              const MachineInstr &MI) {
  MachineOptimizationRemarkMissed R(PassName, "GISelFailure: ",
                                    MI.getDebugLoc(), MI.getParent());
  R << Msg;
  // Printing MI is expensive; only do it if expensive remarks are enabled.
  if (TPC.isGlobalISelAbortEnabled() || MORE.allowExtraAnalysis(PassName))
    R << ": " << ore::MNV("Inst", MI);
  reportGISelFailure(MF, TPC, MORE, R);
}

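/// If \p VReg is directly defined by a G_CONSTANT, return its integer value;
/// otherwise return None. No instructions are looked through.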
Optional<int64_t> llvm::getConstantVRegVal(unsigned VReg,
                                           const MachineRegisterInfo &MRI) {
  Optional<ValueAndVReg> ValAndVReg =
      getConstantVRegValWithLookThrough(VReg, MRI, /*LookThroughInstrs*/ false);
  assert((!ValAndVReg || ValAndVReg->VReg == VReg) &&
         "Value found while looking through instrs");
  if (!ValAndVReg)
    return None;
  return ValAndVReg->Value;
}

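/// If \p VReg is (possibly indirectly) defined by a G_CONSTANT, return its
/// value together with the vreg the G_CONSTANT defines. With
/// \p LookThroughInstrs set, the search walks through COPY, G_TRUNC, G_SEXT
/// and G_ZEXT, and the recorded truncations/extensions are replayed on the
/// constant. For example (illustrative MIR), given
///   %c:_(s32) = G_CONSTANT i32 42
///   %e:_(s64) = G_SEXT %c(s32)
/// a look-through query on %e yields the value 42 and the vreg %c.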
Optional<ValueAndVReg> llvm::getConstantVRegValWithLookThrough(
    unsigned VReg, const MachineRegisterInfo &MRI, bool LookThroughInstrs) {
  SmallVector<std::pair<unsigned, unsigned>, 4> SeenOpcodes;
  MachineInstr *MI;
  while ((MI = MRI.getVRegDef(VReg)) &&
         MI->getOpcode() != TargetOpcode::G_CONSTANT && LookThroughInstrs) {
    switch (MI->getOpcode()) {
    case TargetOpcode::G_TRUNC:
    case TargetOpcode::G_SEXT:
    case TargetOpcode::G_ZEXT:
      SeenOpcodes.push_back(std::make_pair(
          MI->getOpcode(),
          MRI.getType(MI->getOperand(0).getReg()).getSizeInBits()));
      VReg = MI->getOperand(1).getReg();
      break;
    case TargetOpcode::COPY:
      VReg = MI->getOperand(1).getReg();
      if (TargetRegisterInfo::isPhysicalRegister(VReg))
        return None;
      break;
    default:
      return None;
    }
  }
  if (!MI || MI->getOpcode() != TargetOpcode::G_CONSTANT ||
      (!MI->getOperand(1).isImm() && !MI->getOperand(1).isCImm()))
    return None;

  const MachineOperand &CstVal = MI->getOperand(1);
  unsigned BitWidth = MRI.getType(MI->getOperand(0).getReg()).getSizeInBits();
  APInt Val = CstVal.isImm() ? APInt(BitWidth, CstVal.getImm())
                             : CstVal.getCImm()->getValue();
  assert(Val.getBitWidth() == BitWidth &&
         "Value bitwidth doesn't match definition type");
  while (!SeenOpcodes.empty()) {
    std::pair<unsigned, unsigned> OpcodeAndSize = SeenOpcodes.pop_back_val();
    switch (OpcodeAndSize.first) {
    case TargetOpcode::G_TRUNC:
      Val = Val.trunc(OpcodeAndSize.second);
      break;
    case TargetOpcode::G_SEXT:
      Val = Val.sext(OpcodeAndSize.second);
      break;
    case TargetOpcode::G_ZEXT:
      Val = Val.zext(OpcodeAndSize.second);
      break;
    }
  }

  if (Val.getBitWidth() > 64)
    return None;

  return ValueAndVReg{Val.getSExtValue(), VReg};
}

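/// If \p VReg is defined by a G_FCONSTANT, return its ConstantFP operand;
/// otherwise return nullptr.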
const llvm::ConstantFP *llvm::getConstantFPVRegVal(
    unsigned VReg, const MachineRegisterInfo &MRI) {
  MachineInstr *MI = MRI.getVRegDef(VReg);
  if (TargetOpcode::G_FCONSTANT != MI->getOpcode())
    return nullptr;
  return MI->getOperand(1).getFPImm();
}

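/// Find the instruction defining \p Reg, looking through COPYs that preserve
/// the type, and return it if its opcode is \p Opcode; otherwise return
/// nullptr.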
llvm::MachineInstr *llvm::getOpcodeDef(unsigned Opcode, unsigned Reg,
                                       const MachineRegisterInfo &MRI) {
  auto *DefMI = MRI.getVRegDef(Reg);
  auto DstTy = MRI.getType(DefMI->getOperand(0).getReg());
  if (!DstTy.isValid())
    return nullptr;
  while (DefMI->getOpcode() == TargetOpcode::COPY) {
    unsigned SrcReg = DefMI->getOperand(1).getReg();
    auto SrcTy = MRI.getType(SrcReg);
    if (!SrcTy.isValid() || SrcTy != DstTy)
      break;
    DefMI = MRI.getVRegDef(SrcReg);
  }
  return DefMI->getOpcode() == Opcode ? DefMI : nullptr;
}

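/// Materialize \p Val as an APFloat of the given bit \p Size (16, 32 or 64);
/// the 16-bit case converts to IEEE half using round-to-nearest-even.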
APFloat llvm::getAPFloatFromSize(double Val, unsigned Size) {
  if (Size == 32)
    return APFloat(float(Val));
  if (Size == 64)
    return APFloat(Val);
  if (Size != 16)
    llvm_unreachable("Unsupported FPConstant size");
  bool Ignored;
  APFloat APF(Val);
  APF.convert(APFloat::IEEEhalf(), APFloat::rmNearestTiesToEven, &Ignored);
  return APF;
}

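/// Constant-fold the binary operation \p Opcode over the G_CONSTANT values
/// feeding \p Op1 and \p Op2. Returns None if either operand is not a
/// constant, the opcode is not handled, or the fold would divide by zero.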
Optional<APInt> llvm::ConstantFoldBinOp(unsigned Opcode, const unsigned Op1,
                                        const unsigned Op2,
                                        const MachineRegisterInfo &MRI) {
  auto MaybeOp1Cst = getConstantVRegVal(Op1, MRI);
  auto MaybeOp2Cst = getConstantVRegVal(Op2, MRI);
  if (MaybeOp1Cst && MaybeOp2Cst) {
    LLT Ty = MRI.getType(Op1);
    APInt C1(Ty.getSizeInBits(), *MaybeOp1Cst, true);
    APInt C2(Ty.getSizeInBits(), *MaybeOp2Cst, true);
    switch (Opcode) {
    default:
      break;
    case TargetOpcode::G_ADD:
      return C1 + C2;
    case TargetOpcode::G_AND:
      return C1 & C2;
    case TargetOpcode::G_ASHR:
      return C1.ashr(C2);
    case TargetOpcode::G_LSHR:
      return C1.lshr(C2);
    case TargetOpcode::G_MUL:
      return C1 * C2;
    case TargetOpcode::G_OR:
      return C1 | C2;
    case TargetOpcode::G_SHL:
      return C1 << C2;
    case TargetOpcode::G_SUB:
      return C1 - C2;
    case TargetOpcode::G_XOR:
      return C1 ^ C2;
    case TargetOpcode::G_UDIV:
      if (!C2.getBoolValue())
        break;
      return C1.udiv(C2);
    case TargetOpcode::G_SDIV:
      if (!C2.getBoolValue())
        break;
      return C1.sdiv(C2);
    case TargetOpcode::G_UREM:
      if (!C2.getBoolValue())
        break;
      return C1.urem(C2);
    case TargetOpcode::G_SREM:
      if (!C2.getBoolValue())
        break;
      return C1.srem(C2);
    }
  }
  return None;
}

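/// Declare the analyses that remain valid when GlobalISel falls back to
/// SelectionDAG; this helper currently only preserves StackProtector.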
void llvm::getSelectionDAGFallbackAnalysisUsage(AnalysisUsage &AU) {
  AU.addPreserved<StackProtector>();
}