Quentin Colombet | b4e7118 | 2016-12-22 21:56:19 +0000 | [diff] [blame] | 1 | //===- llvm/CodeGen/GlobalISel/Utils.cpp -------------------------*- C++ -*-==// |
| 2 | // |
Chandler Carruth | 2946cd7 | 2019-01-19 08:50:56 +0000 | [diff] [blame] | 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
Quentin Colombet | b4e7118 | 2016-12-22 21:56:19 +0000 | [diff] [blame] | 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | /// \file This file implements the utility functions used by the GlobalISel |
| 9 | /// pipeline. |
| 10 | //===----------------------------------------------------------------------===// |
| 11 | |
| 12 | #include "llvm/CodeGen/GlobalISel/Utils.h" |
Aditya Nandakumar | 91fc4e0 | 2018-03-09 17:31:51 +0000 | [diff] [blame] | 13 | #include "llvm/ADT/APFloat.h" |
Ahmed Bougacha | ae9dade | 2017-02-23 21:05:42 +0000 | [diff] [blame] | 14 | #include "llvm/ADT/Twine.h" |
Quentin Colombet | b4e7118 | 2016-12-22 21:56:19 +0000 | [diff] [blame] | 15 | #include "llvm/CodeGen/GlobalISel/RegisterBankInfo.h" |
| 16 | #include "llvm/CodeGen/MachineInstr.h" |
| 17 | #include "llvm/CodeGen/MachineInstrBuilder.h" |
Ahmed Bougacha | ae9dade | 2017-02-23 21:05:42 +0000 | [diff] [blame] | 18 | #include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h" |
Quentin Colombet | b4e7118 | 2016-12-22 21:56:19 +0000 | [diff] [blame] | 19 | #include "llvm/CodeGen/MachineRegisterInfo.h" |
Matthias Braun | 90ad683 | 2018-07-13 00:08:38 +0000 | [diff] [blame] | 20 | #include "llvm/CodeGen/StackProtector.h" |
David Blaikie | 3f833ed | 2017-11-08 01:01:31 +0000 | [diff] [blame] | 21 | #include "llvm/CodeGen/TargetInstrInfo.h" |
Ahmed Bougacha | ae9dade | 2017-02-23 21:05:42 +0000 | [diff] [blame] | 22 | #include "llvm/CodeGen/TargetPassConfig.h" |
David Blaikie | b3bde2e | 2017-11-17 01:07:10 +0000 | [diff] [blame] | 23 | #include "llvm/CodeGen/TargetRegisterInfo.h" |
Aditya Nandakumar | 75ad9cc | 2017-04-19 20:48:50 +0000 | [diff] [blame] | 24 | #include "llvm/IR/Constants.h" |
Quentin Colombet | b4e7118 | 2016-12-22 21:56:19 +0000 | [diff] [blame] | 25 | |
| 26 | #define DEBUG_TYPE "globalisel-utils" |
| 27 | |
| 28 | using namespace llvm; |
| 29 | |
Daniel Sanders | a6e2ceb | 2017-06-20 12:36:34 +0000 | [diff] [blame] | 30 | unsigned llvm::constrainRegToClass(MachineRegisterInfo &MRI, |
| 31 | const TargetInstrInfo &TII, |
Marcello Maggioni | c596584 | 2019-04-26 07:21:56 +0000 | [diff] [blame] | 32 | const RegisterBankInfo &RBI, unsigned Reg, |
Daniel Sanders | a6e2ceb | 2017-06-20 12:36:34 +0000 | [diff] [blame] | 33 | const TargetRegisterClass &RegClass) { |
Marcello Maggioni | c596584 | 2019-04-26 07:21:56 +0000 | [diff] [blame] | 34 | if (!RBI.constrainGenericRegister(Reg, RegClass, MRI)) |
| 35 | return MRI.createVirtualRegister(&RegClass); |
Daniel Sanders | a6e2ceb | 2017-06-20 12:36:34 +0000 | [diff] [blame] | 36 | |
| 37 | return Reg; |
| 38 | } |
| 39 | |
Quentin Colombet | b4e7118 | 2016-12-22 21:56:19 +0000 | [diff] [blame] | 40 | unsigned llvm::constrainOperandRegClass( |
| 41 | const MachineFunction &MF, const TargetRegisterInfo &TRI, |
| 42 | MachineRegisterInfo &MRI, const TargetInstrInfo &TII, |
Marcello Maggioni | c596584 | 2019-04-26 07:21:56 +0000 | [diff] [blame] | 43 | const RegisterBankInfo &RBI, MachineInstr &InsertPt, |
| 44 | const TargetRegisterClass &RegClass, const MachineOperand &RegMO, |
| 45 | unsigned OpIdx) { |
| 46 | unsigned Reg = RegMO.getReg(); |
| 47 | // Assume physical registers are properly constrained. |
| 48 | assert(TargetRegisterInfo::isVirtualRegister(Reg) && |
| 49 | "PhysReg not implemented"); |
| 50 | |
| 51 | unsigned ConstrainedReg = constrainRegToClass(MRI, TII, RBI, Reg, RegClass); |
| 52 | // If we created a new virtual register because the class is not compatible |
| 53 | // then create a copy between the new and the old register. |
| 54 | if (ConstrainedReg != Reg) { |
| 55 | MachineBasicBlock::iterator InsertIt(&InsertPt); |
| 56 | MachineBasicBlock &MBB = *InsertPt.getParent(); |
| 57 | if (RegMO.isUse()) { |
| 58 | BuildMI(MBB, InsertIt, InsertPt.getDebugLoc(), |
| 59 | TII.get(TargetOpcode::COPY), ConstrainedReg) |
| 60 | .addReg(Reg); |
| 61 | } else { |
| 62 | assert(RegMO.isDef() && "Must be a definition"); |
| 63 | BuildMI(MBB, std::next(InsertIt), InsertPt.getDebugLoc(), |
| 64 | TII.get(TargetOpcode::COPY), Reg) |
| 65 | .addReg(ConstrainedReg); |
| 66 | } |
| 67 | } |
| 68 | return ConstrainedReg; |
| 69 | } |
| 70 | |
/// Overload that derives the register class to constrain to from the
/// instruction description \p II (via TII.getRegClass for operand \p OpIdx),
/// then delegates to the RegClass-taking overload above.
///
/// \return the register the operand should refer to, or the original register
/// unchanged when no class constraint could be determined.
unsigned llvm::constrainOperandRegClass(
    const MachineFunction &MF, const TargetRegisterInfo &TRI,
    MachineRegisterInfo &MRI, const TargetInstrInfo &TII,
    const RegisterBankInfo &RBI, MachineInstr &InsertPt, const MCInstrDesc &II,
    const MachineOperand &RegMO, unsigned OpIdx) {
  unsigned Reg = RegMO.getReg();
  // Assume physical registers are properly constrained.
  assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
         "PhysReg not implemented");

  const TargetRegisterClass *RegClass = TII.getRegClass(II, OpIdx, &TRI, MF);
  // Some of the target independent instructions, like COPY, may not impose any
  // register class constraints on some of their operands: If it's a use, we can
  // skip constraining as the instruction defining the register would constrain
  // it.

  // We can't constrain unallocatable register classes, because we can't create
  // virtual registers for these classes, so we need to let targets handled this
  // case.
  if (RegClass && !RegClass->isAllocatable())
    RegClass = TRI.getConstrainedRegClassForOperand(RegMO, MRI);

  if (!RegClass) {
    // No class available: only legal for target-independent opcodes or uses
    // (whose defs are expected to constrain them); otherwise it's a bug.
    assert((!isTargetSpecificOpcode(II.getOpcode()) || RegMO.isUse()) &&
           "Register class constraint is required unless either the "
           "instruction is target independent or the operand is a use");
    // FIXME: Just bailing out like this here could be not enough, unless we
    // expect the users of this function to do the right thing for PHIs and
    // COPY:
    //   v1 = COPY v0
    //   v2 = COPY v1
    // v1 here may end up not being constrained at all. Please notice that to
    // reproduce the issue we likely need a destination pattern of a selection
    // rule producing such extra copies, not just an input GMIR with them as
    // every existing target using selectImpl handles copies before calling it
    // and they never reach this function.
    return Reg;
  }
  return constrainOperandRegClass(MF, TRI, MRI, TII, RBI, InsertPt, *RegClass,
                                  RegMO, OpIdx);
}
Ahmed Bougacha | ae9dade | 2017-02-23 21:05:42 +0000 | [diff] [blame] | 112 | |
Aditya Nandakumar | 18b3f9d | 2018-01-17 19:31:33 +0000 | [diff] [blame] | 113 | bool llvm::constrainSelectedInstRegOperands(MachineInstr &I, |
| 114 | const TargetInstrInfo &TII, |
| 115 | const TargetRegisterInfo &TRI, |
| 116 | const RegisterBankInfo &RBI) { |
Daniel Sanders | 0846452 | 2018-01-29 21:09:12 +0000 | [diff] [blame] | 117 | assert(!isPreISelGenericOpcode(I.getOpcode()) && |
| 118 | "A selected instruction is expected"); |
Aditya Nandakumar | 18b3f9d | 2018-01-17 19:31:33 +0000 | [diff] [blame] | 119 | MachineBasicBlock &MBB = *I.getParent(); |
| 120 | MachineFunction &MF = *MBB.getParent(); |
| 121 | MachineRegisterInfo &MRI = MF.getRegInfo(); |
| 122 | |
| 123 | for (unsigned OpI = 0, OpE = I.getNumExplicitOperands(); OpI != OpE; ++OpI) { |
| 124 | MachineOperand &MO = I.getOperand(OpI); |
| 125 | |
| 126 | // There's nothing to be done on non-register operands. |
| 127 | if (!MO.isReg()) |
| 128 | continue; |
| 129 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 130 | LLVM_DEBUG(dbgs() << "Converting operand: " << MO << '\n'); |
Aditya Nandakumar | 18b3f9d | 2018-01-17 19:31:33 +0000 | [diff] [blame] | 131 | assert(MO.isReg() && "Unsupported non-reg operand"); |
| 132 | |
| 133 | unsigned Reg = MO.getReg(); |
| 134 | // Physical registers don't need to be constrained. |
| 135 | if (TRI.isPhysicalRegister(Reg)) |
| 136 | continue; |
| 137 | |
| 138 | // Register operands with a value of 0 (e.g. predicate operands) don't need |
| 139 | // to be constrained. |
| 140 | if (Reg == 0) |
| 141 | continue; |
| 142 | |
| 143 | // If the operand is a vreg, we should constrain its regclass, and only |
| 144 | // insert COPYs if that's impossible. |
| 145 | // constrainOperandRegClass does that for us. |
| 146 | MO.setReg(constrainOperandRegClass(MF, TRI, MRI, TII, RBI, I, I.getDesc(), |
Aditya Nandakumar | 5999905 | 2018-02-26 22:56:21 +0000 | [diff] [blame] | 147 | MO, OpI)); |
Aditya Nandakumar | 18b3f9d | 2018-01-17 19:31:33 +0000 | [diff] [blame] | 148 | |
| 149 | // Tie uses to defs as indicated in MCInstrDesc if this hasn't already been |
| 150 | // done. |
| 151 | if (MO.isUse()) { |
| 152 | int DefIdx = I.getDesc().getOperandConstraint(OpI, MCOI::TIED_TO); |
| 153 | if (DefIdx != -1 && !I.isRegTiedToUseOperand(DefIdx)) |
| 154 | I.tieOperands(DefIdx, OpI); |
| 155 | } |
| 156 | } |
| 157 | return true; |
| 158 | } |
| 159 | |
Volkan Keles | 47debae | 2017-03-21 10:47:35 +0000 | [diff] [blame] | 160 | bool llvm::isTriviallyDead(const MachineInstr &MI, |
| 161 | const MachineRegisterInfo &MRI) { |
| 162 | // If we can move an instruction, we can remove it. Otherwise, it has |
| 163 | // a side-effect of some sort. |
| 164 | bool SawStore = false; |
Aditya Nandakumar | cd04e36 | 2018-10-19 20:11:52 +0000 | [diff] [blame] | 165 | if (!MI.isSafeToMove(/*AA=*/nullptr, SawStore) && !MI.isPHI()) |
Volkan Keles | 47debae | 2017-03-21 10:47:35 +0000 | [diff] [blame] | 166 | return false; |
| 167 | |
| 168 | // Instructions without side-effects are dead iff they only define dead vregs. |
| 169 | for (auto &MO : MI.operands()) { |
| 170 | if (!MO.isReg() || !MO.isDef()) |
| 171 | continue; |
| 172 | |
| 173 | unsigned Reg = MO.getReg(); |
Ahmed Bougacha | 15b3e8a | 2017-03-21 23:42:54 +0000 | [diff] [blame] | 174 | if (TargetRegisterInfo::isPhysicalRegister(Reg) || |
| 175 | !MRI.use_nodbg_empty(Reg)) |
Volkan Keles | 47debae | 2017-03-21 10:47:35 +0000 | [diff] [blame] | 176 | return false; |
| 177 | } |
| 178 | return true; |
| 179 | } |
| 180 | |
Ahmed Bougacha | ae9dade | 2017-02-23 21:05:42 +0000 | [diff] [blame] | 181 | void llvm::reportGISelFailure(MachineFunction &MF, const TargetPassConfig &TPC, |
| 182 | MachineOptimizationRemarkEmitter &MORE, |
| 183 | MachineOptimizationRemarkMissed &R) { |
| 184 | MF.getProperties().set(MachineFunctionProperties::Property::FailedISel); |
| 185 | |
| 186 | // Print the function name explicitly if we don't have a debug location (which |
| 187 | // makes the diagnostic less useful) or if we're going to emit a raw error. |
| 188 | if (!R.getLocation().isValid() || TPC.isGlobalISelAbortEnabled()) |
| 189 | R << (" (in function: " + MF.getName() + ")").str(); |
| 190 | |
| 191 | if (TPC.isGlobalISelAbortEnabled()) |
| 192 | report_fatal_error(R.getMsg()); |
| 193 | else |
| 194 | MORE.emit(R); |
| 195 | } |
| 196 | |
| 197 | void llvm::reportGISelFailure(MachineFunction &MF, const TargetPassConfig &TPC, |
| 198 | MachineOptimizationRemarkEmitter &MORE, |
| 199 | const char *PassName, StringRef Msg, |
| 200 | const MachineInstr &MI) { |
| 201 | MachineOptimizationRemarkMissed R(PassName, "GISelFailure: ", |
| 202 | MI.getDebugLoc(), MI.getParent()); |
Ahmed Bougacha | d630a92 | 2017-09-18 18:50:09 +0000 | [diff] [blame] | 203 | R << Msg; |
| 204 | // Printing MI is expensive; only do it if expensive remarks are enabled. |
Aditya Nandakumar | abf7594 | 2018-02-27 18:04:23 +0000 | [diff] [blame] | 205 | if (TPC.isGlobalISelAbortEnabled() || MORE.allowExtraAnalysis(PassName)) |
Ahmed Bougacha | d630a92 | 2017-09-18 18:50:09 +0000 | [diff] [blame] | 206 | R << ": " << ore::MNV("Inst", MI); |
Ahmed Bougacha | ae9dade | 2017-02-23 21:05:42 +0000 | [diff] [blame] | 207 | reportGISelFailure(MF, TPC, MORE, R); |
| 208 | } |
Aditya Nandakumar | 75ad9cc | 2017-04-19 20:48:50 +0000 | [diff] [blame] | 209 | |
| 210 | Optional<int64_t> llvm::getConstantVRegVal(unsigned VReg, |
| 211 | const MachineRegisterInfo &MRI) { |
Quentin Colombet | e77e5f4 | 2019-03-14 01:37:13 +0000 | [diff] [blame] | 212 | Optional<ValueAndVReg> ValAndVReg = |
| 213 | getConstantVRegValWithLookThrough(VReg, MRI, /*LookThroughInstrs*/ false); |
| 214 | assert((!ValAndVReg || ValAndVReg->VReg == VReg) && |
| 215 | "Value found while looking through instrs"); |
| 216 | if (!ValAndVReg) |
| 217 | return None; |
| 218 | return ValAndVReg->Value; |
| 219 | } |
| 220 | |
/// Search for a G_CONSTANT feeding \p VReg, optionally looking through
/// COPY / G_TRUNC / G_SEXT / G_ZEXT.  The extensions/truncations seen on the
/// way are recorded and replayed on the constant so the returned value matches
/// \p VReg's type.  Returns None if no constant is found, if an unhandled
/// opcode or a physical-register copy is hit, or if the final value is wider
/// than 64 bits.  The returned VReg is the register the constant was found on.
Optional<ValueAndVReg> llvm::getConstantVRegValWithLookThrough(
    unsigned VReg, const MachineRegisterInfo &MRI, bool LookThroughInstrs) {
  // Stack of (opcode, destination-size-in-bits) recorded outermost-first.
  SmallVector<std::pair<unsigned, unsigned>, 4> SeenOpcodes;
  MachineInstr *MI;
  while ((MI = MRI.getVRegDef(VReg)) &&
         MI->getOpcode() != TargetOpcode::G_CONSTANT && LookThroughInstrs) {
    switch (MI->getOpcode()) {
    case TargetOpcode::G_TRUNC:
    case TargetOpcode::G_SEXT:
    case TargetOpcode::G_ZEXT:
      // Remember the size conversion so it can be replayed on the constant.
      SeenOpcodes.push_back(std::make_pair(
          MI->getOpcode(),
          MRI.getType(MI->getOperand(0).getReg()).getSizeInBits()));
      VReg = MI->getOperand(1).getReg();
      break;
    case TargetOpcode::COPY:
      VReg = MI->getOperand(1).getReg();
      // A copy from a physical register has no traceable definition.
      if (TargetRegisterInfo::isPhysicalRegister(VReg))
        return None;
      break;
    default:
      return None;
    }
  }
  // Fail unless we landed on a G_CONSTANT with an (C)Imm operand.
  if (!MI || MI->getOpcode() != TargetOpcode::G_CONSTANT ||
      (!MI->getOperand(1).isImm() && !MI->getOperand(1).isCImm()))
    return None;

  const MachineOperand &CstVal = MI->getOperand(1);
  unsigned BitWidth = MRI.getType(MI->getOperand(0).getReg()).getSizeInBits();
  APInt Val = CstVal.isImm() ? APInt(BitWidth, CstVal.getImm())
                             : CstVal.getCImm()->getValue();
  assert(Val.getBitWidth() == BitWidth &&
         "Value bitwidth doesn't match definition type");
  // Replay the recorded conversions innermost-first (pop order) so the value
  // ends up with the width and semantics of the original VReg.
  while (!SeenOpcodes.empty()) {
    std::pair<unsigned, unsigned> OpcodeAndSize = SeenOpcodes.pop_back_val();
    switch (OpcodeAndSize.first) {
    case TargetOpcode::G_TRUNC:
      Val = Val.trunc(OpcodeAndSize.second);
      break;
    case TargetOpcode::G_SEXT:
      Val = Val.sext(OpcodeAndSize.second);
      break;
    case TargetOpcode::G_ZEXT:
      Val = Val.zext(OpcodeAndSize.second);
      break;
    }
  }

  // The result is returned as int64_t; anything wider cannot be represented.
  if (Val.getBitWidth() > 64)
    return None;

  return ValueAndVReg{Val.getSExtValue(), VReg};
}
Aditya Nandakumar | 2a73542 | 2017-05-12 22:54:52 +0000 | [diff] [blame] | 275 | |
| 276 | const llvm::ConstantFP* llvm::getConstantFPVRegVal(unsigned VReg, |
| 277 | const MachineRegisterInfo &MRI) { |
| 278 | MachineInstr *MI = MRI.getVRegDef(VReg); |
| 279 | if (TargetOpcode::G_FCONSTANT != MI->getOpcode()) |
| 280 | return nullptr; |
| 281 | return MI->getOperand(1).getFPImm(); |
| 282 | } |
Aditya Nandakumar | 954eea0 | 2017-11-15 23:45:04 +0000 | [diff] [blame] | 283 | |
Matt Arsenault | 14a4495 | 2019-07-09 22:19:13 +0000 | [diff] [blame] | 284 | llvm::MachineInstr *llvm::getDefIgnoringCopies(Register Reg, |
| 285 | const MachineRegisterInfo &MRI) { |
Aditya Nandakumar | 954eea0 | 2017-11-15 23:45:04 +0000 | [diff] [blame] | 286 | auto *DefMI = MRI.getVRegDef(Reg); |
| 287 | auto DstTy = MRI.getType(DefMI->getOperand(0).getReg()); |
| 288 | if (!DstTy.isValid()) |
| 289 | return nullptr; |
| 290 | while (DefMI->getOpcode() == TargetOpcode::COPY) { |
| 291 | unsigned SrcReg = DefMI->getOperand(1).getReg(); |
| 292 | auto SrcTy = MRI.getType(SrcReg); |
| 293 | if (!SrcTy.isValid() || SrcTy != DstTy) |
| 294 | break; |
| 295 | DefMI = MRI.getVRegDef(SrcReg); |
| 296 | } |
Matt Arsenault | 14a4495 | 2019-07-09 22:19:13 +0000 | [diff] [blame] | 297 | return DefMI; |
| 298 | } |
| 299 | |
| 300 | llvm::MachineInstr *llvm::getOpcodeDef(unsigned Opcode, Register Reg, |
| 301 | const MachineRegisterInfo &MRI) { |
| 302 | MachineInstr *DefMI = getDefIgnoringCopies(Reg, MRI); |
| 303 | return DefMI && DefMI->getOpcode() == Opcode ? DefMI : nullptr; |
Aditya Nandakumar | 954eea0 | 2017-11-15 23:45:04 +0000 | [diff] [blame] | 304 | } |
Aditya Nandakumar | 91fc4e0 | 2018-03-09 17:31:51 +0000 | [diff] [blame] | 305 | |
| 306 | APFloat llvm::getAPFloatFromSize(double Val, unsigned Size) { |
| 307 | if (Size == 32) |
| 308 | return APFloat(float(Val)); |
| 309 | if (Size == 64) |
| 310 | return APFloat(Val); |
| 311 | if (Size != 16) |
| 312 | llvm_unreachable("Unsupported FPConstant size"); |
| 313 | bool Ignored; |
| 314 | APFloat APF(Val); |
| 315 | APF.convert(APFloat::IEEEhalf(), APFloat::rmNearestTiesToEven, &Ignored); |
| 316 | return APF; |
| 317 | } |
Matthias Braun | 90ad683 | 2018-07-13 00:08:38 +0000 | [diff] [blame] | 318 | |
Aditya Nandakumar | 500e3ea | 2019-01-16 00:40:37 +0000 | [diff] [blame] | 319 | Optional<APInt> llvm::ConstantFoldBinOp(unsigned Opcode, const unsigned Op1, |
| 320 | const unsigned Op2, |
| 321 | const MachineRegisterInfo &MRI) { |
| 322 | auto MaybeOp1Cst = getConstantVRegVal(Op1, MRI); |
| 323 | auto MaybeOp2Cst = getConstantVRegVal(Op2, MRI); |
| 324 | if (MaybeOp1Cst && MaybeOp2Cst) { |
| 325 | LLT Ty = MRI.getType(Op1); |
| 326 | APInt C1(Ty.getSizeInBits(), *MaybeOp1Cst, true); |
| 327 | APInt C2(Ty.getSizeInBits(), *MaybeOp2Cst, true); |
| 328 | switch (Opcode) { |
| 329 | default: |
| 330 | break; |
| 331 | case TargetOpcode::G_ADD: |
| 332 | return C1 + C2; |
| 333 | case TargetOpcode::G_AND: |
| 334 | return C1 & C2; |
| 335 | case TargetOpcode::G_ASHR: |
| 336 | return C1.ashr(C2); |
| 337 | case TargetOpcode::G_LSHR: |
| 338 | return C1.lshr(C2); |
| 339 | case TargetOpcode::G_MUL: |
| 340 | return C1 * C2; |
| 341 | case TargetOpcode::G_OR: |
| 342 | return C1 | C2; |
| 343 | case TargetOpcode::G_SHL: |
| 344 | return C1 << C2; |
| 345 | case TargetOpcode::G_SUB: |
| 346 | return C1 - C2; |
| 347 | case TargetOpcode::G_XOR: |
| 348 | return C1 ^ C2; |
| 349 | case TargetOpcode::G_UDIV: |
| 350 | if (!C2.getBoolValue()) |
| 351 | break; |
| 352 | return C1.udiv(C2); |
| 353 | case TargetOpcode::G_SDIV: |
| 354 | if (!C2.getBoolValue()) |
| 355 | break; |
| 356 | return C1.sdiv(C2); |
| 357 | case TargetOpcode::G_UREM: |
| 358 | if (!C2.getBoolValue()) |
| 359 | break; |
| 360 | return C1.urem(C2); |
| 361 | case TargetOpcode::G_SREM: |
| 362 | if (!C2.getBoolValue()) |
| 363 | break; |
| 364 | return C1.srem(C2); |
| 365 | } |
| 366 | } |
| 367 | return None; |
| 368 | } |
| 369 | |
Matt Arsenault | 6ce1b4f | 2019-07-10 16:31:19 +0000 | [diff] [blame] | 370 | bool llvm::isKnownNeverNaN(Register Val, const MachineRegisterInfo &MRI, |
| 371 | bool SNaN) { |
| 372 | const MachineInstr *DefMI = MRI.getVRegDef(Val); |
| 373 | if (!DefMI) |
| 374 | return false; |
| 375 | |
| 376 | if (DefMI->getFlag(MachineInstr::FmNoNans)) |
| 377 | return true; |
| 378 | |
| 379 | if (SNaN) { |
| 380 | // FP operations quiet. For now, just handle the ones inserted during |
| 381 | // legalization. |
| 382 | switch (DefMI->getOpcode()) { |
| 383 | case TargetOpcode::G_FPEXT: |
| 384 | case TargetOpcode::G_FPTRUNC: |
| 385 | case TargetOpcode::G_FCANONICALIZE: |
| 386 | return true; |
| 387 | default: |
| 388 | return false; |
| 389 | } |
| 390 | } |
| 391 | |
| 392 | return false; |
| 393 | } |
| 394 | |
/// Record, on \p AU, the analyses preserved when GlobalISel falls back to
/// SelectionDAG.  Currently this marks only StackProtector as preserved.
void llvm::getSelectionDAGFallbackAnalysisUsage(AnalysisUsage &AU) {
  AU.addPreserved<StackProtector>();
}