//===- InstCombineSimplifyDemanded.cpp ------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains logic for simplifying instructions based on information
// about how they are used.
//
//===----------------------------------------------------------------------===//

#include "InstCombineInternal.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/PatternMatch.h"

using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "instcombine"

/// Check to see if the specified operand of the specified instruction is a
/// constant integer. If so, check to see if there are any bits set in the
/// constant that are not demanded. If so, shrink the constant and return true.
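///
/// For example, if the instruction is 'or i8 %x, -1' and only the low four
/// bits are demanded, the upper bits of the constant are never observed, so
/// the constant operand is replaced with 15 and true is returned.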
static bool ShrinkDemandedConstant(Instruction *I, unsigned OpNo,
                                   APInt Demanded) {
  assert(I && "No instruction?");
  assert(OpNo < I->getNumOperands() && "Operand index too large");

  // The operand must be a constant integer or splat integer.
  Value *Op = I->getOperand(OpNo);
  const APInt *C;
  if (!match(Op, m_APInt(C)))
    return false;

  // If there are no bits set that aren't demanded, nothing to do.
  Demanded = Demanded.zextOrTrunc(C->getBitWidth());
  if ((~Demanded & *C) == 0)
    return false;

  // This instruction is producing bits that are not demanded. Shrink the RHS.
  Demanded &= *C;
  I->setOperand(OpNo, ConstantInt::get(Op->getType(), Demanded));

  return true;
}

/// Inst is an integer instruction that SimplifyDemandedBits knows about. See
/// if the instruction has any properties that allow us to simplify its
/// operands.
bool InstCombiner::SimplifyDemandedInstructionBits(Instruction &Inst) {
  unsigned BitWidth = Inst.getType()->getScalarSizeInBits();
  APInt KnownZero(BitWidth, 0), KnownOne(BitWidth, 0);
  APInt DemandedMask(APInt::getAllOnesValue(BitWidth));

  Value *V = SimplifyDemandedUseBits(&Inst, DemandedMask, KnownZero, KnownOne,
                                     0, &Inst);
  if (!V) return false;
  if (V == &Inst) return true;
  replaceInstUsesWith(Inst, V);
  return true;
}

/// This form of SimplifyDemandedBits simplifies the specified instruction
/// operand if possible, updating it in place. It returns true if it made any
/// change and false otherwise.
bool InstCombiner::SimplifyDemandedBits(Instruction *I, unsigned OpNo,
                                        const APInt &DemandedMask,
                                        APInt &KnownZero, APInt &KnownOne,
                                        unsigned Depth) {
  Use &U = I->getOperandUse(OpNo);
  Value *NewVal = SimplifyDemandedUseBits(U.get(), DemandedMask, KnownZero,
                                          KnownOne, Depth, I);
  if (!NewVal) return false;
  U = NewVal;
  return true;
}

/// This function attempts to replace V with a simpler value based on the
/// demanded bits. When this function is called, it is known that only the bits
/// set in DemandedMask of the result of V are ever used downstream.
/// Consequently, depending on the mask and V, it may be possible to replace V
/// with a constant or one of its operands. In such cases, this function
/// returns the simpler value. In all other cases, it returns null after
/// analyzing the expression, setting KnownOne to the bits of the expression
/// known to be one and KnownZero to the bits known to be zero. These are
/// provided to potentially allow the caller (which might recursively be
/// SimplifyDemandedBits itself) to simplify the expression.
/// KnownOne and KnownZero always follow the invariant that:
///   KnownOne & KnownZero == 0.
/// That is, a bit can't be both 1 and 0. Note that the bits in KnownOne and
/// KnownZero may only be accurate for those bits set in DemandedMask. Note
/// also that the bitwidth of V, DemandedMask, KnownZero and KnownOne must all
/// be the same.
///
/// This returns null if it did not change anything and it permits no
/// simplification. This returns V itself if it did some simplification of V's
/// operands based on the information about what bits are demanded. This
/// returns some other non-null value if it found out that V is equal to
/// another value in the context where the specified bits are demanded, but not
/// for all users.
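///
/// For example, if V is 'and i8 %x, -16' with all bits demanded and nothing is
/// known about %x, the low four bits of the result are always zero, so those
/// bits are set in KnownZero on return, while KnownOne stays zero because no
/// result bit is guaranteed to be set.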
Value *InstCombiner::SimplifyDemandedUseBits(Value *V, APInt DemandedMask,
                                             APInt &KnownZero, APInt &KnownOne,
                                             unsigned Depth,
                                             Instruction *CxtI) {
  assert(V != nullptr && "Null pointer of Value???");
  assert(Depth <= 6 && "Limit Search Depth");
  uint32_t BitWidth = DemandedMask.getBitWidth();
  Type *VTy = V->getType();
  assert(
      (!VTy->isIntOrIntVectorTy() || VTy->getScalarSizeInBits() == BitWidth) &&
      KnownZero.getBitWidth() == BitWidth &&
      KnownOne.getBitWidth() == BitWidth &&
      "Value *V, DemandedMask, KnownZero and KnownOne "
      "must have same BitWidth");
  const APInt *C;
  if (match(V, m_APInt(C))) {
    // We know all of the bits for a scalar constant or a splat vector constant!
    KnownOne = *C & DemandedMask;
    KnownZero = ~KnownOne & DemandedMask;
    return nullptr;
  }
  if (isa<ConstantPointerNull>(V)) {
    // We know all of the bits for a constant!
    KnownOne.clearAllBits();
    KnownZero = DemandedMask;
    return nullptr;
  }

  KnownZero.clearAllBits();
  KnownOne.clearAllBits();
  if (DemandedMask == 0) {   // Not demanding any bits from V.
    if (isa<UndefValue>(V))
      return nullptr;
    return UndefValue::get(VTy);
  }

  if (Depth == 6)            // Limit search depth.
    return nullptr;

  APInt LHSKnownZero(BitWidth, 0), LHSKnownOne(BitWidth, 0);
  APInt RHSKnownZero(BitWidth, 0), RHSKnownOne(BitWidth, 0);

  Instruction *I = dyn_cast<Instruction>(V);
  if (!I) {
    computeKnownBits(V, KnownZero, KnownOne, Depth, CxtI);
    return nullptr;          // Only analyze instructions.
  }

  // If there are multiple uses of this value and we aren't at the root, then
  // we can't do any simplifications of the operands, because DemandedMask
  // only reflects the bits demanded by *one* of the users.
  if (Depth != 0 && !I->hasOneUse()) {
    // Despite the fact that we can't simplify this instruction in every user's
    // context, we can at least compute the known-zero/known-one bits, and we
    // can do simplifications that apply to *just* the one user if we know that
    // this instruction has a simpler value in that context.
    if (I->getOpcode() == Instruction::And) {
      // If either the LHS or the RHS are Zero, the result is zero.
      computeKnownBits(I->getOperand(1), RHSKnownZero, RHSKnownOne, Depth + 1,
                       CxtI);
      computeKnownBits(I->getOperand(0), LHSKnownZero, LHSKnownOne, Depth + 1,
                       CxtI);

      // If all of the demanded bits are known 1 on one side, return the other.
      // These bits cannot contribute to the result of the 'and' in this
      // context.
      if ((DemandedMask & ~LHSKnownZero & RHSKnownOne) ==
          (DemandedMask & ~LHSKnownZero))
        return I->getOperand(0);
      if ((DemandedMask & ~RHSKnownZero & LHSKnownOne) ==
          (DemandedMask & ~RHSKnownZero))
        return I->getOperand(1);

      // If all of the demanded bits in the inputs are known zeros, return zero.
      if ((DemandedMask & (RHSKnownZero|LHSKnownZero)) == DemandedMask)
        return Constant::getNullValue(VTy);

    } else if (I->getOpcode() == Instruction::Or) {
      // We can simplify (X|Y) -> X or Y in the user's context if we know that
      // only bits from X or Y are demanded.

      // If either the LHS or the RHS are One, the result is One.
      computeKnownBits(I->getOperand(1), RHSKnownZero, RHSKnownOne, Depth + 1,
                       CxtI);
      computeKnownBits(I->getOperand(0), LHSKnownZero, LHSKnownOne, Depth + 1,
                       CxtI);

      // If all of the demanded bits are known zero on one side, return the
      // other. These bits cannot contribute to the result of the 'or' in this
      // context.
      if ((DemandedMask & ~LHSKnownOne & RHSKnownZero) ==
          (DemandedMask & ~LHSKnownOne))
        return I->getOperand(0);
      if ((DemandedMask & ~RHSKnownOne & LHSKnownZero) ==
          (DemandedMask & ~RHSKnownOne))
        return I->getOperand(1);

      // If all of the potentially set bits on one side are known to be set on
      // the other side, just use the 'other' side.
      if ((DemandedMask & (~RHSKnownZero) & LHSKnownOne) ==
          (DemandedMask & (~RHSKnownZero)))
        return I->getOperand(0);
      if ((DemandedMask & (~LHSKnownZero) & RHSKnownOne) ==
          (DemandedMask & (~LHSKnownZero)))
        return I->getOperand(1);
    } else if (I->getOpcode() == Instruction::Xor) {
      // We can simplify (X^Y) -> X or Y in the user's context if we know that
      // only bits from X or Y are demanded.

      computeKnownBits(I->getOperand(1), RHSKnownZero, RHSKnownOne, Depth + 1,
                       CxtI);
      computeKnownBits(I->getOperand(0), LHSKnownZero, LHSKnownOne, Depth + 1,
                       CxtI);

      // If all of the demanded bits are known zero on one side, return the
      // other.
      if ((DemandedMask & RHSKnownZero) == DemandedMask)
        return I->getOperand(0);
      if ((DemandedMask & LHSKnownZero) == DemandedMask)
        return I->getOperand(1);
    }

    // Compute the KnownZero/KnownOne bits to simplify things downstream.
    computeKnownBits(I, KnownZero, KnownOne, Depth, CxtI);
    return nullptr;
  }

  // If this is the root being simplified, allow it to have multiple uses,
  // just set the DemandedMask to all bits so that we can try to simplify the
  // operands. This allows visitTruncInst (for example) to simplify the
  // operand of a trunc without duplicating all the logic below.
  if (Depth == 0 && !V->hasOneUse())
    DemandedMask.setAllBits();

  switch (I->getOpcode()) {
  default:
    computeKnownBits(I, KnownZero, KnownOne, Depth, CxtI);
    break;
  case Instruction::And:
    // If either the LHS or the RHS are Zero, the result is zero.
    if (SimplifyDemandedBits(I, 1, DemandedMask, RHSKnownZero, RHSKnownOne,
                             Depth + 1) ||
        SimplifyDemandedBits(I, 0, DemandedMask & ~RHSKnownZero, LHSKnownZero,
                             LHSKnownOne, Depth + 1))
      return I;
    assert(!(RHSKnownZero & RHSKnownOne) && "Bits known to be one AND zero?");
    assert(!(LHSKnownZero & LHSKnownOne) && "Bits known to be one AND zero?");

    // If the client is only demanding bits that we know, return the known
    // constant.
    if ((DemandedMask & ((RHSKnownZero | LHSKnownZero)|
                         (RHSKnownOne & LHSKnownOne))) == DemandedMask)
      return Constant::getIntegerValue(VTy, RHSKnownOne & LHSKnownOne);

    // If all of the demanded bits are known 1 on one side, return the other.
    // These bits cannot contribute to the result of the 'and'.
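    // For example, if only the low four bits of 'and i8 %x, 15' are demanded,
    // every demanded bit of the mask is known to be one, so %x itself can be
    // used in place of the 'and'.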
    if ((DemandedMask & ~LHSKnownZero & RHSKnownOne) ==
        (DemandedMask & ~LHSKnownZero))
      return I->getOperand(0);
    if ((DemandedMask & ~RHSKnownZero & LHSKnownOne) ==
        (DemandedMask & ~RHSKnownZero))
      return I->getOperand(1);

    // If all of the demanded bits in the inputs are known zeros, return zero.
    if ((DemandedMask & (RHSKnownZero|LHSKnownZero)) == DemandedMask)
      return Constant::getNullValue(VTy);

    // If the RHS is a constant, see if we can simplify it.
    if (ShrinkDemandedConstant(I, 1, DemandedMask & ~LHSKnownZero))
      return I;

    // Output known-1 bits are only known if set in both the LHS & RHS.
    KnownOne = RHSKnownOne & LHSKnownOne;
    // Output known-0 are known to be clear if zero in either the LHS | RHS.
    KnownZero = RHSKnownZero | LHSKnownZero;
    break;
  case Instruction::Or:
    // If either the LHS or the RHS are One, the result is One.
    if (SimplifyDemandedBits(I, 1, DemandedMask, RHSKnownZero, RHSKnownOne,
                             Depth + 1) ||
        SimplifyDemandedBits(I, 0, DemandedMask & ~RHSKnownOne, LHSKnownZero,
                             LHSKnownOne, Depth + 1))
      return I;
    assert(!(RHSKnownZero & RHSKnownOne) && "Bits known to be one AND zero?");
    assert(!(LHSKnownZero & LHSKnownOne) && "Bits known to be one AND zero?");

    // If the client is only demanding bits that we know, return the known
    // constant.
    if ((DemandedMask & ((RHSKnownZero & LHSKnownZero)|
                         (RHSKnownOne | LHSKnownOne))) == DemandedMask)
      return Constant::getIntegerValue(VTy, RHSKnownOne | LHSKnownOne);

    // If all of the demanded bits are known zero on one side, return the other.
    // These bits cannot contribute to the result of the 'or'.
    if ((DemandedMask & ~LHSKnownOne & RHSKnownZero) ==
        (DemandedMask & ~LHSKnownOne))
      return I->getOperand(0);
    if ((DemandedMask & ~RHSKnownOne & LHSKnownZero) ==
        (DemandedMask & ~RHSKnownOne))
      return I->getOperand(1);

    // If all of the potentially set bits on one side are known to be set on
    // the other side, just use the 'other' side.
    if ((DemandedMask & (~RHSKnownZero) & LHSKnownOne) ==
        (DemandedMask & (~RHSKnownZero)))
      return I->getOperand(0);
    if ((DemandedMask & (~LHSKnownZero) & RHSKnownOne) ==
        (DemandedMask & (~LHSKnownZero)))
      return I->getOperand(1);

    // If the RHS is a constant, see if we can simplify it.
    if (ShrinkDemandedConstant(I, 1, DemandedMask))
      return I;

    // Output known-0 bits are only known if clear in both the LHS & RHS.
    KnownZero = RHSKnownZero & LHSKnownZero;
    // Output known-1 are known to be set if set in either the LHS | RHS.
    KnownOne = RHSKnownOne | LHSKnownOne;
    break;
  case Instruction::Xor: {
    if (SimplifyDemandedBits(I, 1, DemandedMask, RHSKnownZero, RHSKnownOne,
                             Depth + 1) ||
        SimplifyDemandedBits(I, 0, DemandedMask, LHSKnownZero, LHSKnownOne,
                             Depth + 1))
      return I;
    assert(!(RHSKnownZero & RHSKnownOne) && "Bits known to be one AND zero?");
    assert(!(LHSKnownZero & LHSKnownOne) && "Bits known to be one AND zero?");

    // Output known-0 bits are known if clear or set in both the LHS & RHS.
    APInt IKnownZero = (RHSKnownZero & LHSKnownZero) |
                       (RHSKnownOne & LHSKnownOne);
    // Output known-1 are known to be set if set in only one of the LHS, RHS.
    APInt IKnownOne = (RHSKnownZero & LHSKnownOne) |
                      (RHSKnownOne & LHSKnownZero);

    // If the client is only demanding bits that we know, return the known
    // constant.
    if ((DemandedMask & (IKnownZero|IKnownOne)) == DemandedMask)
      return Constant::getIntegerValue(VTy, IKnownOne);

    // If all of the demanded bits are known zero on one side, return the other.
    // These bits cannot contribute to the result of the 'xor'.
    if ((DemandedMask & RHSKnownZero) == DemandedMask)
      return I->getOperand(0);
    if ((DemandedMask & LHSKnownZero) == DemandedMask)
      return I->getOperand(1);

    // If all of the demanded bits are known to be zero on one side or the
    // other, turn this into an *inclusive* or.
    // e.g. (A & C1)^(B & C2) -> (A & C1)|(B & C2) iff C1&C2 == 0
    if ((DemandedMask & ~RHSKnownZero & ~LHSKnownZero) == 0) {
      Instruction *Or =
        BinaryOperator::CreateOr(I->getOperand(0), I->getOperand(1),
                                 I->getName());
      return InsertNewInstWith(Or, *I);
    }

    // If all of the demanded bits on one side are known, and all of the set
    // bits on that side are also known to be set on the other side, turn this
    // into an AND, as we know the bits will be cleared.
    // e.g. (X | C1) ^ C2 --> (X | C1) & ~C2 iff (C1&C2) == C2
    if ((DemandedMask & (RHSKnownZero|RHSKnownOne)) == DemandedMask) {
      // all known
      if ((RHSKnownOne & LHSKnownOne) == RHSKnownOne) {
        Constant *AndC = Constant::getIntegerValue(VTy,
                                                   ~RHSKnownOne & DemandedMask);
        Instruction *And = BinaryOperator::CreateAnd(I->getOperand(0), AndC);
        return InsertNewInstWith(And, *I);
      }
    }

    // If the RHS is a constant, see if we can simplify it.
    // FIXME: for XOR, we prefer to force bits to 1 if they will make a -1.
    if (ShrinkDemandedConstant(I, 1, DemandedMask))
      return I;

    // If our LHS is an 'and' and if it has one use, and if any of the bits we
    // are flipping are known to be set, then the xor is just resetting those
    // bits to zero. We can just knock out bits from the 'and' and the 'xor',
    // simplifying both of them.
    if (Instruction *LHSInst = dyn_cast<Instruction>(I->getOperand(0)))
      if (LHSInst->getOpcode() == Instruction::And && LHSInst->hasOneUse() &&
          isa<ConstantInt>(I->getOperand(1)) &&
          isa<ConstantInt>(LHSInst->getOperand(1)) &&
          (LHSKnownOne & RHSKnownOne & DemandedMask) != 0) {
        ConstantInt *AndRHS = cast<ConstantInt>(LHSInst->getOperand(1));
        ConstantInt *XorRHS = cast<ConstantInt>(I->getOperand(1));
        APInt NewMask = ~(LHSKnownOne & RHSKnownOne & DemandedMask);

        Constant *AndC =
          ConstantInt::get(I->getType(), NewMask & AndRHS->getValue());
        Instruction *NewAnd = BinaryOperator::CreateAnd(I->getOperand(0), AndC);
        InsertNewInstWith(NewAnd, *I);

        Constant *XorC =
          ConstantInt::get(I->getType(), NewMask & XorRHS->getValue());
        Instruction *NewXor = BinaryOperator::CreateXor(NewAnd, XorC);
        return InsertNewInstWith(NewXor, *I);
      }

    // Output known-0 bits are known if clear or set in both the LHS & RHS.
    KnownZero = (RHSKnownZero & LHSKnownZero) | (RHSKnownOne & LHSKnownOne);
    // Output known-1 are known to be set if set in only one of the LHS, RHS.
    KnownOne = (RHSKnownZero & LHSKnownOne) | (RHSKnownOne & LHSKnownZero);
    break;
  }
  case Instruction::Select:
    // If this is a select as part of a min/max pattern, don't simplify any
    // further in case we break the structure.
    Value *LHS, *RHS;
    if (matchSelectPattern(I, LHS, RHS).Flavor != SPF_UNKNOWN)
      return nullptr;

    if (SimplifyDemandedBits(I, 2, DemandedMask, RHSKnownZero, RHSKnownOne,
                             Depth + 1) ||
        SimplifyDemandedBits(I, 1, DemandedMask, LHSKnownZero, LHSKnownOne,
                             Depth + 1))
      return I;
    assert(!(RHSKnownZero & RHSKnownOne) && "Bits known to be one AND zero?");
    assert(!(LHSKnownZero & LHSKnownOne) && "Bits known to be one AND zero?");

    // If the operands are constants, see if we can simplify them.
    if (ShrinkDemandedConstant(I, 1, DemandedMask) ||
        ShrinkDemandedConstant(I, 2, DemandedMask))
      return I;

    // Only known if known in both the LHS and RHS.
    KnownOne = RHSKnownOne & LHSKnownOne;
    KnownZero = RHSKnownZero & LHSKnownZero;
    break;
  case Instruction::Trunc: {
    unsigned truncBf = I->getOperand(0)->getType()->getScalarSizeInBits();
    DemandedMask = DemandedMask.zext(truncBf);
    KnownZero = KnownZero.zext(truncBf);
    KnownOne = KnownOne.zext(truncBf);
    if (SimplifyDemandedBits(I, 0, DemandedMask, KnownZero, KnownOne,
                             Depth + 1))
      return I;
    DemandedMask = DemandedMask.trunc(BitWidth);
    KnownZero = KnownZero.trunc(BitWidth);
    KnownOne = KnownOne.trunc(BitWidth);
    assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");
    break;
  }
  case Instruction::BitCast:
    if (!I->getOperand(0)->getType()->isIntOrIntVectorTy())
      return nullptr;  // vector->int or fp->int?

    if (VectorType *DstVTy = dyn_cast<VectorType>(I->getType())) {
      if (VectorType *SrcVTy =
            dyn_cast<VectorType>(I->getOperand(0)->getType())) {
        if (DstVTy->getNumElements() != SrcVTy->getNumElements())
          // Don't touch a bitcast between vectors of different element counts.
          return nullptr;
      } else
        // Don't touch a scalar-to-vector bitcast.
        return nullptr;
    } else if (I->getOperand(0)->getType()->isVectorTy())
      // Don't touch a vector-to-scalar bitcast.
      return nullptr;

    if (SimplifyDemandedBits(I, 0, DemandedMask, KnownZero, KnownOne,
                             Depth + 1))
      return I;
    assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");
    break;
  case Instruction::ZExt: {
    // Compute the bits in the result that are not present in the input.
    unsigned SrcBitWidth = I->getOperand(0)->getType()->getScalarSizeInBits();

    DemandedMask = DemandedMask.trunc(SrcBitWidth);
    KnownZero = KnownZero.trunc(SrcBitWidth);
    KnownOne = KnownOne.trunc(SrcBitWidth);
    if (SimplifyDemandedBits(I, 0, DemandedMask, KnownZero, KnownOne,
                             Depth + 1))
      return I;
    DemandedMask = DemandedMask.zext(BitWidth);
    KnownZero = KnownZero.zext(BitWidth);
    KnownOne = KnownOne.zext(BitWidth);
    assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");
    // The top bits are known to be zero.
    KnownZero.setBitsFrom(SrcBitWidth);
    break;
  }
  case Instruction::SExt: {
    // Compute the bits in the result that are not present in the input.
    unsigned SrcBitWidth = I->getOperand(0)->getType()->getScalarSizeInBits();

    APInt InputDemandedBits = DemandedMask &
                              APInt::getLowBitsSet(BitWidth, SrcBitWidth);

    APInt NewBits(APInt::getBitsSetFrom(BitWidth, SrcBitWidth));
    // If any of the sign extended bits are demanded, we know that the sign
    // bit is demanded.
    if ((NewBits & DemandedMask) != 0)
      InputDemandedBits.setBit(SrcBitWidth-1);

    InputDemandedBits = InputDemandedBits.trunc(SrcBitWidth);
    KnownZero = KnownZero.trunc(SrcBitWidth);
    KnownOne = KnownOne.trunc(SrcBitWidth);
    if (SimplifyDemandedBits(I, 0, InputDemandedBits, KnownZero, KnownOne,
                             Depth + 1))
      return I;
    InputDemandedBits = InputDemandedBits.zext(BitWidth);
    KnownZero = KnownZero.zext(BitWidth);
    KnownOne = KnownOne.zext(BitWidth);
    assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");

    // If the sign bit of the input is known set or clear, then we know the
    // top bits of the result.

    // If the input sign bit is known zero, or if the NewBits are not demanded,
    // convert this into a zero extension.
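    // For example, 'sext i8 %x to i32' where %x is known to be non-negative
    // computes the same value as 'zext i8 %x to i32', so the cast is rewritten
    // as a zext.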
    if (KnownZero[SrcBitWidth-1] || (NewBits & ~DemandedMask) == NewBits) {
      // Convert to ZExt cast
      CastInst *NewCast = new ZExtInst(I->getOperand(0), VTy, I->getName());
      return InsertNewInstWith(NewCast, *I);
    } else if (KnownOne[SrcBitWidth-1]) { // Input sign bit known set
      KnownOne |= NewBits;
    }
    break;
  }
  case Instruction::Add:
  case Instruction::Sub: {
    // If the high bits of an ADD/SUB are not demanded, then we do not care
    // about the high bits of the operands.
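    // For example, if only the low 16 bits of a 32-bit add are demanded,
    // carries only propagate upward, so the top 16 bits of each operand cannot
    // affect any demanded bit of the result.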
    unsigned NLZ = DemandedMask.countLeadingZeros();
    if (NLZ > 0) {
      // Right fill the mask of bits for this ADD/SUB to demand the most
      // significant bit and all those below it.
      APInt DemandedFromOps(APInt::getLowBitsSet(BitWidth, BitWidth-NLZ));
      if (ShrinkDemandedConstant(I, 0, DemandedFromOps) ||
          SimplifyDemandedBits(I, 0, DemandedFromOps, LHSKnownZero, LHSKnownOne,
                               Depth + 1) ||
          ShrinkDemandedConstant(I, 1, DemandedFromOps) ||
          SimplifyDemandedBits(I, 1, DemandedFromOps, RHSKnownZero, RHSKnownOne,
                               Depth + 1)) {
        // Disable the nsw and nuw flags here: We can no longer guarantee that
        // we won't wrap after simplification. Removing the nsw/nuw flags is
        // legal here because the top bit is not demanded.
        BinaryOperator &BinOP = *cast<BinaryOperator>(I);
        BinOP.setHasNoSignedWrap(false);
        BinOP.setHasNoUnsignedWrap(false);
        return I;
      }

      // If we are known to be adding/subtracting zeros to every bit below
      // the highest demanded bit, we just return the other side.
      if ((DemandedFromOps & RHSKnownZero) == DemandedFromOps)
        return I->getOperand(0);
      // We can't do this with the LHS for subtraction.
      if (I->getOpcode() == Instruction::Add &&
          (DemandedFromOps & LHSKnownZero) == DemandedFromOps)
        return I->getOperand(1);
    }

    // Otherwise just hand the add/sub off to computeKnownBits to fill in
    // the known zeros and ones.
    computeKnownBits(V, KnownZero, KnownOne, Depth, CxtI);
    break;
  }
  case Instruction::Shl:
    if (ConstantInt *SA = dyn_cast<ConstantInt>(I->getOperand(1))) {
      {
        Value *VarX; ConstantInt *C1;
        if (match(I->getOperand(0), m_Shr(m_Value(VarX), m_ConstantInt(C1)))) {
          Instruction *Shr = cast<Instruction>(I->getOperand(0));
          Value *R = SimplifyShrShlDemandedBits(Shr, I, DemandedMask,
                                                KnownZero, KnownOne);
          if (R)
            return R;
        }
      }

      uint64_t ShiftAmt = SA->getLimitedValue(BitWidth-1);
      APInt DemandedMaskIn(DemandedMask.lshr(ShiftAmt));

      // If the shift is NUW/NSW, then it does demand the high bits.
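      // (The bits shifted out determine whether an nsw/nuw shift wraps, so
      // they remain observable and must still be treated as demanded.)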
      ShlOperator *IOp = cast<ShlOperator>(I);
      if (IOp->hasNoSignedWrap())
        DemandedMaskIn.setHighBits(ShiftAmt+1);
      else if (IOp->hasNoUnsignedWrap())
        DemandedMaskIn.setHighBits(ShiftAmt);

      if (SimplifyDemandedBits(I, 0, DemandedMaskIn, KnownZero, KnownOne,
                               Depth + 1))
        return I;
      assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");
      KnownZero <<= ShiftAmt;
      KnownOne  <<= ShiftAmt;
      // low bits known zero.
      if (ShiftAmt)
        KnownZero.setLowBits(ShiftAmt);
    }
    break;
  case Instruction::LShr:
    // For a logical shift right
    if (ConstantInt *SA = dyn_cast<ConstantInt>(I->getOperand(1))) {
      uint64_t ShiftAmt = SA->getLimitedValue(BitWidth-1);

      // Unsigned shift right.
      APInt DemandedMaskIn(DemandedMask.shl(ShiftAmt));

      // If the shift is exact, then it does demand the low bits (and knows that
      // they are zero).
      if (cast<LShrOperator>(I)->isExact())
        DemandedMaskIn.setLowBits(ShiftAmt);

      if (SimplifyDemandedBits(I, 0, DemandedMaskIn, KnownZero, KnownOne,
                               Depth + 1))
        return I;
      assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");
      KnownZero = KnownZero.lshr(ShiftAmt);
      KnownOne  = KnownOne.lshr(ShiftAmt);
      if (ShiftAmt)
        KnownZero.setHighBits(ShiftAmt);  // high bits known zero.
    }
    break;
  case Instruction::AShr:
    // If this is an arithmetic shift right and only the low-bit is set, we can
    // always convert this into a logical shr, even if the shift amount is
    // variable. The low bit of the shift cannot be an input sign bit unless
    // the shift amount is >= the size of the datatype, which is undefined.
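    // For example, 'ashr i32 %x, %n' whose result is only used through bit 0
    // can be rewritten as 'lshr i32 %x, %n'.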
    if (DemandedMask == 1) {
      // Perform the logical shift right.
      Instruction *NewVal = BinaryOperator::CreateLShr(
                        I->getOperand(0), I->getOperand(1), I->getName());
      return InsertNewInstWith(NewVal, *I);
    }

    // If the sign bit is the only bit demanded by this ashr, then there is no
    // need to do it, the shift doesn't change the high bit.
    if (DemandedMask.isSignBit())
      return I->getOperand(0);

    if (ConstantInt *SA = dyn_cast<ConstantInt>(I->getOperand(1))) {
      uint32_t ShiftAmt = SA->getLimitedValue(BitWidth-1);

      // Signed shift right.
      APInt DemandedMaskIn(DemandedMask.shl(ShiftAmt));
      // If any of the "high bits" are demanded, we should set the sign bit as
      // demanded.
      if (DemandedMask.countLeadingZeros() <= ShiftAmt)
        DemandedMaskIn.setBit(BitWidth-1);

      // If the shift is exact, then it does demand the low bits (and knows that
      // they are zero).
      if (cast<AShrOperator>(I)->isExact())
        DemandedMaskIn.setLowBits(ShiftAmt);

      if (SimplifyDemandedBits(I, 0, DemandedMaskIn, KnownZero, KnownOne,
                               Depth + 1))
        return I;
      assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?");
      // Compute the new bits that are at the top now.
      APInt HighBits(APInt::getHighBitsSet(BitWidth, ShiftAmt));
      KnownZero = KnownZero.lshr(ShiftAmt);
      KnownOne  = KnownOne.lshr(ShiftAmt);

      // Handle the sign bits.
      APInt SignBit(APInt::getSignBit(BitWidth));
      // Adjust to where it is now in the mask.
      SignBit = SignBit.lshr(ShiftAmt);

      // If the input sign bit is known to be zero, or if none of the top bits
      // are demanded, turn this into an unsigned shift right.
      if (BitWidth <= ShiftAmt || KnownZero[BitWidth-ShiftAmt-1] ||
          (HighBits & ~DemandedMask) == HighBits) {
        // Perform the logical shift right.
        BinaryOperator *NewVal = BinaryOperator::CreateLShr(I->getOperand(0),
                                                            SA, I->getName());
        NewVal->setIsExact(cast<BinaryOperator>(I)->isExact());
        return InsertNewInstWith(NewVal, *I);
      } else if ((KnownOne & SignBit) != 0) { // New bits are known one.
        KnownOne |= HighBits;
      }
    }
    break;
  case Instruction::SRem:
    if (ConstantInt *Rem = dyn_cast<ConstantInt>(I->getOperand(1))) {
      // X % -1 demands all the bits because we don't want to introduce
      // INT_MIN % -1 (== undef) by accident.
      if (Rem->isAllOnesValue())
        break;
      APInt RA = Rem->getValue().abs();
      if (RA.isPowerOf2()) {
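        // A power-of-2 srem leaves the low bits of the dividend unchanged;
        // e.g. 'srem i32 %x, 8' agrees with %x on bits 0-2, so if only those
        // bits are demanded the srem can be dropped.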
| 692 | if (DemandedMask.ult(RA)) // srem won't affect demanded bits |
| 693 | return I->getOperand(0); |
| 694 | |
| 695 | APInt LowBits = RA - 1; |
| 696 | APInt Mask2 = LowBits | APInt::getSignBit(BitWidth); |
Craig Topper | 47596dd | 2017-03-25 06:52:52 +0000 | [diff] [blame] | 697 | if (SimplifyDemandedBits(I, 0, Mask2, LHSKnownZero, LHSKnownOne, |
| 698 | Depth + 1)) |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 699 | return I; |
| 700 | |
Duncan Sands | 3a48b87 | 2010-01-28 17:22:42 +0000 | [diff] [blame] | 701 | // The low bits of LHS are unchanged by the srem. |
Duncan Sands | c8a3e56 | 2010-01-29 06:18:46 +0000 | [diff] [blame] | 702 | KnownZero = LHSKnownZero & LowBits; |
| 703 | KnownOne = LHSKnownOne & LowBits; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 704 | |
Duncan Sands | 3a48b87 | 2010-01-28 17:22:42 +0000 | [diff] [blame] | 705 | // If LHS is non-negative or has all low bits zero, then the upper bits |
| 706 | // are all zero. |
| 707 | if (LHSKnownZero[BitWidth-1] || ((LHSKnownZero & LowBits) == LowBits)) |
| 708 | KnownZero |= ~LowBits; |
| 709 | |
| 710 | // If LHS is negative and not all low bits are zero, then the upper bits |
| 711 | // are all one. |
| 712 | if (LHSKnownOne[BitWidth-1] && ((LHSKnownOne & LowBits) != 0)) |
| 713 | KnownOne |= ~LowBits; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 714 | |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 715 | assert(!(KnownZero & KnownOne) && "Bits known to be one AND zero?"); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 716 | } |
| 717 | } |
Nick Lewycky | e467979 | 2011-03-07 01:50:10 +0000 | [diff] [blame] | 718 | |
| 719 | // The sign bit is the LHS's sign bit, except when the result of the |
| 720 | // remainder is zero. |
| 721 | if (DemandedMask.isNegative() && KnownZero.isNonNegative()) { |
Nick Lewycky | e467979 | 2011-03-07 01:50:10 +0000 | [diff] [blame] | 722 | APInt LHSKnownZero(BitWidth, 0), LHSKnownOne(BitWidth, 0); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 723 | computeKnownBits(I->getOperand(0), LHSKnownZero, LHSKnownOne, Depth + 1, |
Hal Finkel | 60db058 | 2014-09-07 18:57:58 +0000 | [diff] [blame] | 724 | CxtI); |
Nick Lewycky | e467979 | 2011-03-07 01:50:10 +0000 | [diff] [blame] | 725 | // If it's known zero, our sign bit is also zero. |
| 726 | if (LHSKnownZero.isNegative()) |
Craig Topper | 3a86a04 | 2017-03-19 05:49:16 +0000 | [diff] [blame] | 727 | KnownZero.setSignBit(); |
Nick Lewycky | e467979 | 2011-03-07 01:50:10 +0000 | [diff] [blame] | 728 | } |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 729 | break; |
| 730 | case Instruction::URem: { |
| 731 | APInt KnownZero2(BitWidth, 0), KnownOne2(BitWidth, 0); |
| 732 | APInt AllOnes = APInt::getAllOnesValue(BitWidth); |
Craig Topper | 47596dd | 2017-03-25 06:52:52 +0000 | [diff] [blame] | 733 | if (SimplifyDemandedBits(I, 0, AllOnes, KnownZero2, KnownOne2, Depth + 1) || |
| 734 | SimplifyDemandedBits(I, 1, AllOnes, KnownZero2, KnownOne2, Depth + 1)) |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 735 | return I; |
| 736 | |
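| | // The remainder of an unsigned division is smaller than its (non-zero)
| | // divisor, so it has at least as many leading zero bits as operand 1, whose
| | // known bits are left in KnownZero2 by the second call above.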
| 737 | unsigned Leaders = KnownZero2.countLeadingOnes(); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 738 | KnownZero = APInt::getHighBitsSet(BitWidth, Leaders) & DemandedMask; |
| 739 | break; |
| 740 | } |
| 741 | case Instruction::Call: |
| 742 | if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) { |
| 743 | switch (II->getIntrinsicID()) { |
| 744 | default: break; |
| 745 | case Intrinsic::bswap: { |
| 746 | // If the only bits demanded come from one byte of the bswap result, |
| 747 | // just shift the input byte into position to eliminate the bswap. |
| 748 | unsigned NLZ = DemandedMask.countLeadingZeros(); |
| 749 | unsigned NTZ = DemandedMask.countTrailingZeros(); |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 750 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 751 | // Round NTZ down to the next byte. If we have 11 trailing zeros, then |
| 752 | // we need all the bits down to bit 8. Likewise, round NLZ. If we |
| 753 | // have 14 leading zeros, round to 8. |
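| | // e.g. with i32 and DemandedMask == 0x0000ff00: NLZ == 16 and NTZ == 8, so
| | // the demanded byte of the result comes from input bits [16,24) and a single
| | // lshr by 8 replaces the bswap.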
| 754 | NLZ &= ~7; |
| 755 | NTZ &= ~7; |
| 756 | // If we need exactly one byte, we can do this transformation. |
| 757 | if (BitWidth-NLZ-NTZ == 8) { |
| 758 | unsigned ResultBit = NTZ; |
| 759 | unsigned InputBit = BitWidth-NTZ-8; |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 760 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 761 | // Replace this with either a left or right shift to get the byte into |
| 762 | // the right place. |
| 763 | Instruction *NewVal; |
| 764 | if (InputBit > ResultBit) |
Gabor Greif | 7943017 | 2010-06-24 12:35:13 +0000 | [diff] [blame] | 765 | NewVal = BinaryOperator::CreateLShr(II->getArgOperand(0), |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 766 | ConstantInt::get(I->getType(), InputBit-ResultBit)); |
| 767 | else |
Gabor Greif | 7943017 | 2010-06-24 12:35:13 +0000 | [diff] [blame] | 768 | NewVal = BinaryOperator::CreateShl(II->getArgOperand(0), |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 769 | ConstantInt::get(I->getType(), ResultBit-InputBit)); |
| 770 | NewVal->takeName(I); |
Eli Friedman | 6efb64e | 2011-05-19 01:20:42 +0000 | [diff] [blame] | 771 | return InsertNewInstWith(NewVal, *I); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 772 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 773 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 774 | // TODO: Could compute known zero/one bits based on the input. |
| 775 | break; |
| 776 | } |
Simon Pilgrim | fda22d6 | 2016-06-04 13:42:46 +0000 | [diff] [blame] | 777 | case Intrinsic::x86_mmx_pmovmskb: |
Simon Pilgrim | bd4a3be | 2016-04-28 12:22:53 +0000 | [diff] [blame] | 778 | case Intrinsic::x86_sse_movmsk_ps: |
| 779 | case Intrinsic::x86_sse2_movmsk_pd: |
| 780 | case Intrinsic::x86_sse2_pmovmskb_128: |
| 781 | case Intrinsic::x86_avx_movmsk_ps_256: |
| 782 | case Intrinsic::x86_avx_movmsk_pd_256: |
| 783 | case Intrinsic::x86_avx2_pmovmskb: { |
| 784 | // MOVMSK copies the vector elements' sign bits to the low bits |
| 785 | // and zeros the high bits. |
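| | // e.g. x86_sse2_pmovmskb_128 produces a 16-bit mask in an i32 result, so
| | // only the low 16 bits can carry useful information; the rest are zero.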
Simon Pilgrim | fda22d6 | 2016-06-04 13:42:46 +0000 | [diff] [blame] | 786 | unsigned ArgWidth; |
| 787 | if (II->getIntrinsicID() == Intrinsic::x86_mmx_pmovmskb) { |
| 788 | ArgWidth = 8; // Arg is x86_mmx, but treated as <8 x i8>. |
| 789 | } else { |
| 790 | auto Arg = II->getArgOperand(0); |
| 791 | auto ArgType = cast<VectorType>(Arg->getType()); |
| 792 | ArgWidth = ArgType->getNumElements(); |
| 793 | } |
Simon Pilgrim | bd4a3be | 2016-04-28 12:22:53 +0000 | [diff] [blame] | 794 | |
| 795 | // If we don't need any of the low bits then return zero; we already know
| 796 | // that DemandedMask is non-zero.
| 797 | APInt DemandedElts = DemandedMask.zextOrTrunc(ArgWidth); |
| 798 | if (DemandedElts == 0) |
| 799 | return ConstantInt::getNullValue(VTy); |
| 800 | |
Ahmed Bougacha | 17482a5 | 2016-04-28 14:36:07 +0000 | [diff] [blame] | 801 | // We know that the upper bits are set to zero. |
Craig Topper | 3a86a04 | 2017-03-19 05:49:16 +0000 | [diff] [blame] | 802 | KnownZero.setBitsFrom(ArgWidth); |
Simon Pilgrim | bd4a3be | 2016-04-28 12:22:53 +0000 | [diff] [blame] | 803 | return nullptr; |
| 804 | } |
Chad Rosier | b362884 | 2011-05-26 23:13:19 +0000 | [diff] [blame] | 805 | case Intrinsic::x86_sse42_crc32_64_64: |
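| | // The 64-bit crc32 still produces only a 32-bit CRC, zero-extended into the
| | // i64 result, so bits 32 and above are known zero.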
Craig Topper | 3a86a04 | 2017-03-19 05:49:16 +0000 | [diff] [blame] | 806 | KnownZero.setBitsFrom(32); |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 807 | return nullptr; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 808 | } |
| 809 | } |
Hal Finkel | 60db058 | 2014-09-07 18:57:58 +0000 | [diff] [blame] | 810 | computeKnownBits(V, KnownZero, KnownOne, Depth, CxtI); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 811 | break; |
| 812 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 813 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 814 | // If the client is only demanding bits that we know, return the known |
| 815 | // constant. |
Duncan Sands | c8a3e56 | 2010-01-29 06:18:46 +0000 | [diff] [blame] | 816 | if ((DemandedMask & (KnownZero|KnownOne)) == DemandedMask) |
| 817 | return Constant::getIntegerValue(VTy, KnownOne); |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 818 | return nullptr; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 819 | } |
| 820 | |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 821 | /// Helper routine of SimplifyDemandedUseBits. It tries to simplify |
| 822 | /// "E1 = (X lsr C1) << C2", where the C1 and C2 are constant, into |
| 823 | /// "E2 = X << (C2 - C1)" or "E2 = X >> (C1 - C2)", depending on the sign |
| 824 | /// of "C2-C1". |
| 825 | /// |
| 826 | /// Suppose E1 and E2 generally differ in the set of bits S = {bm, bm+1,
| 827 | /// ..., bn}, regardless of the specific value X holds.
| 828 | /// This transformation is legal iff one of the following conditions holds:
| 829 | ///   1) All the bits in S are 0, in which case E1 == E2.
| 830 | ///   2) We don't care about those bits in S, per the input DemandedMask.
| 831 | ///   3) Combination of 1) and 2): some bits in S are 0, and we don't care
| 832 | ///      about the rest.
| 833 | /// |
| 834 | /// Currently we only test condition 2). |
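| | /// For example, with X of type i32, C1 == 3 and C2 == 5, E1 == (X lshr 3) shl 5
| | /// and E2 == X shl 2 agree on every bit except bits 2..4, which E1 forces to
| | /// zero; so the rewrite to E2 is legal whenever DemandedMask excludes bits 2..4.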
| 835 | /// |
| 836 | /// As with SimplifyDemandedUseBits, it returns NULL if the simplification was |
| 837 | /// not successful. |
| 838 | Value *InstCombiner::SimplifyShrShlDemandedBits(Instruction *Shr, |
Benjamin Kramer | c321e53 | 2016-06-08 19:09:22 +0000 | [diff] [blame] | 839 | Instruction *Shl, |
| 840 | const APInt &DemandedMask, |
| 841 | APInt &KnownZero, |
| 842 | APInt &KnownOne) { |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 843 | |
Benjamin Kramer | 010f108 | 2013-08-30 14:35:35 +0000 | [diff] [blame] | 844 | const APInt &ShlOp1 = cast<ConstantInt>(Shl->getOperand(1))->getValue(); |
| 845 | const APInt &ShrOp1 = cast<ConstantInt>(Shr->getOperand(1))->getValue(); |
| 846 | if (!ShlOp1 || !ShrOp1) |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 847 | return nullptr; // Noop. |
Benjamin Kramer | 010f108 | 2013-08-30 14:35:35 +0000 | [diff] [blame] | 848 | |
| 849 | Value *VarX = Shr->getOperand(0); |
| 850 | Type *Ty = VarX->getType(); |
| 851 | unsigned BitWidth = Ty->getIntegerBitWidth(); |
| 852 | if (ShlOp1.uge(BitWidth) || ShrOp1.uge(BitWidth)) |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 853 | return nullptr; // Undef. |
Benjamin Kramer | 010f108 | 2013-08-30 14:35:35 +0000 | [diff] [blame] | 854 | |
| 855 | unsigned ShlAmt = ShlOp1.getZExtValue(); |
| 856 | unsigned ShrAmt = ShrOp1.getZExtValue(); |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 857 | |
| 858 | KnownOne.clearAllBits(); |
Craig Topper | 3a86a04 | 2017-03-19 05:49:16 +0000 | [diff] [blame] | 859 | KnownZero.setLowBits(ShlAmt - 1); |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 860 | KnownZero &= DemandedMask; |
| 861 | |
Benjamin Kramer | 010f108 | 2013-08-30 14:35:35 +0000 | [diff] [blame] | 862 | APInt BitMask1(APInt::getAllOnesValue(BitWidth)); |
| 863 | APInt BitMask2(APInt::getAllOnesValue(BitWidth)); |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 864 | |
| 865 | bool isLshr = (Shr->getOpcode() == Instruction::LShr); |
| 866 | BitMask1 = isLshr ? (BitMask1.lshr(ShrAmt) << ShlAmt) : |
| 867 | (BitMask1.ashr(ShrAmt) << ShlAmt); |
| 868 | |
| 869 | if (ShrAmt <= ShlAmt) { |
| 870 | BitMask2 <<= (ShlAmt - ShrAmt); |
| 871 | } else { |
| 872 | BitMask2 = isLshr ? BitMask2.lshr(ShrAmt - ShlAmt): |
| 873 | BitMask2.ashr(ShrAmt - ShlAmt); |
| 874 | } |
| 875 | |
| 876 | // Check if condition 2 (see the comment on this function) is satisfied.
| 877 | if ((BitMask1 & DemandedMask) == (BitMask2 & DemandedMask)) { |
| 878 | if (ShrAmt == ShlAmt) |
| 879 | return VarX; |
| 880 | |
| 881 | if (!Shr->hasOneUse()) |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 882 | return nullptr; |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 883 | |
| 884 | BinaryOperator *New; |
| 885 | if (ShrAmt < ShlAmt) { |
| 886 | Constant *Amt = ConstantInt::get(VarX->getType(), ShlAmt - ShrAmt); |
| 887 | New = BinaryOperator::CreateShl(VarX, Amt); |
| 888 | BinaryOperator *Orig = cast<BinaryOperator>(Shl); |
| 889 | New->setHasNoSignedWrap(Orig->hasNoSignedWrap()); |
| 890 | New->setHasNoUnsignedWrap(Orig->hasNoUnsignedWrap()); |
| 891 | } else { |
| 892 | Constant *Amt = ConstantInt::get(VarX->getType(), ShrAmt - ShlAmt); |
Shuxin Yang | 86c0e23 | 2012-12-04 03:28:32 +0000 | [diff] [blame] | 893 | New = isLshr ? BinaryOperator::CreateLShr(VarX, Amt) : |
| 894 | BinaryOperator::CreateAShr(VarX, Amt); |
Shuxin Yang | 81b3678 | 2012-12-12 00:29:03 +0000 | [diff] [blame] | 895 | if (cast<BinaryOperator>(Shr)->isExact()) |
| 896 | New->setIsExact(true); |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 897 | } |
| 898 | |
| 899 | return InsertNewInstWith(New, *Shl); |
| 900 | } |
| 901 | |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 902 | return nullptr; |
Shuxin Yang | 63e999e | 2012-12-04 00:04:54 +0000 | [diff] [blame] | 903 | } |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 904 | |
Sanjay Patel | bbbb3ce | 2016-07-14 20:54:43 +0000 | [diff] [blame] | 905 | /// The specified value produces a vector with any number of elements. |
| 906 | /// DemandedElts contains the set of elements that are actually used by the |
| 907 | /// caller. This method analyzes which elements of the operand are undef and |
| 908 | /// returns that information in UndefElts. |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 909 | /// |
| 910 | /// If the information about demanded elements can be used to simplify the |
| 911 | /// operation, the operation is simplified, then the resultant value is |
| 912 | /// returned. This returns null if no change was made. |
| 913 | Value *InstCombiner::SimplifyDemandedVectorElts(Value *V, APInt DemandedElts, |
Chris Lattner | b22423c | 2010-02-08 23:56:03 +0000 | [diff] [blame] | 914 | APInt &UndefElts, |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 915 | unsigned Depth) { |
Sanjay Patel | 9190b4a | 2016-04-29 20:54:56 +0000 | [diff] [blame] | 916 | unsigned VWidth = V->getType()->getVectorNumElements(); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 917 | APInt EltMask(APInt::getAllOnesValue(VWidth)); |
| 918 | assert((DemandedElts & ~EltMask) == 0 && "Invalid DemandedElts!"); |
| 919 | |
| 920 | if (isa<UndefValue>(V)) { |
| 921 | // If the entire vector is undefined, just return this info. |
| 922 | UndefElts = EltMask; |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 923 | return nullptr; |
Chris Lattner | b22423c | 2010-02-08 23:56:03 +0000 | [diff] [blame] | 924 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 925 | |
Chris Lattner | b22423c | 2010-02-08 23:56:03 +0000 | [diff] [blame] | 926 | if (DemandedElts == 0) { // If nothing is demanded, provide undef. |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 927 | UndefElts = EltMask; |
| 928 | return UndefValue::get(V->getType()); |
| 929 | } |
| 930 | |
| 931 | UndefElts = 0; |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 932 | |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 933 | // Handle ConstantAggregateZero, ConstantVector, ConstantDataSequential. |
| 934 | if (Constant *C = dyn_cast<Constant>(V)) { |
| 935 | // If every element is demanded, this is the identity and we are not
| 936 | // simplifying anything, so return null.
| 937 | if (DemandedElts.isAllOnesValue()) |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 938 | return nullptr; |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 939 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 940 | Type *EltTy = cast<VectorType>(V->getType())->getElementType(); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 941 | Constant *Undef = UndefValue::get(EltTy); |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 942 | |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 943 | SmallVector<Constant*, 16> Elts; |
| 944 | for (unsigned i = 0; i != VWidth; ++i) { |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 945 | if (!DemandedElts[i]) { // If not demanded, set to undef. |
| 946 | Elts.push_back(Undef); |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 947 | UndefElts.setBit(i); |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 948 | continue; |
| 949 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 950 | |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 951 | Constant *Elt = C->getAggregateElement(i); |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 952 | if (!Elt) return nullptr; |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 953 | |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 954 | if (isa<UndefValue>(Elt)) { // Already undef. |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 955 | Elts.push_back(Undef); |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 956 | UndefElts.setBit(i); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 957 | } else { // Otherwise, defined. |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 958 | Elts.push_back(Elt); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 959 | } |
Chris Lattner | 6705883 | 2012-01-25 06:48:06 +0000 | [diff] [blame] | 960 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 961 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 962 | // If we changed the constant, return it. |
Chris Lattner | 47a86bd | 2012-01-25 06:02:56 +0000 | [diff] [blame] | 963 | Constant *NewCV = ConstantVector::get(Elts); |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 964 | return NewCV != C ? NewCV : nullptr; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 965 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 966 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 967 | // Limit search depth. |
| 968 | if (Depth == 10) |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 969 | return nullptr; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 970 | |
Stuart Hastings | 5bd18b6 | 2011-05-17 22:13:31 +0000 | [diff] [blame] | 971 | // If multiple users are using the root value, proceed with |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 972 | // simplification conservatively assuming that all elements |
| 973 | // are needed. |
| 974 | if (!V->hasOneUse()) { |
| 975 | // Quit if we find multiple users of a non-root value though. |
| 976 | // They'll be handled when it's their turn to be visited by |
| 977 | // the main instcombine process. |
| 978 | if (Depth != 0) |
| 979 | // TODO: Just compute the UndefElts information recursively. |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 980 | return nullptr; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 981 | |
| 982 | // Conservatively assume that all elements are needed. |
| 983 | DemandedElts = EltMask; |
| 984 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 985 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 986 | Instruction *I = dyn_cast<Instruction>(V); |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 987 | if (!I) return nullptr; // Only analyze instructions. |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 988 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 989 | bool MadeChange = false; |
| 990 | APInt UndefElts2(VWidth, 0); |
Craig Topper | 23ebd95 | 2016-12-11 08:54:52 +0000 | [diff] [blame] | 991 | APInt UndefElts3(VWidth, 0); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 992 | Value *TmpV; |
| 993 | switch (I->getOpcode()) { |
| 994 | default: break; |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 995 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 996 | case Instruction::InsertElement: { |
| 997 | // If this is a variable index, we don't know which element it overwrites,
| 998 | // so demand exactly the same input elements as we produce.
| 999 | ConstantInt *Idx = dyn_cast<ConstantInt>(I->getOperand(2)); |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1000 | if (!Idx) { |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1001 | // Note that we can't propagate undef elt info, because we don't know |
| 1002 | // which elt is getting updated. |
| 1003 | TmpV = SimplifyDemandedVectorElts(I->getOperand(0), DemandedElts, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1004 | UndefElts2, Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1005 | if (TmpV) { I->setOperand(0, TmpV); MadeChange = true; } |
| 1006 | break; |
| 1007 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1008 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1009 | // If this is inserting an element that isn't demanded, remove this |
| 1010 | // insertelement. |
| 1011 | unsigned IdxNo = Idx->getZExtValue(); |
| 1012 | if (IdxNo >= VWidth || !DemandedElts[IdxNo]) { |
| 1013 | Worklist.Add(I); |
| 1014 | return I->getOperand(0); |
| 1015 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1016 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1017 | // Otherwise, the element inserted overwrites whatever was there, so the |
| 1018 | // input demanded set is simpler than the output set. |
| 1019 | APInt DemandedElts2 = DemandedElts; |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1020 | DemandedElts2.clearBit(IdxNo); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1021 | TmpV = SimplifyDemandedVectorElts(I->getOperand(0), DemandedElts2, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1022 | UndefElts, Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1023 | if (TmpV) { I->setOperand(0, TmpV); MadeChange = true; } |
| 1024 | |
| 1025 | // The inserted element is defined. |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1026 | UndefElts.clearBit(IdxNo); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1027 | break; |
| 1028 | } |
| 1029 | case Instruction::ShuffleVector: { |
| 1030 | ShuffleVectorInst *Shuffle = cast<ShuffleVectorInst>(I); |
Craig Topper | 2e18bcf | 2016-12-29 04:24:32 +0000 | [diff] [blame] | 1031 | unsigned LHSVWidth = |
| 1032 | Shuffle->getOperand(0)->getType()->getVectorNumElements(); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1033 | APInt LeftDemanded(LHSVWidth, 0), RightDemanded(LHSVWidth, 0); |
| 1034 | for (unsigned i = 0; i < VWidth; i++) { |
| 1035 | if (DemandedElts[i]) { |
| 1036 | unsigned MaskVal = Shuffle->getMaskValue(i); |
| 1037 | if (MaskVal != -1u) { |
| 1038 | assert(MaskVal < LHSVWidth * 2 && |
| 1039 | "shufflevector mask index out of range!"); |
| 1040 | if (MaskVal < LHSVWidth) |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1041 | LeftDemanded.setBit(MaskVal); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1042 | else |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1043 | RightDemanded.setBit(MaskVal - LHSVWidth); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1044 | } |
| 1045 | } |
| 1046 | } |
| 1047 | |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1048 | APInt LHSUndefElts(LHSVWidth, 0); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1049 | TmpV = SimplifyDemandedVectorElts(I->getOperand(0), LeftDemanded, |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1050 | LHSUndefElts, Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1051 | if (TmpV) { I->setOperand(0, TmpV); MadeChange = true; } |
| 1052 | |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1053 | APInt RHSUndefElts(LHSVWidth, 0); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1054 | TmpV = SimplifyDemandedVectorElts(I->getOperand(1), RightDemanded, |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1055 | RHSUndefElts, Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1056 | if (TmpV) { I->setOperand(1, TmpV); MadeChange = true; } |
| 1057 | |
| 1058 | bool NewUndefElts = false; |
Alexey Bataev | 793c946 | 2016-09-26 13:18:59 +0000 | [diff] [blame] | 1059 | unsigned LHSIdx = -1u, LHSValIdx = -1u; |
| 1060 | unsigned RHSIdx = -1u, RHSValIdx = -1u; |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1061 | bool LHSUniform = true; |
| 1062 | bool RHSUniform = true; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1063 | for (unsigned i = 0; i < VWidth; i++) { |
| 1064 | unsigned MaskVal = Shuffle->getMaskValue(i); |
| 1065 | if (MaskVal == -1u) { |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1066 | UndefElts.setBit(i); |
Eli Friedman | 888bea0 | 2011-09-15 01:14:29 +0000 | [diff] [blame] | 1067 | } else if (!DemandedElts[i]) { |
| 1068 | NewUndefElts = true; |
| 1069 | UndefElts.setBit(i); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1070 | } else if (MaskVal < LHSVWidth) { |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1071 | if (LHSUndefElts[MaskVal]) { |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1072 | NewUndefElts = true; |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1073 | UndefElts.setBit(i); |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1074 | } else { |
Alexey Bataev | 793c946 | 2016-09-26 13:18:59 +0000 | [diff] [blame] | 1075 | LHSIdx = LHSIdx == -1u ? i : LHSVWidth; |
| 1076 | LHSValIdx = LHSValIdx == -1u ? MaskVal : LHSVWidth; |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1077 | LHSUniform = LHSUniform && (MaskVal == i); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1078 | } |
| 1079 | } else { |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1080 | if (RHSUndefElts[MaskVal - LHSVWidth]) { |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1081 | NewUndefElts = true; |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1082 | UndefElts.setBit(i); |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1083 | } else { |
Alexey Bataev | 793c946 | 2016-09-26 13:18:59 +0000 | [diff] [blame] | 1084 | RHSIdx = RHSIdx == -1u ? i : LHSVWidth; |
| 1085 | RHSValIdx = RHSValIdx == -1u ? MaskVal - LHSVWidth : LHSVWidth; |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1086 | RHSUniform = RHSUniform && (MaskVal - LHSVWidth == i); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1087 | } |
| 1088 | } |
| 1089 | } |
| 1090 | |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1091 | // Try to transform a shuffle that uses a constant vector operand, taking only
| 1092 | // a single element from that constant, into a single insertelement instruction:
| 1093 | // shufflevector V, C, <v1, v2, .., ci, .., vm> -> |
| 1094 | // insertelement V, C[ci], ci-n |
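| | // e.g. shufflevector <4 x i32> %v, <4 x i32> <i32 11, i32 12, i32 13, i32 14>,
| | //                    <4 x i32> <i32 0, i32 5, i32 2, i32 3>
| | //      -> insertelement <4 x i32> %v, i32 12, i32 1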
| 1095 | if (LHSVWidth == Shuffle->getType()->getNumElements()) { |
| 1096 | Value *Op = nullptr; |
| 1097 | Constant *Value = nullptr; |
| 1098 | unsigned Idx = -1u; |
| 1099 | |
Craig Topper | 62f06e2 | 2016-12-29 05:38:31 +0000 | [diff] [blame] | 1100 | // Find the constant vector operand (LHS or RHS) that supplies the single element.
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1101 | if (LHSIdx < LHSVWidth && RHSUniform) { |
| 1102 | if (auto *CV = dyn_cast<ConstantVector>(Shuffle->getOperand(0))) { |
| 1103 | Op = Shuffle->getOperand(1); |
Alexey Bataev | 793c946 | 2016-09-26 13:18:59 +0000 | [diff] [blame] | 1104 | Value = CV->getOperand(LHSValIdx); |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1105 | Idx = LHSIdx; |
| 1106 | } |
| 1107 | } |
| 1108 | if (RHSIdx < LHSVWidth && LHSUniform) { |
| 1109 | if (auto *CV = dyn_cast<ConstantVector>(Shuffle->getOperand(1))) { |
| 1110 | Op = Shuffle->getOperand(0); |
Alexey Bataev | 793c946 | 2016-09-26 13:18:59 +0000 | [diff] [blame] | 1111 | Value = CV->getOperand(RHSValIdx); |
Alexey Bataev | fee9078 | 2016-09-23 09:14:08 +0000 | [diff] [blame] | 1112 | Idx = RHSIdx; |
| 1113 | } |
| 1114 | } |
| 1115 | // Found constant vector with single element - convert to insertelement. |
| 1116 | if (Op && Value) { |
| 1117 | Instruction *New = InsertElementInst::Create( |
| 1118 | Op, Value, ConstantInt::get(Type::getInt32Ty(I->getContext()), Idx), |
| 1119 | Shuffle->getName()); |
| 1120 | InsertNewInstWith(New, *Shuffle); |
| 1121 | return New; |
| 1122 | } |
| 1123 | } |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1124 | if (NewUndefElts) { |
| 1125 | // Add additional discovered undefs. |
Chris Lattner | 0256be9 | 2012-01-27 03:08:05 +0000 | [diff] [blame] | 1126 | SmallVector<Constant*, 16> Elts; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1127 | for (unsigned i = 0; i < VWidth; ++i) { |
| 1128 | if (UndefElts[i]) |
| 1129 | Elts.push_back(UndefValue::get(Type::getInt32Ty(I->getContext()))); |
| 1130 | else |
| 1131 | Elts.push_back(ConstantInt::get(Type::getInt32Ty(I->getContext()), |
| 1132 | Shuffle->getMaskValue(i))); |
| 1133 | } |
| 1134 | I->setOperand(2, ConstantVector::get(Elts)); |
| 1135 | MadeChange = true; |
| 1136 | } |
| 1137 | break; |
| 1138 | } |
Pete Cooper | abc13af | 2012-07-26 23:10:24 +0000 | [diff] [blame] | 1139 | case Instruction::Select: { |
| 1140 | APInt LeftDemanded(DemandedElts), RightDemanded(DemandedElts); |
| 1141 | if (ConstantVector* CV = dyn_cast<ConstantVector>(I->getOperand(0))) { |
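| | // With a constant condition vector, a lane whose condition element is zero
| | // never reads the true operand (operand 1), and a lane whose condition
| | // element is non-zero never reads the false operand (operand 2), so clear
| | // the corresponding bit in that operand's demanded set.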
| 1142 | for (unsigned i = 0; i < VWidth; i++) { |
Andrea Di Biagio | 40f59e4 | 2015-10-06 10:34:53 +0000 | [diff] [blame] | 1143 | Constant *CElt = CV->getAggregateElement(i); |
| 1144 | // Method isNullValue always returns false when called on a |
| 1145 | // ConstantExpr. If CElt is a ConstantExpr then skip it in order
| 1146 | // to avoid propagating incorrect information. |
| 1147 | if (isa<ConstantExpr>(CElt)) |
| 1148 | continue; |
| 1149 | if (CElt->isNullValue()) |
Pete Cooper | abc13af | 2012-07-26 23:10:24 +0000 | [diff] [blame] | 1150 | LeftDemanded.clearBit(i); |
| 1151 | else |
| 1152 | RightDemanded.clearBit(i); |
| 1153 | } |
| 1154 | } |
| 1155 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1156 | TmpV = SimplifyDemandedVectorElts(I->getOperand(1), LeftDemanded, UndefElts, |
| 1157 | Depth + 1); |
Pete Cooper | abc13af | 2012-07-26 23:10:24 +0000 | [diff] [blame] | 1158 | if (TmpV) { I->setOperand(1, TmpV); MadeChange = true; } |
| 1159 | |
| 1160 | TmpV = SimplifyDemandedVectorElts(I->getOperand(2), RightDemanded, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1161 | UndefElts2, Depth + 1); |
Pete Cooper | abc13af | 2012-07-26 23:10:24 +0000 | [diff] [blame] | 1162 | if (TmpV) { I->setOperand(2, TmpV); MadeChange = true; } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1163 | |
Pete Cooper | abc13af | 2012-07-26 23:10:24 +0000 | [diff] [blame] | 1164 | // Output elements are undefined if both are undefined. |
| 1165 | UndefElts &= UndefElts2; |
| 1166 | break; |
| 1167 | } |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1168 | case Instruction::BitCast: { |
| 1169 | // Vector->vector casts only. |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 1170 | VectorType *VTy = dyn_cast<VectorType>(I->getOperand(0)->getType()); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1171 | if (!VTy) break; |
| 1172 | unsigned InVWidth = VTy->getNumElements(); |
| 1173 | APInt InputDemandedElts(InVWidth, 0); |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1174 | UndefElts2 = APInt(InVWidth, 0); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1175 | unsigned Ratio; |
| 1176 | |
| 1177 | if (VWidth == InVWidth) { |
| 1178 | // If we are converting from <4 x i32> -> <4 x f32>, we demand the same |
| 1179 | // elements as are demanded of us. |
| 1180 | Ratio = 1; |
| 1181 | InputDemandedElts = DemandedElts; |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1182 | } else if ((VWidth % InVWidth) == 0) { |
| 1183 | // If the number of elements in the output is a multiple of the number of |
| 1184 | // elements in the input then an input element is live if any of the |
| 1185 | // corresponding output elements are live. |
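| | // e.g. for <2 x i64> -> <4 x i32>, Ratio == 2 and output elements 2*i and
| | // 2*i+1 both map onto input element i.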
| 1186 | Ratio = VWidth / InVWidth; |
| 1187 | for (unsigned OutIdx = 0; OutIdx != VWidth; ++OutIdx) |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1188 | if (DemandedElts[OutIdx]) |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1189 | InputDemandedElts.setBit(OutIdx / Ratio); |
| 1190 | } else if ((InVWidth % VWidth) == 0) { |
| 1191 | // If the number of elements in the input is a multiple of the number of |
| 1192 | // elements in the output then an input element is live if the |
| 1193 | // corresponding output element is live. |
| 1194 | Ratio = InVWidth / VWidth; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1195 | for (unsigned InIdx = 0; InIdx != InVWidth; ++InIdx) |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1196 | if (DemandedElts[InIdx / Ratio]) |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1197 | InputDemandedElts.setBit(InIdx); |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1198 | } else { |
| 1199 | // Unsupported so far. |
| 1200 | break; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1201 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1202 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1203 | // Simplify the bitcast operand based on the demanded elements computed above.
| 1204 | TmpV = SimplifyDemandedVectorElts(I->getOperand(0), InputDemandedElts, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1205 | UndefElts2, Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1206 | if (TmpV) { |
| 1207 | I->setOperand(0, TmpV); |
| 1208 | MadeChange = true; |
| 1209 | } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1210 | |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1211 | if (VWidth == InVWidth) { |
| 1212 | UndefElts = UndefElts2; |
| 1213 | } else if ((VWidth % InVWidth) == 0) { |
| 1214 | // If the number of elements in the output is a multiple of the number of |
| 1215 | // elements in the input then an output element is undef if the |
| 1216 | // corresponding input element is undef. |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1217 | for (unsigned OutIdx = 0; OutIdx != VWidth; ++OutIdx) |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1218 | if (UndefElts2[OutIdx / Ratio]) |
Jay Foad | 25a5e4c | 2010-12-01 08:53:58 +0000 | [diff] [blame] | 1219 | UndefElts.setBit(OutIdx); |
Simon Pilgrim | 43f5e08 | 2015-09-29 08:19:11 +0000 | [diff] [blame] | 1220 | } else if ((InVWidth % VWidth) == 0) { |
| 1221 | // If the number of elements in the input is a multiple of the number of |
| 1222 | // elements in the output then an output element is undef if all of the |
| 1223 | // corresponding input elements are undef. |
| 1224 | for (unsigned OutIdx = 0; OutIdx != VWidth; ++OutIdx) { |
| 1225 | APInt SubUndef = UndefElts2.lshr(OutIdx * Ratio).zextOrTrunc(Ratio); |
| 1226 | if (SubUndef.countPopulation() == Ratio) |
| 1227 | UndefElts.setBit(OutIdx); |
| 1228 | } |
| 1229 | } else { |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1230 | llvm_unreachable("Unimp"); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1231 | } |
| 1232 | break; |
| 1233 | } |
| 1234 | case Instruction::And: |
| 1235 | case Instruction::Or: |
| 1236 | case Instruction::Xor: |
| 1237 | case Instruction::Add: |
| 1238 | case Instruction::Sub: |
| 1239 | case Instruction::Mul: |
| 1240 | // div/rem are not handled here: they demand all input elements, because
| | // simplifying an undemanded lane of a divisor could introduce a divide by zero.
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1241 | TmpV = SimplifyDemandedVectorElts(I->getOperand(0), DemandedElts, UndefElts, |
| 1242 | Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1243 | if (TmpV) { I->setOperand(0, TmpV); MadeChange = true; } |
| 1244 | TmpV = SimplifyDemandedVectorElts(I->getOperand(1), DemandedElts, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1245 | UndefElts2, Depth + 1); |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1246 | if (TmpV) { I->setOperand(1, TmpV); MadeChange = true; } |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1247 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1248 | // Output elements are undefined if both are undefined. Consider things |
| 1249 | // like undef&0. The result is known zero, not undef. |
| 1250 | UndefElts &= UndefElts2; |
| 1251 | break; |
Pete Cooper | e807e45 | 2012-07-26 22:37:04 +0000 | [diff] [blame] | 1252 | case Instruction::FPTrunc: |
| 1253 | case Instruction::FPExt: |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1254 | TmpV = SimplifyDemandedVectorElts(I->getOperand(0), DemandedElts, UndefElts, |
| 1255 | Depth + 1); |
Pete Cooper | e807e45 | 2012-07-26 22:37:04 +0000 | [diff] [blame] | 1256 | if (TmpV) { I->setOperand(0, TmpV); MadeChange = true; } |
| 1257 | break; |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1258 | |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1259 | case Instruction::Call: { |
| 1260 | IntrinsicInst *II = dyn_cast<IntrinsicInst>(I); |
| 1261 | if (!II) break; |
| 1262 | switch (II->getIntrinsicID()) { |
| 1263 | default: break; |
Craig Topper | 4c94775 | 2012-12-22 18:09:02 +0000 | [diff] [blame] | 1264 | |
Craig Topper | 7fc6d34 | 2016-12-11 22:32:38 +0000 | [diff] [blame] | 1265 | case Intrinsic::x86_xop_vfrcz_ss: |
| 1266 | case Intrinsic::x86_xop_vfrcz_sd: |
| 1267 | // These intrinsics are specified to zero the upper bits rather than pass
| 1268 | // them through like other scalar intrinsics, so we shouldn't just use Arg0
| 1269 | // when DemandedElts[0] is clear as we do for other intrinsics. Instead we
| 1270 | // should return a zero vector.
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1271 | if (!DemandedElts[0]) { |
| 1272 | Worklist.Add(II); |
Craig Topper | 7fc6d34 | 2016-12-11 22:32:38 +0000 | [diff] [blame] | 1273 | return ConstantAggregateZero::get(II->getType()); |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1274 | } |
Craig Topper | 7fc6d34 | 2016-12-11 22:32:38 +0000 | [diff] [blame] | 1275 | |
Craig Topper | ac75bca | 2016-12-13 07:45:45 +0000 | [diff] [blame] | 1276 | // Only the lower element is used. |
| 1277 | DemandedElts = 1; |
Craig Topper | 7fc6d34 | 2016-12-11 22:32:38 +0000 | [diff] [blame] | 1278 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0), DemandedElts, |
| 1279 | UndefElts, Depth + 1); |
| 1280 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
Craig Topper | ac75bca | 2016-12-13 07:45:45 +0000 | [diff] [blame] | 1281 | |
| 1282 | // At most the lower element can be undef; the upper elements are known zero.
| 1283 | UndefElts = UndefElts[0]; |
Craig Topper | 7fc6d34 | 2016-12-11 22:32:38 +0000 | [diff] [blame] | 1284 | break; |
| 1285 | |
Simon Pilgrim | 4c564ad | 2016-04-24 19:31:56 +0000 | [diff] [blame] | 1286 | // Unary scalar-as-vector operations that work column-wise. |
Simon Pilgrim | 8302094 | 2016-04-24 18:23:14 +0000 | [diff] [blame] | 1287 | case Intrinsic::x86_sse_rcp_ss: |
| 1288 | case Intrinsic::x86_sse_rsqrt_ss: |
| 1289 | case Intrinsic::x86_sse_sqrt_ss: |
| 1290 | case Intrinsic::x86_sse2_sqrt_sd: |
Simon Pilgrim | 8302094 | 2016-04-24 18:23:14 +0000 | [diff] [blame] | 1291 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0), DemandedElts, |
| 1292 | UndefElts, Depth + 1); |
| 1293 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
| 1294 | |
| 1295 | // If lowest element of a scalar op isn't used then use Arg0. |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1296 | if (!DemandedElts[0]) { |
| 1297 | Worklist.Add(II); |
Simon Pilgrim | 8302094 | 2016-04-24 18:23:14 +0000 | [diff] [blame] | 1298 | return II->getArgOperand(0); |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1299 | } |
Simon Pilgrim | 4c564ad | 2016-04-24 19:31:56 +0000 | [diff] [blame] | 1300 | // TODO: If only the low element is used, lower SQRT to FSQRT (subject to
| 1301 | // rounding/exception checks).
Simon Pilgrim | 8302094 | 2016-04-24 18:23:14 +0000 | [diff] [blame] | 1302 | break; |
| 1303 | |
Craig Topper | a0372de | 2016-12-14 03:17:27 +0000 | [diff] [blame] | 1304 | // Binary scalar-as-vector operations that work column-wise. The high |
| 1305 | // elements come from operand 0. The low element is a function of both |
| 1306 | // operands. |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1307 | case Intrinsic::x86_sse_min_ss: |
| 1308 | case Intrinsic::x86_sse_max_ss: |
Simon Pilgrim | 8302094 | 2016-04-24 18:23:14 +0000 | [diff] [blame] | 1309 | case Intrinsic::x86_sse_cmp_ss: |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1310 | case Intrinsic::x86_sse2_min_sd: |
| 1311 | case Intrinsic::x86_sse2_max_sd: |
Craig Topper | a0372de | 2016-12-14 03:17:27 +0000 | [diff] [blame] | 1312 | case Intrinsic::x86_sse2_cmp_sd: { |
| 1313 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0), DemandedElts, |
| 1314 | UndefElts, Depth + 1); |
| 1315 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
| 1316 | |
| 1317 | // If lowest element of a scalar op isn't used then use Arg0. |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1318 | if (!DemandedElts[0]) { |
| 1319 | Worklist.Add(II); |
Craig Topper | a0372de | 2016-12-14 03:17:27 +0000 | [diff] [blame] | 1320 | return II->getArgOperand(0); |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1321 | } |
Craig Topper | a0372de | 2016-12-14 03:17:27 +0000 | [diff] [blame] | 1322 | |
| 1323 | // Only lower element is used for operand 1. |
| 1324 | DemandedElts = 1; |
| 1325 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(1), DemandedElts, |
| 1326 | UndefElts2, Depth + 1); |
| 1327 | if (TmpV) { II->setArgOperand(1, TmpV); MadeChange = true; } |
| 1328 | |
| 1329 | // Lower element is undefined if both lower elements are undefined. |
| 1330 | // Consider things like undef&0. The result is known zero, not undef. |
| 1331 | if (!UndefElts2[0]) |
| 1332 | UndefElts.clearBit(0); |
| 1333 | |
| 1334 | break; |
| 1335 | } |
| 1336 | |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1337 | // Binary scalar-as-vector operations that work column-wise. The high |
| 1338 | // elements come from operand 0 and the low element comes from operand 1. |
Simon Pilgrim | 8302094 | 2016-04-24 18:23:14 +0000 | [diff] [blame] | 1339 | case Intrinsic::x86_sse41_round_ss: |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1340 | case Intrinsic::x86_sse41_round_sd: { |
| 1341 | // Don't use the low element of operand 0. |
| 1342 | APInt DemandedElts2 = DemandedElts; |
| 1343 | DemandedElts2.clearBit(0); |
| 1344 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0), DemandedElts2, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1345 | UndefElts, Depth + 1); |
Gabor Greif | e23efee | 2010-06-28 16:45:00 +0000 | [diff] [blame] | 1346 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1347 | |
| 1348 | // If lowest element of a scalar op isn't used then use Arg0. |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1349 | if (!DemandedElts[0]) { |
| 1350 | Worklist.Add(II); |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1351 | return II->getArgOperand(0); |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1352 | } |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1353 | |
| 1354 | // Only lower element is used for operand 1. |
| 1355 | DemandedElts = 1; |
Gabor Greif | e23efee | 2010-06-28 16:45:00 +0000 | [diff] [blame] | 1356 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(1), DemandedElts, |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1357 | UndefElts2, Depth + 1); |
Gabor Greif | e23efee | 2010-06-28 16:45:00 +0000 | [diff] [blame] | 1358 | if (TmpV) { II->setArgOperand(1, TmpV); MadeChange = true; } |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1359 | |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1360 | // Take the high undef elements from operand 0 and take the lower element |
| 1361 | // from operand 1. |
| 1362 | UndefElts.clearBit(0); |
| 1363 | UndefElts |= UndefElts2[0]; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1364 | break; |
Craig Topper | eb6a20e | 2016-12-14 03:17:30 +0000 | [diff] [blame] | 1365 | } |
Simon Pilgrim | 61116dd | 2015-09-17 20:32:45 +0000 | [diff] [blame] | 1366 | |
Craig Topper | dfd268d | 2016-12-14 05:43:05 +0000 | [diff] [blame] | 1367 | // Three input scalar-as-vector operations that work column-wise. The high |
| 1368 | // elements come from operand 0 and the low element is a function of all |
| 1369 | // three inputs. |
Craig Topper | 268b3ab | 2016-12-14 06:06:58 +0000 | [diff] [blame] | 1370 | case Intrinsic::x86_avx512_mask_add_ss_round: |
| 1371 | case Intrinsic::x86_avx512_mask_div_ss_round: |
| 1372 | case Intrinsic::x86_avx512_mask_mul_ss_round: |
| 1373 | case Intrinsic::x86_avx512_mask_sub_ss_round: |
| 1374 | case Intrinsic::x86_avx512_mask_max_ss_round: |
| 1375 | case Intrinsic::x86_avx512_mask_min_ss_round: |
| 1376 | case Intrinsic::x86_avx512_mask_add_sd_round: |
| 1377 | case Intrinsic::x86_avx512_mask_div_sd_round: |
| 1378 | case Intrinsic::x86_avx512_mask_mul_sd_round: |
| 1379 | case Intrinsic::x86_avx512_mask_sub_sd_round: |
| 1380 | case Intrinsic::x86_avx512_mask_max_sd_round: |
| 1381 | case Intrinsic::x86_avx512_mask_min_sd_round: |
Craig Topper | 23ebd95 | 2016-12-11 08:54:52 +0000 | [diff] [blame] | 1382 | case Intrinsic::x86_fma_vfmadd_ss: |
| 1383 | case Intrinsic::x86_fma_vfmsub_ss: |
| 1384 | case Intrinsic::x86_fma_vfnmadd_ss: |
| 1385 | case Intrinsic::x86_fma_vfnmsub_ss: |
| 1386 | case Intrinsic::x86_fma_vfmadd_sd: |
| 1387 | case Intrinsic::x86_fma_vfmsub_sd: |
| 1388 | case Intrinsic::x86_fma_vfnmadd_sd: |
| 1389 | case Intrinsic::x86_fma_vfnmsub_sd: |
Craig Topper | ab5f355 | 2016-12-15 03:49:45 +0000 | [diff] [blame] | 1390 | case Intrinsic::x86_avx512_mask_vfmadd_ss: |
| 1391 | case Intrinsic::x86_avx512_mask_vfmadd_sd: |
| 1392 | case Intrinsic::x86_avx512_maskz_vfmadd_ss: |
| 1393 | case Intrinsic::x86_avx512_maskz_vfmadd_sd: |
Craig Topper | 23ebd95 | 2016-12-11 08:54:52 +0000 | [diff] [blame] | 1394 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0), DemandedElts, |
| 1395 | UndefElts, Depth + 1); |
| 1396 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
Craig Topper | dfd268d | 2016-12-14 05:43:05 +0000 | [diff] [blame] | 1397 | |
| 1398 | // If lowest element of a scalar op isn't used then use Arg0. |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1399 | if (!DemandedElts[0]) { |
| 1400 | Worklist.Add(II); |
Craig Topper | dfd268d | 2016-12-14 05:43:05 +0000 | [diff] [blame] | 1401 | return II->getArgOperand(0); |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1402 | } |
Craig Topper | dfd268d | 2016-12-14 05:43:05 +0000 | [diff] [blame] | 1403 | |
| 1404 | // Only the lower element is used for operands 1 and 2.
| 1405 | DemandedElts = 1; |
Craig Topper | 23ebd95 | 2016-12-11 08:54:52 +0000 | [diff] [blame] | 1406 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(1), DemandedElts, |
| 1407 | UndefElts2, Depth + 1); |
| 1408 | if (TmpV) { II->setArgOperand(1, TmpV); MadeChange = true; } |
| 1409 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(2), DemandedElts, |
| 1410 | UndefElts3, Depth + 1); |
| 1411 | if (TmpV) { II->setArgOperand(2, TmpV); MadeChange = true; } |
| 1412 | |
Craig Topper | dfd268d | 2016-12-14 05:43:05 +0000 | [diff] [blame] | 1413 | // Lower element is undefined if all three lower elements are undefined. |
| 1414 | // Consider things like undef&0. The result is known zero, not undef. |
| 1415 | if (!UndefElts2[0] || !UndefElts3[0]) |
| 1416 | UndefElts.clearBit(0); |
Craig Topper | 23ebd95 | 2016-12-11 08:54:52 +0000 | [diff] [blame] | 1417 | |
Craig Topper | 23ebd95 | 2016-12-11 08:54:52 +0000 | [diff] [blame] | 1418 | break; |
| 1419 | |
Craig Topper | ab5f355 | 2016-12-15 03:49:45 +0000 | [diff] [blame] | 1420 | case Intrinsic::x86_avx512_mask3_vfmadd_ss: |
| 1421 | case Intrinsic::x86_avx512_mask3_vfmadd_sd: |
| 1422 | case Intrinsic::x86_avx512_mask3_vfmsub_ss: |
| 1423 | case Intrinsic::x86_avx512_mask3_vfmsub_sd: |
| 1424 | case Intrinsic::x86_avx512_mask3_vfnmsub_ss: |
| 1425 | case Intrinsic::x86_avx512_mask3_vfnmsub_sd: |
| 1426 | // These intrinsics get the passthru bits from operand 2. |
| 1427 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(2), DemandedElts, |
| 1428 | UndefElts, Depth + 1); |
| 1429 | if (TmpV) { II->setArgOperand(2, TmpV); MadeChange = true; } |
| 1430 | |
| 1431 | // If lowest element of a scalar op isn't used then use Arg2. |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1432 | if (!DemandedElts[0]) { |
| 1433 | Worklist.Add(II); |
Craig Topper | ab5f355 | 2016-12-15 03:49:45 +0000 | [diff] [blame] | 1434 | return II->getArgOperand(2); |
Craig Topper | 1a8a337 | 2016-12-29 03:30:17 +0000 | [diff] [blame] | 1435 | } |
Craig Topper | ab5f355 | 2016-12-15 03:49:45 +0000 | [diff] [blame] | 1436 | |
| 1437 | // Only the lower element is used for operands 0 and 1.
| 1438 | DemandedElts = 1; |
| 1439 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(0), DemandedElts, |
| 1440 | UndefElts2, Depth + 1); |
| 1441 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
| 1442 | TmpV = SimplifyDemandedVectorElts(II->getArgOperand(1), DemandedElts, |
| 1443 | UndefElts3, Depth + 1); |
| 1444 | if (TmpV) { II->setArgOperand(1, TmpV); MadeChange = true; } |
| 1445 | |
| 1446 | // Lower element is undefined if all three lower elements are undefined. |
| 1447 | // Consider things like undef&0. The result is known zero, not undef. |
| 1448 | if (!UndefElts2[0] || !UndefElts3[0]) |
| 1449 | UndefElts.clearBit(0); |
| 1450 | |
| 1451 | break; |
| 1452 | |
Simon Pilgrim | c9cf7fc | 2016-12-26 23:28:17 +0000 | [diff] [blame] | 1453 | case Intrinsic::x86_sse2_pmulu_dq: |
| 1454 | case Intrinsic::x86_sse41_pmuldq: |
| 1455 | case Intrinsic::x86_avx2_pmul_dq: |
Craig Topper | 72f2d4e | 2016-12-27 05:30:09 +0000 | [diff] [blame] | 1456 | case Intrinsic::x86_avx2_pmulu_dq: |
| 1457 | case Intrinsic::x86_avx512_pmul_dq_512: |
| 1458 | case Intrinsic::x86_avx512_pmulu_dq_512: { |
Simon Pilgrim | c9cf7fc | 2016-12-26 23:28:17 +0000 | [diff] [blame] | 1459 | Value *Op0 = II->getArgOperand(0); |
| 1460 | Value *Op1 = II->getArgOperand(1); |
| 1461 | unsigned InnerVWidth = Op0->getType()->getVectorNumElements(); |
| 1462 | assert((VWidth * 2) == InnerVWidth && "Unexpected input size"); |
| 1463 | |
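| | // Each 64-bit product reads only the even (2*i) 32-bit element of each
| | // input; the odd elements are never used, so don't demand them.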
| 1464 | APInt InnerDemandedElts(InnerVWidth, 0); |
| 1465 | for (unsigned i = 0; i != VWidth; ++i) |
| 1466 | if (DemandedElts[i]) |
| 1467 | InnerDemandedElts.setBit(i * 2); |
| 1468 | |
| 1469 | UndefElts2 = APInt(InnerVWidth, 0); |
| 1470 | TmpV = SimplifyDemandedVectorElts(Op0, InnerDemandedElts, UndefElts2, |
| 1471 | Depth + 1); |
| 1472 | if (TmpV) { II->setArgOperand(0, TmpV); MadeChange = true; } |
| 1473 | |
| 1474 | UndefElts3 = APInt(InnerVWidth, 0); |
| 1475 | TmpV = SimplifyDemandedVectorElts(Op1, InnerDemandedElts, UndefElts3, |
| 1476 | Depth + 1); |
| 1477 | if (TmpV) { II->setArgOperand(1, TmpV); MadeChange = true; } |
| 1478 | |
| 1479 | break; |
| 1480 | } |
| 1481 | |
Simon Pilgrim | 51b3b98 | 2017-01-20 09:28:21 +0000 | [diff] [blame] | 1482 | case Intrinsic::x86_sse2_packssdw_128: |
| 1483 | case Intrinsic::x86_sse2_packsswb_128: |
| 1484 | case Intrinsic::x86_sse2_packuswb_128: |
| 1485 | case Intrinsic::x86_sse41_packusdw: |
| 1486 | case Intrinsic::x86_avx2_packssdw: |
| 1487 | case Intrinsic::x86_avx2_packsswb: |
| 1488 | case Intrinsic::x86_avx2_packusdw: |
Craig Topper | 3731f4d | 2017-02-16 07:35:23 +0000 | [diff] [blame] | 1489 | case Intrinsic::x86_avx2_packuswb: |
| 1490 | case Intrinsic::x86_avx512_packssdw_512: |
| 1491 | case Intrinsic::x86_avx512_packsswb_512: |
| 1492 | case Intrinsic::x86_avx512_packusdw_512: |
| 1493 | case Intrinsic::x86_avx512_packuswb_512: { |
Simon Pilgrim | 51b3b98 | 2017-01-20 09:28:21 +0000 | [diff] [blame] | 1494 | auto *Ty0 = II->getArgOperand(0)->getType(); |
| 1495 | unsigned InnerVWidth = Ty0->getVectorNumElements(); |
| 1496 | assert(VWidth == (InnerVWidth * 2) && "Unexpected input size"); |
| 1497 | |
| 1498 | unsigned NumLanes = Ty0->getPrimitiveSizeInBits() / 128; |
| 1499 | unsigned VWidthPerLane = VWidth / NumLanes; |
| 1500 | unsigned InnerVWidthPerLane = InnerVWidth / NumLanes; |
| 1501 | |
| 1502 | // Per lane, pack the elements of the first input and then the second. |
| 1503 | // e.g. |
| 1504 | // v8i16 PACK(v4i32 X, v4i32 Y) - (X[0..3],Y[0..3]) |
| 1505 | // v32i8 PACK(v16i16 X, v16i16 Y) - (X[0..7],Y[0..7]),(X[8..15],Y[8..15]) |
| 1506 | for (int OpNum = 0; OpNum != 2; ++OpNum) { |
| 1507 | APInt OpDemandedElts(InnerVWidth, 0); |
| 1508 | for (unsigned Lane = 0; Lane != NumLanes; ++Lane) { |
| 1509 | unsigned LaneIdx = Lane * VWidthPerLane; |
| 1510 | for (unsigned Elt = 0; Elt != InnerVWidthPerLane; ++Elt) { |
| 1511 | unsigned Idx = LaneIdx + Elt + InnerVWidthPerLane * OpNum; |
| 1512 | if (DemandedElts[Idx]) |
| 1513 | OpDemandedElts.setBit((Lane * InnerVWidthPerLane) + Elt); |
| 1514 | } |
| 1515 | } |
| 1516 | |
| 1517 | // Demand elements from the operand. |
| 1518 | auto *Op = II->getArgOperand(OpNum); |
| 1519 | APInt OpUndefElts(InnerVWidth, 0); |
| 1520 | TmpV = SimplifyDemandedVectorElts(Op, OpDemandedElts, OpUndefElts, |
| 1521 | Depth + 1); |
| 1522 | if (TmpV) { |
| 1523 | II->setArgOperand(OpNum, TmpV); |
| 1524 | MadeChange = true; |
| 1525 | } |
| 1526 | |
| 1527 | // Pack the operand's UNDEF elements, one lane at a time. |
| 1528 | OpUndefElts = OpUndefElts.zext(VWidth); |
| 1529 | for (unsigned Lane = 0; Lane != NumLanes; ++Lane) { |
| 1530 | APInt LaneElts = OpUndefElts.lshr(InnerVWidthPerLane * Lane); |
| 1531 | LaneElts = LaneElts.getLoBits(InnerVWidthPerLane); |
| 1532 | LaneElts = LaneElts.shl(InnerVWidthPerLane * (2 * Lane + OpNum)); |
| 1533 | UndefElts |= LaneElts; |
| 1534 | } |
| 1535 | } |
| 1536 | break; |
| 1537 | } |
| 1538 | |
Simon Pilgrim | d4eb800 | 2017-01-17 11:35:03 +0000 | [diff] [blame] | 1539 | // PSHUFB |
Simon Pilgrim | 73a68c2 | 2017-01-16 11:30:41 +0000 | [diff] [blame] | 1540 | case Intrinsic::x86_ssse3_pshuf_b_128: |
| 1541 | case Intrinsic::x86_avx2_pshuf_b: |
Simon Pilgrim | d4eb800 | 2017-01-17 11:35:03 +0000 | [diff] [blame] | 1542 | case Intrinsic::x86_avx512_pshuf_b_512: |
| 1543 | // PERMILVAR |
| 1544 | case Intrinsic::x86_avx_vpermilvar_ps: |
| 1545 | case Intrinsic::x86_avx_vpermilvar_ps_256: |
| 1546 | case Intrinsic::x86_avx512_vpermilvar_ps_512: |
| 1547 | case Intrinsic::x86_avx_vpermilvar_pd: |
| 1548 | case Intrinsic::x86_avx_vpermilvar_pd_256: |
Simon Pilgrim | fe2c0ed | 2017-01-18 14:47:49 +0000 | [diff] [blame] | 1549 | case Intrinsic::x86_avx512_vpermilvar_pd_512: |
| 1550 | // PERMV |
| 1551 | case Intrinsic::x86_avx2_permd: |
| 1552 | case Intrinsic::x86_avx2_permps: { |
Simon Pilgrim | 73a68c2 | 2017-01-16 11:30:41 +0000 | [diff] [blame] | 1553 | Value *Op1 = II->getArgOperand(1); |
| 1554 | TmpV = SimplifyDemandedVectorElts(Op1, DemandedElts, UndefElts, |
| 1555 | Depth + 1); |
| 1556 | if (TmpV) { II->setArgOperand(1, TmpV); MadeChange = true; } |
| 1557 | break; |
| 1558 | } |
| 1559 | |
Simon Pilgrim | 61116dd | 2015-09-17 20:32:45 +0000 | [diff] [blame] | 1560 | // SSE4A instructions leave the upper 64-bits of the 128-bit result |
| 1561 | // in an undefined state. |
| 1562 | case Intrinsic::x86_sse4a_extrq: |
| 1563 | case Intrinsic::x86_sse4a_extrqi: |
| 1564 | case Intrinsic::x86_sse4a_insertq: |
| 1565 | case Intrinsic::x86_sse4a_insertqi: |
Craig Topper | 3a86a04 | 2017-03-19 05:49:16 +0000 | [diff] [blame] | 1566 | UndefElts.setHighBits(VWidth / 2); |
Simon Pilgrim | 61116dd | 2015-09-17 20:32:45 +0000 | [diff] [blame] | 1567 | break; |
Matt Arsenault | efe949c | 2017-03-09 20:34:27 +0000 | [diff] [blame] | 1568 | case Intrinsic::amdgcn_buffer_load: |
| 1569 | case Intrinsic::amdgcn_buffer_load_format: { |
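| | // If only a low-index prefix of the loaded vector is demanded, shrink the
| | // intrinsic to load that many elements (rounded up to a power of two) and
| | // widen the result back with an insertelement or shufflevector.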
Craig Topper | d33ee1b | 2017-04-03 16:34:59 +0000 | [diff] [blame] | 1570 | if (VWidth == 1 || !DemandedElts.isMask()) |
Matt Arsenault | efe949c | 2017-03-09 20:34:27 +0000 | [diff] [blame] | 1571 | return nullptr; |
| 1572 | |
| 1573 | // TODO: Handle 3 vectors when supported in code gen. |
| 1574 | unsigned NewNumElts = PowerOf2Ceil(DemandedElts.countTrailingOnes()); |
| 1575 | if (NewNumElts == VWidth) |
| 1576 | return nullptr; |
| 1577 | |
| 1578 | Module *M = II->getParent()->getParent()->getParent(); |
| 1579 | Type *EltTy = V->getType()->getVectorElementType(); |
| 1580 | |
| 1581 | Type *NewTy = (NewNumElts == 1) ? EltTy : |
| 1582 | VectorType::get(EltTy, NewNumElts); |
| 1583 | |
| 1584 | Function *NewIntrin = Intrinsic::getDeclaration(M, II->getIntrinsicID(), |
| 1585 | NewTy); |
| 1586 | |
| 1587 | SmallVector<Value *, 5> Args; |
| 1588 | for (unsigned I = 0, E = II->getNumArgOperands(); I != E; ++I) |
| 1589 | Args.push_back(II->getArgOperand(I)); |
| 1590 | |
Matt Arsenault | a3bdd8f | 2017-03-10 05:25:49 +0000 | [diff] [blame] | 1591 | IRBuilderBase::InsertPointGuard Guard(*Builder); |
| 1592 | Builder->SetInsertPoint(II); |
| 1593 | |
Matt Arsenault | efe949c | 2017-03-09 20:34:27 +0000 | [diff] [blame] | 1594 | CallInst *NewCall = Builder->CreateCall(NewIntrin, Args); |
| 1595 | NewCall->takeName(II); |
| 1596 | NewCall->copyMetadata(*II); |
| 1597 | if (NewNumElts == 1) { |
| 1598 | return Builder->CreateInsertElement(UndefValue::get(V->getType()), |
| 1599 | NewCall, static_cast<uint64_t>(0)); |
| 1600 | } |
| 1601 | |
| 1602 | SmallVector<uint32_t, 8> EltMask; |
| 1603 | for (unsigned I = 0; I < VWidth; ++I) |
| 1604 | EltMask.push_back(I); |
| 1605 | |
| 1606 | Value *Shuffle = Builder->CreateShuffleVector( |
| 1607 | NewCall, UndefValue::get(NewTy), EltMask); |
| 1608 | |
| 1609 | MadeChange = true; |
| 1610 | return Shuffle; |
| 1611 | } |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1612 | } |
| 1613 | break; |
| 1614 | } |
| 1615 | } |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1616 | return MadeChange ? I : nullptr; |
Chris Lattner | 7e04491 | 2010-01-04 07:17:19 +0000 | [diff] [blame] | 1617 | } |