| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 1 | //===- InstCombineMulDivRem.cpp -------------------------------------------===// |
| 2 | // |
| Chandler Carruth | 2946cd7 | 2019-01-19 08:50:56 +0000 | [diff] [blame] | 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | // |
| 9 | // This file implements the visit functions for mul, fmul, sdiv, udiv, fdiv, |
| 10 | // srem, urem, frem. |
| 11 | // |
| 12 | //===----------------------------------------------------------------------===// |
| 13 | |
| Chandler Carruth | a917458 | 2015-01-22 05:25:13 +0000 | [diff] [blame] | 14 | #include "InstCombineInternal.h" |
| Eugene Zelenko | 7f0f9bc | 2017-10-24 21:24:53 +0000 | [diff] [blame] | 15 | #include "llvm/ADT/APFloat.h" |
| 16 | #include "llvm/ADT/APInt.h" |
| 17 | #include "llvm/ADT/SmallVector.h" |
| Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 18 | #include "llvm/Analysis/InstructionSimplify.h" |
| Eugene Zelenko | 7f0f9bc | 2017-10-24 21:24:53 +0000 | [diff] [blame] | 19 | #include "llvm/IR/BasicBlock.h" |
| 20 | #include "llvm/IR/Constant.h" |
| 21 | #include "llvm/IR/Constants.h" |
| 22 | #include "llvm/IR/InstrTypes.h" |
| 23 | #include "llvm/IR/Instruction.h" |
| 24 | #include "llvm/IR/Instructions.h" |
| Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 25 | #include "llvm/IR/IntrinsicInst.h" |
| Eugene Zelenko | 7f0f9bc | 2017-10-24 21:24:53 +0000 | [diff] [blame] | 26 | #include "llvm/IR/Intrinsics.h" |
| 27 | #include "llvm/IR/Operator.h" |
| Chandler Carruth | 820a908 | 2014-03-04 11:08:18 +0000 | [diff] [blame] | 28 | #include "llvm/IR/PatternMatch.h" |
| Eugene Zelenko | 7f0f9bc | 2017-10-24 21:24:53 +0000 | [diff] [blame] | 29 | #include "llvm/IR/Type.h" |
| 30 | #include "llvm/IR/Value.h" |
| 31 | #include "llvm/Support/Casting.h" |
| 32 | #include "llvm/Support/ErrorHandling.h" |
| 33 | #include "llvm/Support/KnownBits.h" |
| 34 | #include "llvm/Transforms/InstCombine/InstCombineWorklist.h" |
| Dmitry Venikov | e5fbf59 | 2018-01-11 06:33:00 +0000 | [diff] [blame] | 35 | #include "llvm/Transforms/Utils/BuildLibCalls.h" |
| Eugene Zelenko | 7f0f9bc | 2017-10-24 21:24:53 +0000 | [diff] [blame] | 36 | #include <cassert> |
| 37 | #include <cstddef> |
| 38 | #include <cstdint> |
| 39 | #include <utility> |
| 40 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 41 | using namespace llvm; |
| 42 | using namespace PatternMatch; |
| 43 | |
| Chandler Carruth | 964daaa | 2014-04-22 02:55:47 +0000 | [diff] [blame] | 44 | #define DEBUG_TYPE "instcombine" |
| 45 | |
| Sanjay Patel | 6eccf48 | 2015-09-09 15:24:36 +0000 | [diff] [blame] | 46 | /// The specific integer value is used in a context where it is known to be |
| 47 | /// non-zero. If this allows us to simplify the computation, do so and return |
| 48 | /// the new operand, otherwise return null. |
| Hal Finkel | 60db058 | 2014-09-07 18:57:58 +0000 | [diff] [blame] | 49 | static Value *simplifyValueKnownNonZero(Value *V, InstCombiner &IC, |
| Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 50 | Instruction &CxtI) { |
| Chris Lattner | 7c99f19 | 2011-05-22 18:18:41 +0000 | [diff] [blame] | 51 | // If V has multiple uses, then we would have to do more analysis to determine |
| 52 | // if this is safe. For example, the use could be in dynamically unreached |
| 53 | // code. |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 54 | if (!V->hasOneUse()) return nullptr; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 55 | |
| Chris Lattner | 388cb8a | 2011-05-23 00:32:19 +0000 | [diff] [blame] | 56 | bool MadeChange = false; |
| 57 | |
| Chris Lattner | 7c99f19 | 2011-05-22 18:18:41 +0000 | [diff] [blame] | 58 | // ((1 << A) >>u B) --> (1 << (A-B)) |
| 59 | // Because V cannot be zero, we know that B is less than A. |
| David Majnemer | dad2103 | 2014-10-14 20:28:40 +0000 | [diff] [blame] | 60 | Value *A = nullptr, *B = nullptr, *One = nullptr; |
| 61 | if (match(V, m_LShr(m_OneUse(m_Shl(m_Value(One), m_Value(A))), m_Value(B))) && |
| 62 | match(One, m_One())) { |
| Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 63 | A = IC.Builder.CreateSub(A, B); |
| 64 | return IC.Builder.CreateShl(One, A); |
| Chris Lattner | 7c99f19 | 2011-05-22 18:18:41 +0000 | [diff] [blame] | 65 | } |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 66 | |
| Chris Lattner | 388cb8a | 2011-05-23 00:32:19 +0000 | [diff] [blame] | 67 | // (PowerOfTwo >>u B) --> isExact since shifting out the result would make it |
| 68 | // inexact. Similarly for <<. |
| Sanjay Patel | a8ef4a5 | 2016-05-22 17:08:52 +0000 | [diff] [blame] | 69 | BinaryOperator *I = dyn_cast<BinaryOperator>(V); |
| 70 | if (I && I->isLogicalShift() && |
| Craig Topper | d4039f7 | 2017-05-25 21:51:12 +0000 | [diff] [blame] | 71 | IC.isKnownToBeAPowerOfTwo(I->getOperand(0), false, 0, &CxtI)) { |
| Sanjay Patel | a8ef4a5 | 2016-05-22 17:08:52 +0000 | [diff] [blame] | 72 | // We know that this is an exact/nuw shift and that the input is a |
| 73 | // non-zero context as well. |
| 74 | if (Value *V2 = simplifyValueKnownNonZero(I->getOperand(0), IC, CxtI)) { |
| Nikita Popov | 4b35c81 | 2020-03-30 22:00:16 +0200 | [diff] [blame] | 75 | IC.replaceOperand(*I, 0, V2); |
| Sanjay Patel | a8ef4a5 | 2016-05-22 17:08:52 +0000 | [diff] [blame] | 76 | MadeChange = true; |
| Chris Lattner | 388cb8a | 2011-05-23 00:32:19 +0000 | [diff] [blame] | 77 | } |
| 78 | |
| Sanjay Patel | a8ef4a5 | 2016-05-22 17:08:52 +0000 | [diff] [blame] | 79 | if (I->getOpcode() == Instruction::LShr && !I->isExact()) { |
| 80 | I->setIsExact(); |
| 81 | MadeChange = true; |
| 82 | } |
| 83 | |
| 84 | if (I->getOpcode() == Instruction::Shl && !I->hasNoUnsignedWrap()) { |
| 85 | I->setHasNoUnsignedWrap(); |
| 86 | MadeChange = true; |
| 87 | } |
| 88 | } |
| 89 | |
| Chris Lattner | 162dfc3 | 2011-05-22 18:26:48 +0000 | [diff] [blame] | 90 | // TODO: Lots more we could do here: |
| Chris Lattner | 162dfc3 | 2011-05-22 18:26:48 +0000 | [diff] [blame] | 91 | // If V is a phi node, we can call this on each of its operands. |
| 92 | // "select cond, X, 0" can simplify to "X". |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 93 | |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 94 | return MadeChange ? V : nullptr; |
| Chris Lattner | 7c99f19 | 2011-05-22 18:18:41 +0000 | [diff] [blame] | 95 | } |
| 96 | |
| Adrian Prantl | 5f8f34e4 | 2018-05-01 15:54:18 +0000 | [diff] [blame] | 97 | /// A helper routine of InstCombiner::visitMul(). |
| Rafael Espindola | 65281bf | 2013-05-31 14:27:15 +0000 | [diff] [blame] | 98 | /// |
| Christopher Tetreault | 855e02e | 2020-05-05 14:21:59 -0700 | [diff] [blame] | 99 | /// If C is a scalar/fixed width vector of known powers of 2, then this |
| 100 | /// function returns a new scalar/fixed width vector obtained from logBase2 |
| 101 | /// of C. |
| Rafael Espindola | 65281bf | 2013-05-31 14:27:15 +0000 | [diff] [blame] | 102 | /// Return a null pointer otherwise. |
| Simon Pilgrim | 0b9f391 | 2018-02-08 14:10:01 +0000 | [diff] [blame] | 103 | static Constant *getLogBase2(Type *Ty, Constant *C) { |
| Rafael Espindola | 65281bf | 2013-05-31 14:27:15 +0000 | [diff] [blame] | 104 | const APInt *IVal; |
| Simon Pilgrim | be0dd72 | 2018-02-13 13:16:26 +0000 | [diff] [blame] | 105 | if (match(C, m_APInt(IVal)) && IVal->isPowerOf2()) |
| 106 | return ConstantInt::get(Ty, IVal->logBase2()); |
| Rafael Espindola | 65281bf | 2013-05-31 14:27:15 +0000 | [diff] [blame] | 107 | |
| Christopher Tetreault | 855e02e | 2020-05-05 14:21:59 -0700 | [diff] [blame] | 108 | // FIXME: We can extract pow of 2 of splat constant for scalable vectors. |
| 109 | if (!isa<FixedVectorType>(Ty)) |
| Simon Pilgrim | 0b9f391 | 2018-02-08 14:10:01 +0000 | [diff] [blame] | 110 | return nullptr; |
| 111 | |
| 112 | SmallVector<Constant *, 4> Elts; |
| Christopher Tetreault | 855e02e | 2020-05-05 14:21:59 -0700 | [diff] [blame] | 113 | for (unsigned I = 0, E = cast<FixedVectorType>(Ty)->getNumElements(); I != E; |
| Christopher Tetreault | 155740c | 2020-04-08 10:42:22 -0700 | [diff] [blame] | 114 | ++I) { |
| Simon Pilgrim | 0b9f391 | 2018-02-08 14:10:01 +0000 | [diff] [blame] | 115 | Constant *Elt = C->getAggregateElement(I); |
| 116 | if (!Elt) |
| 117 | return nullptr; |
| 118 | if (isa<UndefValue>(Elt)) { |
| 119 | Elts.push_back(UndefValue::get(Ty->getScalarType())); |
| 120 | continue; |
| 121 | } |
| Rafael Espindola | 65281bf | 2013-05-31 14:27:15 +0000 | [diff] [blame] | 122 | if (!match(Elt, m_APInt(IVal)) || !IVal->isPowerOf2()) |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 123 | return nullptr; |
| Simon Pilgrim | 0b9f391 | 2018-02-08 14:10:01 +0000 | [diff] [blame] | 124 | Elts.push_back(ConstantInt::get(Ty->getScalarType(), IVal->logBase2())); |
| Rafael Espindola | 65281bf | 2013-05-31 14:27:15 +0000 | [diff] [blame] | 125 | } |
| 126 | |
| 127 | return ConstantVector::get(Elts); |
| 128 | } |
| 129 | |
| Sanjay Patel | aab8b3a | 2019-10-06 14:15:48 +0000 | [diff] [blame] | 130 | // TODO: This is a specific form of a much more general pattern. |
| 131 | // We could detect a select with any binop identity constant, or we |
| 132 | // could use SimplifyBinOp to see if either arm of the select reduces. |
| 133 | // But that needs to be done carefully and/or while removing potential |
| 134 | // reverse canonicalizations as in InstCombiner::foldSelectIntoOp(). |
| 135 | static Value *foldMulSelectToNegate(BinaryOperator &I, |
| 136 | InstCombiner::BuilderTy &Builder) { |
| 137 | Value *Cond, *OtherOp; |
| 138 | |
| 139 | // mul (select Cond, 1, -1), OtherOp --> select Cond, OtherOp, -OtherOp |
| 140 | // mul OtherOp, (select Cond, 1, -1) --> select Cond, OtherOp, -OtherOp |
| 141 | if (match(&I, m_c_Mul(m_OneUse(m_Select(m_Value(Cond), m_One(), m_AllOnes())), |
| 142 | m_Value(OtherOp)))) |
| 143 | return Builder.CreateSelect(Cond, OtherOp, Builder.CreateNeg(OtherOp)); |
| 144 | |
| 145 | // mul (select Cond, -1, 1), OtherOp --> select Cond, -OtherOp, OtherOp |
| 146 | // mul OtherOp, (select Cond, -1, 1) --> select Cond, -OtherOp, OtherOp |
| 147 | if (match(&I, m_c_Mul(m_OneUse(m_Select(m_Value(Cond), m_AllOnes(), m_One())), |
| 148 | m_Value(OtherOp)))) |
| 149 | return Builder.CreateSelect(Cond, Builder.CreateNeg(OtherOp), OtherOp); |
| 150 | |
| 151 | // fmul (select Cond, 1.0, -1.0), OtherOp --> select Cond, OtherOp, -OtherOp |
| 152 | // fmul OtherOp, (select Cond, 1.0, -1.0) --> select Cond, OtherOp, -OtherOp |
| 153 | if (match(&I, m_c_FMul(m_OneUse(m_Select(m_Value(Cond), m_SpecificFP(1.0), |
| 154 | m_SpecificFP(-1.0))), |
| 155 | m_Value(OtherOp)))) { |
| 156 | IRBuilder<>::FastMathFlagGuard FMFGuard(Builder); |
| 157 | Builder.setFastMathFlags(I.getFastMathFlags()); |
| 158 | return Builder.CreateSelect(Cond, OtherOp, Builder.CreateFNeg(OtherOp)); |
| 159 | } |
| 160 | |
| 161 | // fmul (select Cond, -1.0, 1.0), OtherOp --> select Cond, -OtherOp, OtherOp |
| 162 | // fmul OtherOp, (select Cond, -1.0, 1.0) --> select Cond, -OtherOp, OtherOp |
| 163 | if (match(&I, m_c_FMul(m_OneUse(m_Select(m_Value(Cond), m_SpecificFP(-1.0), |
| 164 | m_SpecificFP(1.0))), |
| 165 | m_Value(OtherOp)))) { |
| 166 | IRBuilder<>::FastMathFlagGuard FMFGuard(Builder); |
| 167 | Builder.setFastMathFlags(I.getFastMathFlags()); |
| 168 | return Builder.CreateSelect(Cond, Builder.CreateFNeg(OtherOp), OtherOp); |
| 169 | } |
| 170 | |
| 171 | return nullptr; |
| 172 | } |
| 173 | |
// Combine/canonicalize an integer multiply. Each transform below either
// returns a replacement instruction/value or falls through to the next;
// the ordering is a deliberate canonicalization pipeline, so do not
// reorder the folds casually.
Instruction *InstCombiner::visitMul(BinaryOperator &I) {
  // First try the instruction simplifier (x*0, x*1, undef, etc.).
  if (Value *V = SimplifyMulInst(I.getOperand(0), I.getOperand(1),
                                 SQ.getWithInstruction(&I)))
    return replaceInstUsesWith(I, V);

  // Canonicalize operand order / reassociate.
  if (SimplifyAssociativeOrCommutative(I))
    return &I;

  // Vector-specific canonicalizations.
  if (Instruction *X = foldVectorBinop(I))
    return X;

  // A*(B+C) <-> A*B + A*C style factoring/distribution.
  if (Value *V = SimplifyUsingDistributiveLaws(I))
    return replaceInstUsesWith(I, V);

  // X * -1 == 0 - X
  // The nsw flag of the multiply is propagated to the negation.
  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);
  if (match(Op1, m_AllOnes())) {
    BinaryOperator *BO = BinaryOperator::CreateNeg(Op0, I.getName());
    if (I.hasNoSignedWrap())
      BO->setHasNoSignedWrap();
    return BO;
  }

  // Also allow combining multiply instructions on vectors.
  {
    Value *NewOp;
    Constant *C1, *C2;
    const APInt *IVal;
    if (match(&I, m_Mul(m_Shl(m_Value(NewOp), m_Constant(C2)),
                        m_Constant(C1))) &&
        match(C1, m_APInt(IVal))) {
      // ((X << C2)*C1) == (X * (C1 << C2))
      Constant *Shl = ConstantExpr::getShl(C1, C2);
      BinaryOperator *Mul = cast<BinaryOperator>(I.getOperand(0));
      BinaryOperator *BO = BinaryOperator::CreateMul(NewOp, Shl);
      // Wrap flags are only kept when both original instructions had them;
      // nsw additionally requires the folded constant not be INT_MIN.
      if (I.hasNoUnsignedWrap() && Mul->hasNoUnsignedWrap())
        BO->setHasNoUnsignedWrap();
      if (I.hasNoSignedWrap() && Mul->hasNoSignedWrap() &&
          Shl->isNotMinSignedValue())
        BO->setHasNoSignedWrap();
      return BO;
    }

    if (match(&I, m_Mul(m_Value(NewOp), m_Constant(C1)))) {
      // Replace X*(2^C) with X << C, where C is either a scalar or a vector.
      if (Constant *NewCst = getLogBase2(NewOp->getType(), C1)) {
        BinaryOperator *Shl = BinaryOperator::CreateShl(NewOp, NewCst);

        if (I.hasNoUnsignedWrap())
          Shl->setHasNoUnsignedWrap();
        if (I.hasNoSignedWrap()) {
          // A shift by bitwidth-1 can flip the sign bit, so nsw is only
          // transferred for smaller shift amounts.
          const APInt *V;
          if (match(NewCst, m_APInt(V)) && *V != V->getBitWidth() - 1)
            Shl->setHasNoSignedWrap();
        }

        return Shl;
      }
    }
  }

  if (ConstantInt *CI = dyn_cast<ConstantInt>(Op1)) {
    // (Y - X) * (-(2**n)) -> (X - Y) * (2**n), for positive nonzero n
    // (Y + C1) * (-(2**n)) -> (-C1 - Y) * (2**n), for positive nonzero n
    // The "* (2**n)" thus becomes a potential shifting opportunity.
    {
      const APInt & Val = CI->getValue();
      const APInt &PosVal = Val.abs();
      if (Val.isNegative() && PosVal.isPowerOf2()) {
        Value *X = nullptr, *Y = nullptr;
        // Only if Op0 dies here, so we don't duplicate work.
        if (Op0->hasOneUse()) {
          ConstantInt *C1;
          Value *Sub = nullptr;
          if (match(Op0, m_Sub(m_Value(Y), m_Value(X))))
            Sub = Builder.CreateSub(X, Y, "suba");
          else if (match(Op0, m_Add(m_Value(Y), m_ConstantInt(C1))))
            Sub = Builder.CreateSub(Builder.CreateNeg(C1), Y, "subc");
          if (Sub)
            return
              BinaryOperator::CreateMul(Sub,
                                        ConstantInt::get(Y->getType(), PosVal));
        }
      }
    }
  }

  // Push the multiply into a select/phi operand when profitable.
  if (Instruction *FoldedMul = foldBinOpIntoSelectOrPhi(I))
    return FoldedMul;

  // mul (select Cond, 1, -1), X --> select Cond, X, -X (and variants).
  if (Value *FoldedMul = foldMulSelectToNegate(I, Builder))
    return replaceInstUsesWith(I, FoldedMul);

  // Simplify mul instructions with a constant RHS.
  if (isa<Constant>(Op1)) {
    // Canonicalize (X+C1)*CI -> X*CI+C1*CI.
    Value *X;
    Constant *C1;
    if (match(Op0, m_OneUse(m_Add(m_Value(X), m_Constant(C1))))) {
      Value *Mul = Builder.CreateMul(C1, Op1);
      // Only go forward with the transform if C1*CI simplifies to a tidier
      // constant.
      if (!match(Mul, m_Mul(m_Value(), m_Value())))
        return BinaryOperator::CreateAdd(Builder.CreateMul(X, Op1), Mul);
    }
  }

  // abs(X) * abs(X) -> X * X
  // nabs(X) * nabs(X) -> X * X
  // (the sign manipulation cancels when a value is multiplied by itself)
  if (Op0 == Op1) {
    Value *X, *Y;
    SelectPatternFlavor SPF = matchSelectPattern(Op0, X, Y).Flavor;
    if (SPF == SPF_ABS || SPF == SPF_NABS)
      return BinaryOperator::CreateMul(X, X);
  }

  // -X * C --> X * -C
  Value *X, *Y;
  Constant *Op1C;
  if (match(Op0, m_Neg(m_Value(X))) && match(Op1, m_Constant(Op1C)))
    return BinaryOperator::CreateMul(X, ConstantExpr::getNeg(Op1C));

  // -X * -Y --> X * Y
  if (match(Op0, m_Neg(m_Value(X))) && match(Op1, m_Neg(m_Value(Y)))) {
    auto *NewMul = BinaryOperator::CreateMul(X, Y);
    // nsw survives only if the multiply and both negations were nsw.
    if (I.hasNoSignedWrap() &&
        cast<OverflowingBinaryOperator>(Op0)->hasNoSignedWrap() &&
        cast<OverflowingBinaryOperator>(Op1)->hasNoSignedWrap())
      NewMul->setHasNoSignedWrap();
    return NewMul;
  }

  // -X * Y --> -(X * Y)
  // X * -Y --> -(X * Y)
  if (match(&I, m_c_Mul(m_OneUse(m_Neg(m_Value(X))), m_Value(Y))))
    return BinaryOperator::CreateNeg(Builder.CreateMul(X, Y));

  // (X / Y) * Y = X - (X % Y)
  // (X / Y) * -Y = (X % Y) - X
  {
    // Figure out which operand is the udiv/sdiv; Y is the other one.
    Value *Y = Op1;
    BinaryOperator *Div = dyn_cast<BinaryOperator>(Op0);
    if (!Div || (Div->getOpcode() != Instruction::UDiv &&
                 Div->getOpcode() != Instruction::SDiv)) {
      Y = Op0;
      Div = dyn_cast<BinaryOperator>(Op1);
    }
    // Also accept the divisor being the negation of Y (the "* -Y" form).
    Value *Neg = dyn_castNegVal(Y);
    if (Div && Div->hasOneUse() &&
        (Div->getOperand(1) == Y || Div->getOperand(1) == Neg) &&
        (Div->getOpcode() == Instruction::UDiv ||
         Div->getOpcode() == Instruction::SDiv)) {
      Value *X = Div->getOperand(0), *DivOp1 = Div->getOperand(1);

      // If the division is exact, X % Y is zero, so we end up with X or -X.
      if (Div->isExact()) {
        if (DivOp1 == Y)
          return replaceInstUsesWith(I, X);
        return BinaryOperator::CreateNeg(X);
      }

      auto RemOpc = Div->getOpcode() == Instruction::UDiv ? Instruction::URem
                                                          : Instruction::SRem;
      Value *Rem = Builder.CreateBinOp(RemOpc, X, DivOp1);
      if (DivOp1 == Y)
        return BinaryOperator::CreateSub(X, Rem);
      return BinaryOperator::CreateSub(Rem, X);
    }
  }

  // On i1 (and vectors of i1), mul is equivalent to a logical 'and'.
  if (I.getType()->isIntOrIntVectorTy(1))
    return BinaryOperator::CreateAnd(Op0, Op1);

  // X*(1 << Y) --> X << Y
  // (1 << Y)*X --> X << Y
  {
    Value *Y;
    BinaryOperator *BO = nullptr;
    bool ShlNSW = false;
    if (match(Op0, m_Shl(m_One(), m_Value(Y)))) {
      BO = BinaryOperator::CreateShl(Op1, Y);
      ShlNSW = cast<ShlOperator>(Op0)->hasNoSignedWrap();
    } else if (match(Op1, m_Shl(m_One(), m_Value(Y)))) {
      BO = BinaryOperator::CreateShl(Op0, Y);
      ShlNSW = cast<ShlOperator>(Op1)->hasNoSignedWrap();
    }
    if (BO) {
      if (I.hasNoUnsignedWrap())
        BO->setHasNoUnsignedWrap();
      // nsw requires both the multiply and the original shl to be nsw.
      if (I.hasNoSignedWrap() && ShlNSW)
        BO->setHasNoSignedWrap();
      return BO;
    }
  }

  // (zext bool X) * (zext bool Y) --> zext (and X, Y)
  // (sext bool X) * (sext bool Y) --> zext (and X, Y)
  // Note: -1 * -1 == 1 * 1 == 1 (if the extends match, the result is the same)
  if (((match(Op0, m_ZExt(m_Value(X))) && match(Op1, m_ZExt(m_Value(Y)))) ||
       (match(Op0, m_SExt(m_Value(X))) && match(Op1, m_SExt(m_Value(Y))))) &&
      X->getType()->isIntOrIntVectorTy(1) && X->getType() == Y->getType() &&
      (Op0->hasOneUse() || Op1->hasOneUse())) {
    Value *And = Builder.CreateAnd(X, Y, "mulbool");
    return CastInst::Create(Instruction::ZExt, And, I.getType());
  }
  // (sext bool X) * (zext bool Y) --> sext (and X, Y)
  // (zext bool X) * (sext bool Y) --> sext (and X, Y)
  // Note: -1 * 1 == 1 * -1 == -1
  if (((match(Op0, m_SExt(m_Value(X))) && match(Op1, m_ZExt(m_Value(Y)))) ||
       (match(Op0, m_ZExt(m_Value(X))) && match(Op1, m_SExt(m_Value(Y))))) &&
      X->getType()->isIntOrIntVectorTy(1) && X->getType() == Y->getType() &&
      (Op0->hasOneUse() || Op1->hasOneUse())) {
    Value *And = Builder.CreateAnd(X, Y, "mulbool");
    return CastInst::Create(Instruction::SExt, And, I.getType());
  }

  // (bool X) * Y --> X ? Y : 0
  // Y * (bool X) --> X ? Y : 0
  if (match(Op0, m_ZExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1))
    return SelectInst::Create(X, Op1, ConstantInt::get(I.getType(), 0));
  if (match(Op1, m_ZExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1))
    return SelectInst::Create(X, Op0, ConstantInt::get(I.getType(), 0));

  // (lshr X, 31) * Y --> (ashr X, 31) & Y
  // Y * (lshr X, 31) --> (ashr X, 31) & Y
  // TODO: We are not checking one-use because the elimination of the multiply
  //       is better for analysis?
  // TODO: Should we canonicalize to '(X < 0) ? Y : 0' instead? That would be
  //       more similar to what we're doing above.
  const APInt *C;
  if (match(Op0, m_LShr(m_Value(X), m_APInt(C))) && *C == C->getBitWidth() - 1)
    return BinaryOperator::CreateAnd(Builder.CreateAShr(X, *C), Op1);
  if (match(Op1, m_LShr(m_Value(X), m_APInt(C))) && *C == C->getBitWidth() - 1)
    return BinaryOperator::CreateAnd(Builder.CreateAShr(X, *C), Op0);

  // Try to shrink the multiply to a narrower type (see narrowMathIfNoOverflow).
  if (Instruction *Ext = narrowMathIfNoOverflow(I))
    return Ext;

  // No structural fold applied; we may still be able to infer nsw/nuw flags
  // in place, which counts as a change for the worklist.
  bool Changed = false;
  if (!I.hasNoSignedWrap() && willNotOverflowSignedMul(Op0, Op1, I)) {
    Changed = true;
    I.setHasNoSignedWrap(true);
  }

  if (!I.hasNoUnsignedWrap() && willNotOverflowUnsignedMul(Op0, Op1, I)) {
    Changed = true;
    I.setHasNoUnsignedWrap(true);
  }

  return Changed ? &I : nullptr;
}
| 425 | |
| Sanjay Patel | c9e8c9e | 2020-06-25 11:28:04 -0400 | [diff] [blame] | 426 | Instruction *InstCombiner::foldFPSignBitOps(BinaryOperator &I) { |
| Sanjay Patel | d84cdb8 | 2020-06-20 10:20:21 -0400 | [diff] [blame] | 427 | BinaryOperator::BinaryOps Opcode = I.getOpcode(); |
| Sanjay Patel | 7b201bf | 2020-06-20 11:47:00 -0400 | [diff] [blame] | 428 | assert((Opcode == Instruction::FMul || Opcode == Instruction::FDiv) && |
| 429 | "Expected fmul or fdiv"); |
| 430 | |
| 431 | Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1); |
| Sanjay Patel | d84cdb8 | 2020-06-20 10:20:21 -0400 | [diff] [blame] | 432 | Value *X, *Y; |
| 433 | |
| 434 | // -X * -Y --> X * Y |
| 435 | // -X / -Y --> X / Y |
| 436 | if (match(Op0, m_FNeg(m_Value(X))) && match(Op1, m_FNeg(m_Value(Y)))) |
| 437 | return BinaryOperator::CreateWithCopiedFlags(Opcode, X, Y, &I); |
| 438 | |
| 439 | // fabs(X) * fabs(X) -> X * X |
| 440 | // fabs(X) / fabs(X) -> X / X |
| 441 | if (Op0 == Op1 && match(Op0, m_Intrinsic<Intrinsic::fabs>(m_Value(X)))) |
| 442 | return BinaryOperator::CreateWithCopiedFlags(Opcode, X, X, &I); |
| 443 | |
| Sanjay Patel | c9e8c9e | 2020-06-25 11:28:04 -0400 | [diff] [blame] | 444 | // fabs(X) * fabs(Y) --> fabs(X * Y) |
| 445 | // fabs(X) / fabs(Y) --> fabs(X / Y) |
| 446 | if (match(Op0, m_Intrinsic<Intrinsic::fabs>(m_Value(X))) && |
| 447 | match(Op1, m_Intrinsic<Intrinsic::fabs>(m_Value(Y))) && |
| 448 | (Op0->hasOneUse() || Op1->hasOneUse())) { |
| 449 | IRBuilder<>::FastMathFlagGuard FMFGuard(Builder); |
| 450 | Builder.setFastMathFlags(I.getFastMathFlags()); |
| 451 | Value *XY = Builder.CreateBinOp(Opcode, X, Y); |
| 452 | Value *Fabs = Builder.CreateUnaryIntrinsic(Intrinsic::fabs, XY); |
| 453 | Fabs->takeName(&I); |
| 454 | return replaceInstUsesWith(I, Fabs); |
| 455 | } |
| 456 | |
| Sanjay Patel | d84cdb8 | 2020-06-20 10:20:21 -0400 | [diff] [blame] | 457 | return nullptr; |
| 458 | } |
| 459 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 460 | Instruction *InstCombiner::visitFMul(BinaryOperator &I) { |
| Sanjay Patel | 7b0fc75 | 2018-06-21 17:06:36 +0000 | [diff] [blame] | 461 | if (Value *V = SimplifyFMulInst(I.getOperand(0), I.getOperand(1), |
| 462 | I.getFastMathFlags(), |
| Craig Topper | a420562 | 2017-06-09 03:21:29 +0000 | [diff] [blame] | 463 | SQ.getWithInstruction(&I))) |
| Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 464 | return replaceInstUsesWith(I, V); |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 465 | |
| Sanjay Patel | 70043b7 | 2018-07-13 01:18:07 +0000 | [diff] [blame] | 466 | if (SimplifyAssociativeOrCommutative(I)) |
| 467 | return &I; |
| 468 | |
| Sanjay Patel | 79dceb2 | 2018-10-03 15:20:58 +0000 | [diff] [blame] | 469 | if (Instruction *X = foldVectorBinop(I)) |
| Sanjay Patel | bbc6d60 | 2018-06-02 16:27:44 +0000 | [diff] [blame] | 470 | return X; |
| 471 | |
| Sanjay Patel | 8fdd87f | 2018-02-28 16:36:24 +0000 | [diff] [blame] | 472 | if (Instruction *FoldedMul = foldBinOpIntoSelectOrPhi(I)) |
| 473 | return FoldedMul; |
| 474 | |
| Sanjay Patel | aab8b3a | 2019-10-06 14:15:48 +0000 | [diff] [blame] | 475 | if (Value *FoldedMul = foldMulSelectToNegate(I, Builder)) |
| 476 | return replaceInstUsesWith(I, FoldedMul); |
| 477 | |
| Sanjay Patel | 7b201bf | 2020-06-20 11:47:00 -0400 | [diff] [blame] | 478 | if (Instruction *R = foldFPSignBitOps(I)) |
| Sanjay Patel | d84cdb8 | 2020-06-20 10:20:21 -0400 | [diff] [blame] | 479 | return R; |
| 480 | |
| Sanjay Patel | e29375d | 2018-03-02 23:06:45 +0000 | [diff] [blame] | 481 | // X * -1.0 --> -X |
| Sanjay Patel | 7b0fc75 | 2018-06-21 17:06:36 +0000 | [diff] [blame] | 482 | Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1); |
| Sanjay Patel | e29375d | 2018-03-02 23:06:45 +0000 | [diff] [blame] | 483 | if (match(Op1, m_SpecificFP(-1.0))) |
| Simon Moll | ddd1127 | 2020-02-27 09:05:54 -0800 | [diff] [blame] | 484 | return UnaryOperator::CreateFNegFMF(Op0, &I); |
| Sanjay Patel | 6b9c7a9 | 2018-02-23 17:14:28 +0000 | [diff] [blame] | 485 | |
| Sanjay Patel | e29375d | 2018-03-02 23:06:45 +0000 | [diff] [blame] | 486 | // -X * C --> X * -C |
| Sanjay Patel | d84cdb8 | 2020-06-20 10:20:21 -0400 | [diff] [blame] | 487 | Value *X, *Y; |
| Sanjay Patel | e29375d | 2018-03-02 23:06:45 +0000 | [diff] [blame] | 488 | Constant *C; |
| 489 | if (match(Op0, m_FNeg(m_Value(X))) && match(Op1, m_Constant(C))) |
| 490 | return BinaryOperator::CreateFMulFMF(X, ConstantExpr::getFNeg(C), &I); |
| Shuxin Yang | df0e61e | 2013-01-07 21:39:23 +0000 | [diff] [blame] | 491 | |
| Sanjay Patel | e29375d | 2018-03-02 23:06:45 +0000 | [diff] [blame] | 492 | // (select A, B, C) * (select A, D, E) --> select A, (B*D), (C*E) |
| 493 | if (Value *V = SimplifySelectsFeedingBinaryOp(I, Op0, Op1)) |
| 494 | return replaceInstUsesWith(I, V); |
| 495 | |
| Sanjay Patel | 81b3b10 | 2018-04-03 22:19:19 +0000 | [diff] [blame] | 496 | if (I.hasAllowReassoc()) { |
| 497 | // Reassociate constant RHS with another constant to form constant |
| 498 | // expression. |
| 499 | if (match(Op1, m_Constant(C)) && C->isFiniteNonZeroFP()) { |
| 500 | Constant *C1; |
| 501 | if (match(Op0, m_OneUse(m_FDiv(m_Constant(C1), m_Value(X))))) { |
| 502 | // (C1 / X) * C --> (C * C1) / X |
| 503 | Constant *CC1 = ConstantExpr::getFMul(C, C1); |
| 504 | if (CC1->isNormalFP()) |
| 505 | return BinaryOperator::CreateFDivFMF(CC1, X, &I); |
| 506 | } |
| 507 | if (match(Op0, m_FDiv(m_Value(X), m_Constant(C1)))) { |
| 508 | // (X / C1) * C --> X * (C / C1) |
| 509 | Constant *CDivC1 = ConstantExpr::getFDiv(C, C1); |
| 510 | if (CDivC1->isNormalFP()) |
| 511 | return BinaryOperator::CreateFMulFMF(X, CDivC1, &I); |
| Sanjay Patel | 204edec | 2018-03-13 14:46:32 +0000 | [diff] [blame] | 512 | |
| Sanjay Patel | 81b3b10 | 2018-04-03 22:19:19 +0000 | [diff] [blame] | 513 | // If the constant was a denormal, try reassociating differently. |
| 514 | // (X / C1) * C --> X / (C1 / C) |
| 515 | Constant *C1DivC = ConstantExpr::getFDiv(C1, C); |
| 516 | if (Op0->hasOneUse() && C1DivC->isNormalFP()) |
| 517 | return BinaryOperator::CreateFDivFMF(X, C1DivC, &I); |
| 518 | } |
| 519 | |
| 520 | // We do not need to match 'fadd C, X' and 'fsub X, C' because they are |
| 521 | // canonicalized to 'fadd X, C'. Distributing the multiply may allow |
| 522 | // further folds and (X * C) + C2 is 'fma'. |
| 523 | if (match(Op0, m_OneUse(m_FAdd(m_Value(X), m_Constant(C1))))) { |
| 524 | // (X + C1) * C --> (X * C) + (C * C1) |
| 525 | Constant *CC1 = ConstantExpr::getFMul(C, C1); |
| 526 | Value *XC = Builder.CreateFMulFMF(X, C, &I); |
| 527 | return BinaryOperator::CreateFAddFMF(XC, CC1, &I); |
| 528 | } |
| 529 | if (match(Op0, m_OneUse(m_FSub(m_Constant(C1), m_Value(X))))) { |
| 530 | // (C1 - X) * C --> (C * C1) - (X * C) |
| 531 | Constant *CC1 = ConstantExpr::getFMul(C, C1); |
| 532 | Value *XC = Builder.CreateFMulFMF(X, C, &I); |
| 533 | return BinaryOperator::CreateFSubFMF(CC1, XC, &I); |
| 534 | } |
| Sanjay Patel | 204edec | 2018-03-13 14:46:32 +0000 | [diff] [blame] | 535 | } |
| 536 | |
| Sanjay Patel | 5e13cd2 | 2019-04-15 13:23:38 +0000 | [diff] [blame] | 537 | Value *Z; |
| 538 | if (match(&I, m_c_FMul(m_OneUse(m_FDiv(m_Value(X), m_Value(Y))), |
| 539 | m_Value(Z)))) { |
| 540 | // Sink division: (X / Y) * Z --> (X * Z) / Y |
| 541 | Value *NewFMul = Builder.CreateFMulFMF(X, Z, &I); |
| 542 | return BinaryOperator::CreateFDivFMF(NewFMul, Y, &I); |
| 543 | } |
| 544 | |
| Sanjay Patel | 81b3b10 | 2018-04-03 22:19:19 +0000 | [diff] [blame] | 545 | // sqrt(X) * sqrt(Y) -> sqrt(X * Y) |
| 546 | // nnan disallows the possibility of returning a number if both operands are |
| 547 | // negative (in that case, we should return NaN). |
| 548 | if (I.hasNoNaNs() && |
| 549 | match(Op0, m_OneUse(m_Intrinsic<Intrinsic::sqrt>(m_Value(X)))) && |
| 550 | match(Op1, m_OneUse(m_Intrinsic<Intrinsic::sqrt>(m_Value(Y))))) { |
| 551 | Value *XY = Builder.CreateFMulFMF(X, Y, &I); |
| Neil Henning | 57f5d0a | 2018-10-08 10:32:33 +0000 | [diff] [blame] | 552 | Value *Sqrt = Builder.CreateUnaryIntrinsic(Intrinsic::sqrt, XY, &I); |
| Sanjay Patel | 81b3b10 | 2018-04-03 22:19:19 +0000 | [diff] [blame] | 553 | return replaceInstUsesWith(I, Sqrt); |
| Sanjay Patel | 4fd4fd6 | 2018-03-26 15:03:57 +0000 | [diff] [blame] | 554 | } |
| Sanjay Patel | 81b3b10 | 2018-04-03 22:19:19 +0000 | [diff] [blame] | 555 | |
| Sanjay Patel | 773e04c | 2019-04-08 21:23:50 +0000 | [diff] [blame] | 556 | // Like the similar transform in instsimplify, this requires 'nsz' because |
| 557 | // sqrt(-0.0) = -0.0, and -0.0 * -0.0 does not simplify to -0.0. |
| 558 | if (I.hasNoNaNs() && I.hasNoSignedZeros() && Op0 == Op1 && |
| 559 | Op0->hasNUses(2)) { |
| 560 | // Peek through fdiv to find squaring of square root: |
| 561 | // (X / sqrt(Y)) * (X / sqrt(Y)) --> (X * X) / Y |
| 562 | if (match(Op0, m_FDiv(m_Value(X), |
| 563 | m_Intrinsic<Intrinsic::sqrt>(m_Value(Y))))) { |
| 564 | Value *XX = Builder.CreateFMulFMF(X, X, &I); |
| 565 | return BinaryOperator::CreateFDivFMF(XX, Y, &I); |
| 566 | } |
| 567 | // (sqrt(Y) / X) * (sqrt(Y) / X) --> Y / (X * X) |
| 568 | if (match(Op0, m_FDiv(m_Intrinsic<Intrinsic::sqrt>(m_Value(Y)), |
| 569 | m_Value(X)))) { |
| 570 | Value *XX = Builder.CreateFMulFMF(X, X, &I); |
| 571 | return BinaryOperator::CreateFDivFMF(Y, XX, &I); |
| 572 | } |
| 573 | } |
| 574 | |
| Dmitry Venikov | 8817658 | 2019-01-31 06:28:10 +0000 | [diff] [blame] | 575 | // exp(X) * exp(Y) -> exp(X + Y) |
| 576 | // Match as long as at least one of exp has only one use. |
| 577 | if (match(Op0, m_Intrinsic<Intrinsic::exp>(m_Value(X))) && |
| 578 | match(Op1, m_Intrinsic<Intrinsic::exp>(m_Value(Y))) && |
| 579 | (Op0->hasOneUse() || Op1->hasOneUse())) { |
| 580 | Value *XY = Builder.CreateFAddFMF(X, Y, &I); |
| 581 | Value *Exp = Builder.CreateUnaryIntrinsic(Intrinsic::exp, XY, &I); |
| 582 | return replaceInstUsesWith(I, Exp); |
| 583 | } |
| 584 | |
| 585 | // exp2(X) * exp2(Y) -> exp2(X + Y) |
| 586 | // Match as long as at least one of exp2 has only one use. |
| 587 | if (match(Op0, m_Intrinsic<Intrinsic::exp2>(m_Value(X))) && |
| 588 | match(Op1, m_Intrinsic<Intrinsic::exp2>(m_Value(Y))) && |
| 589 | (Op0->hasOneUse() || Op1->hasOneUse())) { |
| 590 | Value *XY = Builder.CreateFAddFMF(X, Y, &I); |
| 591 | Value *Exp2 = Builder.CreateUnaryIntrinsic(Intrinsic::exp2, XY, &I); |
| 592 | return replaceInstUsesWith(I, Exp2); |
| 593 | } |
| 594 | |
| Sanjay Patel | 81b3b10 | 2018-04-03 22:19:19 +0000 | [diff] [blame] | 595 | // (X*Y) * X => (X*X) * Y where Y != X |
| 596 | // The purpose is two-fold: |
| 597 | // 1) to form a power expression (of X). |
| 598 | // 2) potentially shorten the critical path: After transformation, the |
| 599 | // latency of the instruction Y is amortized by the expression of X*X, |
| 600 | // and therefore Y is in a "less critical" position compared to what it |
| 601 | // was before the transformation. |
| 602 | if (match(Op0, m_OneUse(m_c_FMul(m_Specific(Op1), m_Value(Y)))) && |
| 603 | Op1 != Y) { |
| 604 | Value *XX = Builder.CreateFMulFMF(Op1, Op1, &I); |
| 605 | return BinaryOperator::CreateFMulFMF(XX, Y, &I); |
| 606 | } |
| 607 | if (match(Op1, m_OneUse(m_c_FMul(m_Specific(Op0), m_Value(Y)))) && |
| 608 | Op0 != Y) { |
| 609 | Value *XX = Builder.CreateFMulFMF(Op0, Op0, &I); |
| 610 | return BinaryOperator::CreateFMulFMF(XX, Y, &I); |
| Shuxin Yang | df0e61e | 2013-01-07 21:39:23 +0000 | [diff] [blame] | 611 | } |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 612 | } |
| 613 | |
| Sanjay Patel | 2fd0acf | 2018-03-02 20:32:46 +0000 | [diff] [blame] | 614 | // log2(X * 0.5) * Y = log2(X) * Y - Y |
| 615 | if (I.isFast()) { |
| 616 | IntrinsicInst *Log2 = nullptr; |
| 617 | if (match(Op0, m_OneUse(m_Intrinsic<Intrinsic::log2>( |
| 618 | m_OneUse(m_FMul(m_Value(X), m_SpecificFP(0.5))))))) { |
| 619 | Log2 = cast<IntrinsicInst>(Op0); |
| 620 | Y = Op1; |
| Pedro Artigas | d879504 | 2012-11-30 19:09:41 +0000 | [diff] [blame] | 621 | } |
| Sanjay Patel | 2fd0acf | 2018-03-02 20:32:46 +0000 | [diff] [blame] | 622 | if (match(Op1, m_OneUse(m_Intrinsic<Intrinsic::log2>( |
| 623 | m_OneUse(m_FMul(m_Value(X), m_SpecificFP(0.5))))))) { |
| 624 | Log2 = cast<IntrinsicInst>(Op1); |
| 625 | Y = Op0; |
| 626 | } |
| 627 | if (Log2) { |
| Nikita Popov | 893c630 | 2020-02-16 10:15:53 +0100 | [diff] [blame] | 628 | Value *Log2 = Builder.CreateUnaryIntrinsic(Intrinsic::log2, X, &I); |
| Sanjay Patel | 2fd0acf | 2018-03-02 20:32:46 +0000 | [diff] [blame] | 629 | Value *LogXTimesY = Builder.CreateFMulFMF(Log2, Y, &I); |
| 630 | return BinaryOperator::CreateFSubFMF(LogXTimesY, Y, &I); |
| Pedro Artigas | d879504 | 2012-11-30 19:09:41 +0000 | [diff] [blame] | 631 | } |
| 632 | } |
| 633 | |
| Sanjay Patel | 70043b7 | 2018-07-13 01:18:07 +0000 | [diff] [blame] | 634 | return nullptr; |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 635 | } |
| 636 | |
| Sanjay Patel | ae2e3a4 | 2017-10-06 23:20:16 +0000 | [diff] [blame] | 637 | /// Fold a divide or remainder with a select instruction divisor when one of the |
| 638 | /// select operands is zero. In that case, we can use the other select operand |
| 639 | /// because div/rem by zero is undefined. |
| 640 | bool InstCombiner::simplifyDivRemOfSelectWithZeroOp(BinaryOperator &I) { |
| 641 | SelectInst *SI = dyn_cast<SelectInst>(I.getOperand(1)); |
| 642 | if (!SI) |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 643 | return false; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 644 | |
| Sanjay Patel | ae2e3a4 | 2017-10-06 23:20:16 +0000 | [diff] [blame] | 645 | int NonNullOperand; |
| 646 | if (match(SI->getTrueValue(), m_Zero())) |
| 647 | // div/rem X, (Cond ? 0 : Y) -> div/rem X, Y |
| 648 | NonNullOperand = 2; |
| 649 | else if (match(SI->getFalseValue(), m_Zero())) |
| 650 | // div/rem X, (Cond ? Y : 0) -> div/rem X, Y |
| 651 | NonNullOperand = 1; |
| 652 | else |
| 653 | return false; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 654 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 655 | // Change the div/rem to use 'Y' instead of the select. |
| Nikita Popov | 4b35c81 | 2020-03-30 22:00:16 +0200 | [diff] [blame] | 656 | replaceOperand(I, 1, SI->getOperand(NonNullOperand)); |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 657 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 658 | // Okay, we know we replace the operand of the div/rem with 'Y' with no |
| 659 | // problem. However, the select, or the condition of the select may have |
| 660 | // multiple uses. Based on our knowledge that the operand must be non-zero, |
| 661 | // propagate the known value for the select into other uses of it, and |
| 662 | // propagate a known value of the condition into its other users. |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 663 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 664 | // If the select and condition only have a single use, don't bother with this, |
| 665 | // early exit. |
| Sanjay Patel | ae2e3a4 | 2017-10-06 23:20:16 +0000 | [diff] [blame] | 666 | Value *SelectCond = SI->getCondition(); |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 667 | if (SI->use_empty() && SelectCond->hasOneUse()) |
| 668 | return true; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 669 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 670 | // Scan the current block backward, looking for other uses of SI. |
| Duncan P. N. Exon Smith | 9f8aaf2 | 2015-10-13 16:59:33 +0000 | [diff] [blame] | 671 | BasicBlock::iterator BBI = I.getIterator(), BBFront = I.getParent()->begin(); |
| Sanjay Patel | 72d339a | 2017-10-06 23:43:06 +0000 | [diff] [blame] | 672 | Type *CondTy = SelectCond->getType(); |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 673 | while (BBI != BBFront) { |
| 674 | --BBI; |
| Serguei Katkov | d894fb4 | 2018-06-04 02:52:36 +0000 | [diff] [blame] | 675 | // If we found an instruction that we can't assume will return, so |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 676 | // information from below it cannot be propagated above it. |
| Serguei Katkov | d894fb4 | 2018-06-04 02:52:36 +0000 | [diff] [blame] | 677 | if (!isGuaranteedToTransferExecutionToSuccessor(&*BBI)) |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 678 | break; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 679 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 680 | // Replace uses of the select or its condition with the known values. |
| 681 | for (Instruction::op_iterator I = BBI->op_begin(), E = BBI->op_end(); |
| 682 | I != E; ++I) { |
| 683 | if (*I == SI) { |
| Nikita Popov | 4b35c81 | 2020-03-30 22:00:16 +0200 | [diff] [blame] | 684 | replaceUse(*I, SI->getOperand(NonNullOperand)); |
| Nikita Popov | e6c9ab4 | 2020-01-30 22:32:46 +0100 | [diff] [blame] | 685 | Worklist.push(&*BBI); |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 686 | } else if (*I == SelectCond) { |
| Nikita Popov | 4b35c81 | 2020-03-30 22:00:16 +0200 | [diff] [blame] | 687 | replaceUse(*I, NonNullOperand == 1 ? ConstantInt::getTrue(CondTy) |
| 688 | : ConstantInt::getFalse(CondTy)); |
| Nikita Popov | e6c9ab4 | 2020-01-30 22:32:46 +0100 | [diff] [blame] | 689 | Worklist.push(&*BBI); |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 690 | } |
| 691 | } |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 692 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 693 | // If we past the instruction, quit looking for it. |
| 694 | if (&*BBI == SI) |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 695 | SI = nullptr; |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 696 | if (&*BBI == SelectCond) |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 697 | SelectCond = nullptr; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 698 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 699 | // If we ran out of things to eliminate, break out of the loop. |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 700 | if (!SelectCond && !SI) |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 701 | break; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 702 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 703 | } |
| 704 | return true; |
| 705 | } |
| 706 | |
| Sanjay Patel | 1998cc6 | 2018-02-12 18:38:35 +0000 | [diff] [blame] | 707 | /// True if the multiply can not be expressed in an int this size. |
| 708 | static bool multiplyOverflows(const APInt &C1, const APInt &C2, APInt &Product, |
| 709 | bool IsSigned) { |
| 710 | bool Overflow; |
| 711 | Product = IsSigned ? C1.smul_ov(C2, Overflow) : C1.umul_ov(C2, Overflow); |
| 712 | return Overflow; |
| 713 | } |
| 714 | |
| Sanjay Patel | 9d2099c | 2018-07-15 17:06:59 +0000 | [diff] [blame] | 715 | /// True if C1 is a multiple of C2. Quotient contains C1/C2. |
| Sanjay Patel | 1998cc6 | 2018-02-12 18:38:35 +0000 | [diff] [blame] | 716 | static bool isMultiple(const APInt &C1, const APInt &C2, APInt &Quotient, |
| 717 | bool IsSigned) { |
| 718 | assert(C1.getBitWidth() == C2.getBitWidth() && "Constant widths not equal"); |
| 719 | |
| 720 | // Bail if we will divide by zero. |
| 721 | if (C2.isNullValue()) |
| 722 | return false; |
| 723 | |
| 724 | // Bail if we would divide INT_MIN by -1. |
| 725 | if (IsSigned && C1.isMinSignedValue() && C2.isAllOnesValue()) |
| 726 | return false; |
| 727 | |
| Rui Ueyama | 49a3ad2 | 2019-07-16 04:46:31 +0000 | [diff] [blame] | 728 | APInt Remainder(C1.getBitWidth(), /*val=*/0ULL, IsSigned); |
| Sanjay Patel | 1998cc6 | 2018-02-12 18:38:35 +0000 | [diff] [blame] | 729 | if (IsSigned) |
| 730 | APInt::sdivrem(C1, C2, Quotient, Remainder); |
| 731 | else |
| 732 | APInt::udivrem(C1, C2, Quotient, Remainder); |
| 733 | |
| 734 | return Remainder.isMinValue(); |
| 735 | } |
| 736 | |
/// This function implements the transforms common to both integer division
/// instructions (udiv and sdiv). It is called by the visitors to those integer
/// division instructions.
/// Returns a replacement instruction, &I if I was modified in place, or null
/// if no common transform applied.
Instruction *InstCombiner::commonIDivTransforms(BinaryOperator &I) {
  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);
  bool IsSigned = I.getOpcode() == Instruction::SDiv;
  Type *Ty = I.getType();

  // The RHS is known non-zero.
  if (Value *V = simplifyValueKnownNonZero(I.getOperand(1), *this, I))
    return replaceOperand(I, 1, V);

  // Handle cases involving: [su]div X, (select Cond, Y, Z)
  // This does not apply for fdiv.
  if (simplifyDivRemOfSelectWithZeroOp(I))
    return &I;

  // Folds keyed on a constant divisor C2.
  const APInt *C2;
  if (match(Op1, m_APInt(C2))) {
    Value *X;
    const APInt *C1;

    // (X / C1) / C2 -> X / (C1*C2)
    // Only valid when C1*C2 does not overflow this bit width.
    if ((IsSigned && match(Op0, m_SDiv(m_Value(X), m_APInt(C1)))) ||
        (!IsSigned && match(Op0, m_UDiv(m_Value(X), m_APInt(C1))))) {
      APInt Product(C1->getBitWidth(), /*val=*/0ULL, IsSigned);
      if (!multiplyOverflows(*C1, *C2, Product, IsSigned))
        return BinaryOperator::Create(I.getOpcode(), X,
                                      ConstantInt::get(Ty, Product));
    }

    // The mul must be nsw (signed case) / nuw (unsigned case) so the
    // intermediate product is known not to wrap.
    if ((IsSigned && match(Op0, m_NSWMul(m_Value(X), m_APInt(C1)))) ||
        (!IsSigned && match(Op0, m_NUWMul(m_Value(X), m_APInt(C1))))) {
      APInt Quotient(C1->getBitWidth(), /*val=*/0ULL, IsSigned);

      // (X * C1) / C2 -> X / (C2 / C1) if C2 is a multiple of C1.
      if (isMultiple(*C2, *C1, Quotient, IsSigned)) {
        auto *NewDiv = BinaryOperator::Create(I.getOpcode(), X,
                                              ConstantInt::get(Ty, Quotient));
        NewDiv->setIsExact(I.isExact());
        return NewDiv;
      }

      // (X * C1) / C2 -> X * (C1 / C2) if C1 is a multiple of C2.
      if (isMultiple(*C1, *C2, Quotient, IsSigned)) {
        auto *Mul = BinaryOperator::Create(Instruction::Mul, X,
                                           ConstantInt::get(Ty, Quotient));
        auto *OBO = cast<OverflowingBinaryOperator>(Op0);
        // Carry the no-wrap flags over from the original mul.
        Mul->setHasNoUnsignedWrap(!IsSigned && OBO->hasNoUnsignedWrap());
        Mul->setHasNoSignedWrap(OBO->hasNoSignedWrap());
        return Mul;
      }
    }

    // Same idea with a left shift acting as multiplication by 1 << C1.
    // The signed case excludes a shift by bitwidth-1 (would make 1 << C1
    // the sign bit).
    if ((IsSigned && match(Op0, m_NSWShl(m_Value(X), m_APInt(C1))) &&
         *C1 != C1->getBitWidth() - 1) ||
        (!IsSigned && match(Op0, m_NUWShl(m_Value(X), m_APInt(C1))))) {
      APInt Quotient(C1->getBitWidth(), /*val=*/0ULL, IsSigned);
      APInt C1Shifted = APInt::getOneBitSet(
          C1->getBitWidth(), static_cast<unsigned>(C1->getLimitedValue()));

      // (X << C1) / C2 -> X / (C2 >> C1) if C2 is a multiple of 1 << C1.
      if (isMultiple(*C2, C1Shifted, Quotient, IsSigned)) {
        auto *BO = BinaryOperator::Create(I.getOpcode(), X,
                                          ConstantInt::get(Ty, Quotient));
        BO->setIsExact(I.isExact());
        return BO;
      }

      // (X << C1) / C2 -> X * ((1 << C1) / C2) if 1 << C1 is a multiple of C2.
      if (isMultiple(C1Shifted, *C2, Quotient, IsSigned)) {
        auto *Mul = BinaryOperator::Create(Instruction::Mul, X,
                                           ConstantInt::get(Ty, Quotient));
        auto *OBO = cast<OverflowingBinaryOperator>(Op0);
        Mul->setHasNoUnsignedWrap(!IsSigned && OBO->hasNoUnsignedWrap());
        Mul->setHasNoSignedWrap(OBO->hasNoSignedWrap());
        return Mul;
      }
    }

    if (!C2->isNullValue()) // avoid X udiv 0
      if (Instruction *FoldedDiv = foldBinOpIntoSelectOrPhi(I))
        return FoldedDiv;
  }

  // 1 / Op1 folds: the quotient can only be 0, 1, or (signed) -1.
  if (match(Op0, m_One())) {
    assert(!Ty->isIntOrIntVectorTy(1) && "i1 divide not removed?");
    if (IsSigned) {
      // If Op1 is 0 then it's undefined behaviour, if Op1 is 1 then the
      // result is one, if Op1 is -1 then the result is minus one, otherwise
      // it's zero.
      Value *Inc = Builder.CreateAdd(Op1, Op0);
      Value *Cmp = Builder.CreateICmpULT(Inc, ConstantInt::get(Ty, 3));
      return SelectInst::Create(Cmp, Op1, ConstantInt::get(Ty, 0));
    } else {
      // If Op1 is 0 then it's undefined behaviour. If Op1 is 1 then the
      // result is one, otherwise it's zero.
      return new ZExtInst(Builder.CreateICmpEQ(Op1, Op0), Ty);
    }
  }

  // See if we can fold away this div instruction.
  if (SimplifyDemandedInstructionBits(I))
    return &I;

  // (X - (X rem Y)) / Y -> X / Y; usually originates as ((X / Y) * Y) / Y
  Value *X, *Z;
  if (match(Op0, m_Sub(m_Value(X), m_Value(Z)))) // (X - Z) / Y; Y = Op1
    if ((IsSigned && match(Z, m_SRem(m_Specific(X), m_Specific(Op1)))) ||
        (!IsSigned && match(Z, m_URem(m_Specific(X), m_Specific(Op1)))))
      return BinaryOperator::Create(I.getOpcode(), X, Op1);

  // (X << Y) / X -> 1 << Y
  // The no-wrap flag on the shl guarantees the division is exact.
  Value *Y;
  if (IsSigned && match(Op0, m_NSWShl(m_Specific(Op1), m_Value(Y))))
    return BinaryOperator::CreateNSWShl(ConstantInt::get(Ty, 1), Y);
  if (!IsSigned && match(Op0, m_NUWShl(m_Specific(Op1), m_Value(Y))))
    return BinaryOperator::CreateNUWShl(ConstantInt::get(Ty, 1), Y);

  // X / (X * Y) -> 1 / Y if the multiplication does not overflow.
  if (match(Op1, m_c_Mul(m_Specific(Op0), m_Value(Y)))) {
    bool HasNSW = cast<OverflowingBinaryOperator>(Op1)->hasNoSignedWrap();
    bool HasNUW = cast<OverflowingBinaryOperator>(Op1)->hasNoUnsignedWrap();
    if ((IsSigned && HasNSW) || (!IsSigned && HasNUW)) {
      // Rewrite I in place as 1 / Y.
      replaceOperand(I, 0, ConstantInt::get(Ty, 1));
      replaceOperand(I, 1, Y);
      return &I;
    }
  }

  return nullptr;
}
| 870 | |
// Recursion depth limit shared by the div/rem helpers in this file
// (presumably bounds visitUDivOperand's recursion — uses are outside this
// excerpt; TODO confirm).
static const unsigned MaxDepth = 6;
| 872 | |
namespace {

/// Callback type for the udiv-folding helpers (e.g. foldUDivPow2Cst below):
/// given the operands and the original udiv, produce the replacement
/// instruction.
using FoldUDivOperandCb = Instruction *(*)(Value *Op0, Value *Op1,
                                           const BinaryOperator &I,
                                           InstCombiner &IC);

/// Used to maintain state for visitUDivOperand().
struct UDivFoldAction {
  /// Informs visitUDiv() how to fold this operand. This can be zero if this
  /// action joins two actions together.
  FoldUDivOperandCb FoldAction;

  /// Which operand to fold.
  Value *OperandToFold;

  // Exactly one of the union members is meaningful, depending on whether
  // FoldAction is set (see the two constructors below).
  union {
    /// The instruction returned when FoldAction is invoked.
    Instruction *FoldResult;

    /// Stores the LHS action index if this action joins two actions together.
    size_t SelectLHSIdx;
  };

  UDivFoldAction(FoldUDivOperandCb FA, Value *InputOperand)
      : FoldAction(FA), OperandToFold(InputOperand), FoldResult(nullptr) {}
  UDivFoldAction(FoldUDivOperandCb FA, Value *InputOperand, size_t SLHS)
      : FoldAction(FA), OperandToFold(InputOperand), SelectLHSIdx(SLHS) {}
};

} // end anonymous namespace
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 903 | |
| 904 | // X udiv 2^C -> X >> C |
| 905 | static Instruction *foldUDivPow2Cst(Value *Op0, Value *Op1, |
| 906 | const BinaryOperator &I, InstCombiner &IC) { |
| Simon Pilgrim | 94cc89d | 2018-02-08 14:46:10 +0000 | [diff] [blame] | 907 | Constant *C1 = getLogBase2(Op0->getType(), cast<Constant>(Op1)); |
| 908 | if (!C1) |
| 909 | llvm_unreachable("Failed to constant fold udiv -> logbase2"); |
| 910 | BinaryOperator *LShr = BinaryOperator::CreateLShr(Op0, C1); |
| Suyog Sarda | 65f5ae9 | 2014-10-07 12:04:07 +0000 | [diff] [blame] | 911 | if (I.isExact()) |
| 912 | LShr->setIsExact(); |
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 913 | return LShr; |
| 914 | } |
| 915 | |
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 916 | // X udiv (C1 << N), where C1 is "1<<C2" --> X >> (N+C2) |
| Andrea Di Biagio | a82d52d | 2016-09-26 12:07:23 +0000 | [diff] [blame] | 917 | // X udiv (zext (C1 << N)), where C1 is "1<<C2" --> X >> (N+C2) |
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 918 | static Instruction *foldUDivShl(Value *Op0, Value *Op1, const BinaryOperator &I, |
| 919 | InstCombiner &IC) { |
| Andrea Di Biagio | a82d52d | 2016-09-26 12:07:23 +0000 | [diff] [blame] | 920 | Value *ShiftLeft; |
| 921 | if (!match(Op1, m_ZExt(m_Value(ShiftLeft)))) |
| 922 | ShiftLeft = Op1; |
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 923 | |
| Simon Pilgrim | 2a90acd | 2018-02-08 15:19:38 +0000 | [diff] [blame] | 924 | Constant *CI; |
| Andrea Di Biagio | a82d52d | 2016-09-26 12:07:23 +0000 | [diff] [blame] | 925 | Value *N; |
| Simon Pilgrim | 2a90acd | 2018-02-08 15:19:38 +0000 | [diff] [blame] | 926 | if (!match(ShiftLeft, m_Shl(m_Constant(CI), m_Value(N)))) |
| Andrea Di Biagio | a82d52d | 2016-09-26 12:07:23 +0000 | [diff] [blame] | 927 | llvm_unreachable("match should never fail here!"); |
| Simon Pilgrim | 2a90acd | 2018-02-08 15:19:38 +0000 | [diff] [blame] | 928 | Constant *Log2Base = getLogBase2(N->getType(), CI); |
| 929 | if (!Log2Base) |
| 930 | llvm_unreachable("getLogBase2 should never fail here!"); |
| 931 | N = IC.Builder.CreateAdd(N, Log2Base); |
| Andrea Di Biagio | a82d52d | 2016-09-26 12:07:23 +0000 | [diff] [blame] | 932 | if (Op1 != ShiftLeft) |
| Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 933 | N = IC.Builder.CreateZExt(N, Op1->getType()); |
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 934 | BinaryOperator *LShr = BinaryOperator::CreateLShr(Op0, N); |
| Suyog Sarda | 65f5ae9 | 2014-10-07 12:04:07 +0000 | [diff] [blame] | 935 | if (I.isExact()) |
| 936 | LShr->setIsExact(); |
| David Majnemer | 37f8f44 | 2013-07-04 21:17:49 +0000 | [diff] [blame] | 937 | return LShr; |
| 938 | } |
| 939 | |
// Recursively visits the possible right hand operands of a udiv
// instruction, seeing through select instructions, to determine if we can
// replace the udiv with something simpler. If we find that an operand is not
// able to simplify the udiv, we abort the entire transformation.
//
// Returns 0 on failure; on success, returns the current size of Actions. Each
// fold opportunity is appended to Actions; for a select, both arms must be
// foldable, and a "join" action (null callback) is appended that records the
// index of the LHS arm's action so visitUDiv can rebuild the select from the
// two folded arms.
static size_t visitUDivOperand(Value *Op0, Value *Op1, const BinaryOperator &I,
                               SmallVectorImpl<UDivFoldAction> &Actions,
                               unsigned Depth = 0) {
  // Check to see if this is an unsigned division with an exact power of 2,
  // if so, convert to a right shift.
  if (match(Op1, m_Power2())) {
    Actions.push_back(UDivFoldAction(foldUDivPow2Cst, Op1));
    return Actions.size();
  }

  // X udiv (C1 << N), where C1 is "1<<C2" --> X >> (N+C2)
  if (match(Op1, m_Shl(m_Power2(), m_Value())) ||
      match(Op1, m_ZExt(m_Shl(m_Power2(), m_Value())))) {
    Actions.push_back(UDivFoldAction(foldUDivShl, Op1));
    return Actions.size();
  }

  // The remaining tests are all recursive, so bail out if we hit the limit.
  if (Depth++ == MaxDepth)
    return 0;

  // Recurse into both arms of a select. The transform only fires if *both*
  // arms can be folded; otherwise we return 0 and abandon everything.
  if (SelectInst *SI = dyn_cast<SelectInst>(Op1))
    if (size_t LHSIdx =
            visitUDivOperand(Op0, SI->getOperand(1), I, Actions, Depth))
      if (visitUDivOperand(Op0, SI->getOperand(2), I, Actions, Depth)) {
        // LHSIdx is a 1-based size, so store LHSIdx - 1 as the 0-based index
        // of the LHS arm's action in the joining (null-callback) action.
        Actions.push_back(UDivFoldAction(nullptr, Op1, LHSIdx - 1));
        return Actions.size();
      }

  return 0;
}
| 975 | |
| Sanjay Patel | bb78938 | 2017-08-24 22:54:01 +0000 | [diff] [blame] | 976 | /// If we have zero-extended operands of an unsigned div or rem, we may be able |
| 977 | /// to narrow the operation (sink the zext below the math). |
| 978 | static Instruction *narrowUDivURem(BinaryOperator &I, |
| 979 | InstCombiner::BuilderTy &Builder) { |
| 980 | Instruction::BinaryOps Opcode = I.getOpcode(); |
| 981 | Value *N = I.getOperand(0); |
| 982 | Value *D = I.getOperand(1); |
| 983 | Type *Ty = I.getType(); |
| 984 | Value *X, *Y; |
| 985 | if (match(N, m_ZExt(m_Value(X))) && match(D, m_ZExt(m_Value(Y))) && |
| 986 | X->getType() == Y->getType() && (N->hasOneUse() || D->hasOneUse())) { |
| 987 | // udiv (zext X), (zext Y) --> zext (udiv X, Y) |
| 988 | // urem (zext X), (zext Y) --> zext (urem X, Y) |
| 989 | Value *NarrowOp = Builder.CreateBinOp(Opcode, X, Y); |
| 990 | return new ZExtInst(NarrowOp, Ty); |
| 991 | } |
| 992 | |
| 993 | Constant *C; |
| 994 | if ((match(N, m_OneUse(m_ZExt(m_Value(X)))) && match(D, m_Constant(C))) || |
| 995 | (match(D, m_OneUse(m_ZExt(m_Value(X)))) && match(N, m_Constant(C)))) { |
| 996 | // If the constant is the same in the smaller type, use the narrow version. |
| 997 | Constant *TruncC = ConstantExpr::getTrunc(C, X->getType()); |
| 998 | if (ConstantExpr::getZExt(TruncC, Ty) != C) |
| 999 | return nullptr; |
| 1000 | |
| 1001 | // udiv (zext X), C --> zext (udiv X, C') |
| 1002 | // urem (zext X), C --> zext (urem X, C') |
| 1003 | // udiv C, (zext X) --> zext (udiv C', X) |
| 1004 | // urem C, (zext X) --> zext (urem C', X) |
| 1005 | Value *NarrowOp = isa<Constant>(D) ? Builder.CreateBinOp(Opcode, X, TruncC) |
| 1006 | : Builder.CreateBinOp(Opcode, TruncC, X); |
| 1007 | return new ZExtInst(NarrowOp, Ty); |
| 1008 | } |
| 1009 | |
| 1010 | return nullptr; |
| 1011 | } |
| 1012 | |
/// Visit an unsigned division: simplify, apply common integer-div transforms,
/// then try udiv-specific folds (mostly conversions to lshr). Returns a
/// replacement instruction or nullptr if no fold applies.
Instruction *InstCombiner::visitUDiv(BinaryOperator &I) {
  if (Value *V = SimplifyUDivInst(I.getOperand(0), I.getOperand(1),
                                  SQ.getWithInstruction(&I)))
    return replaceInstUsesWith(I, V);

  if (Instruction *X = foldVectorBinop(I))
    return X;

  // Handle the integer div common cases
  if (Instruction *Common = commonIDivTransforms(I))
    return Common;

  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);
  Value *X;
  const APInt *C1, *C2;
  if (match(Op0, m_LShr(m_Value(X), m_APInt(C1))) && match(Op1, m_APInt(C2))) {
    // (X lshr C1) udiv C2 --> X udiv (C2 << C1)
    // Only valid when C2 << C1 does not overflow.
    bool Overflow;
    APInt C2ShlC1 = C2->ushl_ov(*C1, Overflow);
    if (!Overflow) {
      // Exact is preserved only if both the udiv and the lshr were exact.
      bool IsExact = I.isExact() && match(Op0, m_Exact(m_Value()));
      BinaryOperator *BO = BinaryOperator::CreateUDiv(
          X, ConstantInt::get(X->getType(), C2ShlC1));
      if (IsExact)
        BO->setIsExact();
      return BO;
    }
  }

  // Op0 / C where C is large (negative) --> zext (Op0 >= C)
  // TODO: Could use isKnownNegative() to handle non-constant values.
  Type *Ty = I.getType();
  if (match(Op1, m_Negative())) {
    // Divisor > half the range, so the quotient can only be 0 or 1.
    Value *Cmp = Builder.CreateICmpUGE(Op0, Op1);
    return CastInst::CreateZExtOrBitCast(Cmp, Ty);
  }
  // Op0 / (sext i1 X) --> zext (Op0 == -1) (if X is 0, the div is undefined)
  if (match(Op1, m_SExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1)) {
    Value *Cmp = Builder.CreateICmpEQ(Op0, ConstantInt::getAllOnesValue(Ty));
    return CastInst::CreateZExtOrBitCast(Cmp, Ty);
  }

  if (Instruction *NarrowDiv = narrowUDivURem(I, Builder))
    return NarrowDiv;

  // If the udiv operands are non-overflowing multiplies with a common operand,
  // then eliminate the common factor:
  // (A * B) / (A * X) --> B / X (and commuted variants)
  // TODO: The code would be reduced if we had m_c_NUWMul pattern matching.
  // TODO: If -reassociation handled this generally, we could remove this.
  Value *A, *B;
  if (match(Op0, m_NUWMul(m_Value(A), m_Value(B)))) {
    if (match(Op1, m_NUWMul(m_Specific(A), m_Value(X))) ||
        match(Op1, m_NUWMul(m_Value(X), m_Specific(A))))
      return BinaryOperator::CreateUDiv(B, X);
    if (match(Op1, m_NUWMul(m_Specific(B), m_Value(X))) ||
        match(Op1, m_NUWMul(m_Value(X), m_Specific(B))))
      return BinaryOperator::CreateUDiv(A, X);
  }

  // (LHS udiv (select (select (...)))) -> (LHS >> (select (select (...))))
  // visitUDivOperand fills UDivActions with fold callbacks (and null-callback
  // "join" entries for selects); replay them in order here.
  SmallVector<UDivFoldAction, 6> UDivActions;
  if (visitUDivOperand(Op0, Op1, I, UDivActions))
    for (unsigned i = 0, e = UDivActions.size(); i != e; ++i) {
      FoldUDivOperandCb Action = UDivActions[i].FoldAction;
      Value *ActionOp1 = UDivActions[i].OperandToFold;
      Instruction *Inst;
      if (Action)
        Inst = Action(Op0, ActionOp1, I, *this);
      else {
        // This action joins two actions together. The RHS of this action is
        // simply the last action we processed, we saved the LHS action index in
        // the joining action.
        size_t SelectRHSIdx = i - 1;
        Value *SelectRHS = UDivActions[SelectRHSIdx].FoldResult;
        size_t SelectLHSIdx = UDivActions[i].SelectLHSIdx;
        Value *SelectLHS = UDivActions[SelectLHSIdx].FoldResult;
        Inst = SelectInst::Create(cast<SelectInst>(ActionOp1)->getCondition(),
                                  SelectLHS, SelectRHS);
      }

      // If this is the last action to process, return it to the InstCombiner.
      // Otherwise, we insert it before the UDiv and record it so that we may
      // use it as part of a joining action (i.e., a SelectInst).
      if (e - i != 1) {
        Inst->insertBefore(&I);
        UDivActions[i].FoldResult = Inst;
      } else
        return Inst;
    }

  return nullptr;
}
| 1106 | |
/// Visit a signed division: simplify, apply common integer-div transforms,
/// then try sdiv-specific folds, including converting to udiv/ashr when sign
/// bits are known. Returns a replacement instruction or nullptr.
Instruction *InstCombiner::visitSDiv(BinaryOperator &I) {
  if (Value *V = SimplifySDivInst(I.getOperand(0), I.getOperand(1),
                                  SQ.getWithInstruction(&I)))
    return replaceInstUsesWith(I, V);

  if (Instruction *X = foldVectorBinop(I))
    return X;

  // Handle the integer div common cases
  if (Instruction *Common = commonIDivTransforms(I))
    return Common;

  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);
  Value *X;
  // sdiv Op0, -1 --> -Op0
  // sdiv Op0, (sext i1 X) --> -Op0 (because if X is 0, the op is undefined)
  if (match(Op1, m_AllOnes()) ||
      (match(Op1, m_SExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1)))
    return BinaryOperator::CreateNeg(Op0);

  // X / INT_MIN --> X == INT_MIN
  // (only INT_MIN itself divides to a nonzero (1) quotient)
  if (match(Op1, m_SignMask()))
    return new ZExtInst(Builder.CreateICmpEQ(Op0, Op1), I.getType());

  const APInt *Op1C;
  if (match(Op1, m_APInt(Op1C))) {
    // sdiv exact X, C --> ashr exact X, log2(C)
    if (I.isExact() && Op1C->isNonNegative() && Op1C->isPowerOf2()) {
      Value *ShAmt = ConstantInt::get(Op1->getType(), Op1C->exactLogBase2());
      return BinaryOperator::CreateExactAShr(Op0, ShAmt, I.getName());
    }

    // If the dividend is sign-extended and the constant divisor is small enough
    // to fit in the source type, shrink the division to the narrower type:
    // (sext X) sdiv C --> sext (X sdiv C)
    Value *Op0Src;
    if (match(Op0, m_OneUse(m_SExt(m_Value(Op0Src)))) &&
        Op0Src->getType()->getScalarSizeInBits() >= Op1C->getMinSignedBits()) {

      // In the general case, we need to make sure that the dividend is not the
      // minimum signed value because dividing that by -1 is UB. But here, we
      // know that the -1 divisor case is already handled above.

      Constant *NarrowDivisor =
          ConstantExpr::getTrunc(cast<Constant>(Op1), Op0Src->getType());
      Value *NarrowOp = Builder.CreateSDiv(Op0Src, NarrowDivisor);
      return new SExtInst(NarrowOp, Op0->getType());
    }

    // -X / C --> X / -C (if the negation doesn't overflow).
    // TODO: This could be enhanced to handle arbitrary vector constants by
    // checking if all elements are not the min-signed-val.
    if (!Op1C->isMinSignedValue() &&
        match(Op0, m_NSWSub(m_Zero(), m_Value(X)))) {
      Constant *NegC = ConstantInt::get(I.getType(), -(*Op1C));
      Instruction *BO = BinaryOperator::CreateSDiv(X, NegC);
      BO->setIsExact(I.isExact());
      return BO;
    }
  }

  // -X / Y --> -(X / Y)
  // (the sub is nsw, so the negation of X cannot overflow)
  Value *Y;
  if (match(&I, m_SDiv(m_OneUse(m_NSWSub(m_Zero(), m_Value(X))), m_Value(Y))))
    return BinaryOperator::CreateNSWNeg(
        Builder.CreateSDiv(X, Y, I.getName(), I.isExact()));

  // If the sign bits of both operands are zero (i.e. we can prove they are
  // unsigned inputs), turn this into a udiv.
  APInt Mask(APInt::getSignMask(I.getType()->getScalarSizeInBits()));
  if (MaskedValueIsZero(Op0, Mask, 0, &I)) {
    if (MaskedValueIsZero(Op1, Mask, 0, &I)) {
      // X sdiv Y -> X udiv Y, iff X and Y don't have sign bit set
      auto *BO = BinaryOperator::CreateUDiv(Op0, Op1, I.getName());
      BO->setIsExact(I.isExact());
      return BO;
    }

    if (match(Op1, m_NegatedPower2())) {
      // X sdiv (-(1 << C)) -> -(X sdiv (1 << C)) ->
      // -> -(X udiv (1 << C)) -> -(X u>> C)
      return BinaryOperator::CreateNeg(Builder.Insert(foldUDivPow2Cst(
          Op0, ConstantExpr::getNeg(cast<Constant>(Op1)), I, *this)));
    }

    if (isKnownToBeAPowerOfTwo(Op1, /*OrZero*/ true, 0, &I)) {
      // X sdiv (1 << Y) -> X udiv (1 << Y) ( -> X u>> Y)
      // Safe because the only negative value (1 << Y) can take on is
      // INT_MIN, and X sdiv INT_MIN == X udiv INT_MIN == 0 if X doesn't have
      // the sign bit set.
      auto *BO = BinaryOperator::CreateUDiv(Op0, Op1, I.getName());
      BO->setIsExact(I.isExact());
      return BO;
    }
  }

  return nullptr;
}
| 1205 | |
| Sanjay Patel | d8dd015 | 2018-02-20 23:51:16 +0000 | [diff] [blame] | 1206 | /// Remove negation and try to convert division into multiplication. |
| Sanjay Patel | 90f4c8e | 2018-02-20 16:08:15 +0000 | [diff] [blame] | 1207 | static Instruction *foldFDivConstantDivisor(BinaryOperator &I) { |
| 1208 | Constant *C; |
| 1209 | if (!match(I.getOperand(1), m_Constant(C))) |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1210 | return nullptr; |
| Benjamin Kramer | 76b15d0 | 2014-01-19 13:36:27 +0000 | [diff] [blame] | 1211 | |
| Sanjay Patel | d8dd015 | 2018-02-20 23:51:16 +0000 | [diff] [blame] | 1212 | // -X / C --> X / -C |
| 1213 | Value *X; |
| 1214 | if (match(I.getOperand(0), m_FNeg(m_Value(X)))) |
| Sanjay Patel | 5a6f904 | 2018-02-21 22:18:55 +0000 | [diff] [blame] | 1215 | return BinaryOperator::CreateFDivFMF(X, ConstantExpr::getFNeg(C), &I); |
| Sanjay Patel | d8dd015 | 2018-02-20 23:51:16 +0000 | [diff] [blame] | 1216 | |
| Sanjay Patel | 90f4c8e | 2018-02-20 16:08:15 +0000 | [diff] [blame] | 1217 | // If the constant divisor has an exact inverse, this is always safe. If not, |
| 1218 | // then we can still create a reciprocal if fast-math-flags allow it and the |
| 1219 | // constant is a regular number (not zero, infinite, or denormal). |
| 1220 | if (!(C->hasExactInverseFP() || (I.hasAllowReciprocal() && C->isNormalFP()))) |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1221 | return nullptr; |
| Shuxin Yang | 320f52a | 2013-01-14 22:48:41 +0000 | [diff] [blame] | 1222 | |
| Sanjay Patel | 90f4c8e | 2018-02-20 16:08:15 +0000 | [diff] [blame] | 1223 | // Disallow denormal constants because we don't know what would happen |
| 1224 | // on all targets. |
| 1225 | // TODO: Use Intrinsic::canonicalize or let function attributes tell us that |
| 1226 | // denorms are flushed? |
| 1227 | auto *RecipC = ConstantExpr::getFDiv(ConstantFP::get(I.getType(), 1.0), C); |
| 1228 | if (!RecipC->isNormalFP()) |
| 1229 | return nullptr; |
| 1230 | |
| Sanjay Patel | d8dd015 | 2018-02-20 23:51:16 +0000 | [diff] [blame] | 1231 | // X / C --> X * (1 / C) |
| Sanjay Patel | 5a6f904 | 2018-02-21 22:18:55 +0000 | [diff] [blame] | 1232 | return BinaryOperator::CreateFMulFMF(I.getOperand(0), RecipC, &I); |
| Shuxin Yang | 320f52a | 2013-01-14 22:48:41 +0000 | [diff] [blame] | 1233 | } |
| 1234 | |
| Sanjay Patel | 6f716a7 | 2018-02-21 00:01:45 +0000 | [diff] [blame] | 1235 | /// Remove negation and try to reassociate constant math. |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1236 | static Instruction *foldFDivConstantDividend(BinaryOperator &I) { |
| Sanjay Patel | 6f716a7 | 2018-02-21 00:01:45 +0000 | [diff] [blame] | 1237 | Constant *C; |
| 1238 | if (!match(I.getOperand(0), m_Constant(C))) |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1239 | return nullptr; |
| 1240 | |
| Sanjay Patel | 6f716a7 | 2018-02-21 00:01:45 +0000 | [diff] [blame] | 1241 | // C / -X --> -C / X |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1242 | Value *X; |
| Sanjay Patel | 5a6f904 | 2018-02-21 22:18:55 +0000 | [diff] [blame] | 1243 | if (match(I.getOperand(1), m_FNeg(m_Value(X)))) |
| 1244 | return BinaryOperator::CreateFDivFMF(ConstantExpr::getFNeg(C), X, &I); |
| Sanjay Patel | 6f716a7 | 2018-02-21 00:01:45 +0000 | [diff] [blame] | 1245 | |
| 1246 | if (!I.hasAllowReassoc() || !I.hasAllowReciprocal()) |
| 1247 | return nullptr; |
| 1248 | |
| 1249 | // Try to reassociate C / X expressions where X includes another constant. |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1250 | Constant *C2, *NewC = nullptr; |
| 1251 | if (match(I.getOperand(1), m_FMul(m_Value(X), m_Constant(C2)))) { |
| Sanjay Patel | 6f716a7 | 2018-02-21 00:01:45 +0000 | [diff] [blame] | 1252 | // C / (X * C2) --> (C / C2) / X |
| 1253 | NewC = ConstantExpr::getFDiv(C, C2); |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1254 | } else if (match(I.getOperand(1), m_FDiv(m_Value(X), m_Constant(C2)))) { |
| Sanjay Patel | 6f716a7 | 2018-02-21 00:01:45 +0000 | [diff] [blame] | 1255 | // C / (X / C2) --> (C * C2) / X |
| 1256 | NewC = ConstantExpr::getFMul(C, C2); |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1257 | } |
| 1258 | // Disallow denormal constants because we don't know what would happen |
| 1259 | // on all targets. |
| 1260 | // TODO: Use Intrinsic::canonicalize or let function attributes tell us that |
| 1261 | // denorms are flushed? |
| 1262 | if (!NewC || !NewC->isNormalFP()) |
| 1263 | return nullptr; |
| 1264 | |
| Sanjay Patel | 5a6f904 | 2018-02-21 22:18:55 +0000 | [diff] [blame] | 1265 | return BinaryOperator::CreateFDivFMF(NewC, X, &I); |
| Sanjay Patel | e412954 | 2018-02-19 21:17:58 +0000 | [diff] [blame] | 1266 | } |
| 1267 | |
/// Visit a floating-point division: simplify, then apply a chain of folds,
/// most of which are gated on the instruction's fast-math flags. Returns a
/// replacement instruction or nullptr.
Instruction *InstCombiner::visitFDiv(BinaryOperator &I) {
  if (Value *V = SimplifyFDivInst(I.getOperand(0), I.getOperand(1),
                                  I.getFastMathFlags(),
                                  SQ.getWithInstruction(&I)))
    return replaceInstUsesWith(I, V);

  if (Instruction *X = foldVectorBinop(I))
    return X;

  if (Instruction *R = foldFDivConstantDivisor(I))
    return R;

  if (Instruction *R = foldFDivConstantDividend(I))
    return R;

  if (Instruction *R = foldFPSignBitOps(I))
    return R;

  // If one operand is a constant and the other is a select, try to push the
  // fdiv into the select's arms.
  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);
  if (isa<Constant>(Op0))
    if (SelectInst *SI = dyn_cast<SelectInst>(Op1))
      if (Instruction *R = FoldOpIntoSelect(I, SI))
        return R;

  if (isa<Constant>(Op1))
    if (SelectInst *SI = dyn_cast<SelectInst>(Op0))
      if (Instruction *R = FoldOpIntoSelect(I, SI))
        return R;

  // Reassociation folds for nested divisions (require reassoc + arcp).
  if (I.hasAllowReassoc() && I.hasAllowReciprocal()) {
    Value *X, *Y;
    if (match(Op0, m_OneUse(m_FDiv(m_Value(X), m_Value(Y)))) &&
        (!isa<Constant>(Y) || !isa<Constant>(Op1))) {
      // (X / Y) / Z => X / (Y * Z)
      Value *YZ = Builder.CreateFMulFMF(Y, Op1, &I);
      return BinaryOperator::CreateFDivFMF(X, YZ, &I);
    }
    if (match(Op1, m_OneUse(m_FDiv(m_Value(X), m_Value(Y)))) &&
        (!isa<Constant>(Y) || !isa<Constant>(Op0))) {
      // Z / (X / Y) => (Y * Z) / X
      Value *YZ = Builder.CreateFMulFMF(Y, Op0, &I);
      return BinaryOperator::CreateFDivFMF(YZ, X, &I);
    }
    // Z / (1.0 / Y) => (Y * Z)
    //
    // This is a special case of Z / (X / Y) => (Y * Z) / X, with X = 1.0. The
    // m_OneUse check is avoided because even in the case of the multiple uses
    // for 1.0/Y, the number of instructions remain the same and a division is
    // replaced by a multiplication.
    if (match(Op1, m_FDiv(m_SpecificFP(1.0), m_Value(Y))))
      return BinaryOperator::CreateFMulFMF(Y, Op0, &I);
  }

  if (I.hasAllowReassoc() && Op0->hasOneUse() && Op1->hasOneUse()) {
    // sin(X) / cos(X) -> tan(X)
    // cos(X) / sin(X) -> 1/tan(X) (cotangent)
    Value *X;
    bool IsTan = match(Op0, m_Intrinsic<Intrinsic::sin>(m_Value(X))) &&
                 match(Op1, m_Intrinsic<Intrinsic::cos>(m_Specific(X)));
    bool IsCot =
        !IsTan && match(Op0, m_Intrinsic<Intrinsic::cos>(m_Value(X))) &&
                  match(Op1, m_Intrinsic<Intrinsic::sin>(m_Specific(X)));

    // Only emit the libcall if the target actually has a tan() of this type.
    if ((IsTan || IsCot) &&
        hasFloatFn(&TLI, I.getType(), LibFunc_tan, LibFunc_tanf, LibFunc_tanl)) {
      IRBuilder<> B(&I);
      IRBuilder<>::FastMathFlagGuard FMFGuard(B);
      B.setFastMathFlags(I.getFastMathFlags());
      AttributeList Attrs =
          cast<CallBase>(Op0)->getCalledFunction()->getAttributes();
      Value *Res = emitUnaryFloatFnCall(X, &TLI, LibFunc_tan, LibFunc_tanf,
                                        LibFunc_tanl, B, Attrs);
      if (IsCot)
        Res = B.CreateFDiv(ConstantFP::get(I.getType(), 1.0), Res);
      return replaceInstUsesWith(I, Res);
    }
  }

  // X / (X * Y) --> 1.0 / Y
  // Reassociate to (X / X -> 1.0) is legal when NaNs are not allowed.
  // We can ignore the possibility that X is infinity because INF/INF is NaN.
  Value *X, *Y;
  if (I.hasNoNaNs() && I.hasAllowReassoc() &&
      match(Op1, m_c_FMul(m_Specific(Op0), m_Value(Y)))) {
    // Rewrite this instruction in place rather than creating a new one.
    replaceOperand(I, 0, ConstantFP::get(I.getType(), 1.0));
    replaceOperand(I, 1, Y);
    return &I;
  }

  // X / fabs(X) -> copysign(1.0, X)
  // fabs(X) / X -> copysign(1.0, X)
  if (I.hasNoNaNs() && I.hasNoInfs() &&
      (match(&I,
             m_FDiv(m_Value(X), m_Intrinsic<Intrinsic::fabs>(m_Deferred(X)))) ||
       match(&I, m_FDiv(m_Intrinsic<Intrinsic::fabs>(m_Value(X)),
                        m_Deferred(X))))) {
    Value *V = Builder.CreateBinaryIntrinsic(
        Intrinsic::copysign, ConstantFP::get(I.getType(), 1.0), X, &I);
    return replaceInstUsesWith(I, V);
  }
  return nullptr;
}
| 1370 | |
/// This function implements the transforms common to both integer remainder
/// instructions (urem and srem). It is called by the visitors to those integer
/// remainder instructions.
/// Returns a replacement instruction, &I if I was mutated in place, or nullptr
/// if no common transform applies.
Instruction *InstCombiner::commonIRemTransforms(BinaryOperator &I) {
  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);

  // The RHS is known non-zero.
  if (Value *V = simplifyValueKnownNonZero(I.getOperand(1), *this, I))
    return replaceOperand(I, 1, V);

  // Handle cases involving: rem X, (select Cond, Y, Z)
  if (simplifyDivRemOfSelectWithZeroOp(I))
    return &I;

  if (isa<Constant>(Op1)) {
    if (Instruction *Op0I = dyn_cast<Instruction>(Op0)) {
      if (SelectInst *SI = dyn_cast<SelectInst>(Op0I)) {
        // rem (select Cond, Y, Z), C --> select Cond, (rem Y, C), (rem Z, C)
        if (Instruction *R = FoldOpIntoSelect(I, SI))
          return R;
      } else if (auto *PN = dyn_cast<PHINode>(Op0I)) {
        const APInt *Op1Int;
        if (match(Op1, m_APInt(Op1Int)) && !Op1Int->isMinValue() &&
            (I.getOpcode() == Instruction::URem ||
             !Op1Int->isMinSignedValue())) {
          // foldOpIntoPhi will speculate instructions to the end of the PHI's
          // predecessor blocks, so do this only if we know the srem or urem
          // will not fault.
          if (Instruction *NV = foldOpIntoPhi(I, PN))
            return NV;
        }
      }

      // See if we can fold away this rem instruction.
      if (SimplifyDemandedInstructionBits(I))
        return &I;
    }
  }

  return nullptr;
}
| 1412 | |
| 1413 | Instruction *InstCombiner::visitURem(BinaryOperator &I) { |
| Sanjay Patel | 7b0fc75 | 2018-06-21 17:06:36 +0000 | [diff] [blame] | 1414 | if (Value *V = SimplifyURemInst(I.getOperand(0), I.getOperand(1), |
| 1415 | SQ.getWithInstruction(&I))) |
| Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1416 | return replaceInstUsesWith(I, V); |
| Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1417 | |
| Sanjay Patel | 79dceb2 | 2018-10-03 15:20:58 +0000 | [diff] [blame] | 1418 | if (Instruction *X = foldVectorBinop(I)) |
| Sanjay Patel | bbc6d60 | 2018-06-02 16:27:44 +0000 | [diff] [blame] | 1419 | return X; |
| 1420 | |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 1421 | if (Instruction *common = commonIRemTransforms(I)) |
| 1422 | return common; |
| Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1423 | |
| Sanjay Patel | bb78938 | 2017-08-24 22:54:01 +0000 | [diff] [blame] | 1424 | if (Instruction *NarrowRem = narrowUDivURem(I, Builder)) |
| 1425 | return NarrowRem; |
| David Majnemer | 6c30f49 | 2013-05-12 00:07:05 +0000 | [diff] [blame] | 1426 | |
| David Majnemer | 470b077 | 2013-05-11 09:01:28 +0000 | [diff] [blame] | 1427 | // X urem Y -> X and Y-1, where Y is a power of 2, |
| Sanjay Patel | 7b0fc75 | 2018-06-21 17:06:36 +0000 | [diff] [blame] | 1428 | Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1); |
| Sanjay Patel | 3575f0c | 2018-06-26 16:30:00 +0000 | [diff] [blame] | 1429 | Type *Ty = I.getType(); |
| Craig Topper | d4039f7 | 2017-05-25 21:51:12 +0000 | [diff] [blame] | 1430 | if (isKnownToBeAPowerOfTwo(Op1, /*OrZero*/ true, 0, &I)) { |
| Roman Lebedev | be612ea | 2019-07-30 15:28:22 +0000 | [diff] [blame] | 1431 | // This may increase instruction count, we don't enforce that Y is a |
| 1432 | // constant. |
| Sanjay Patel | 3575f0c | 2018-06-26 16:30:00 +0000 | [diff] [blame] | 1433 | Constant *N1 = Constant::getAllOnesValue(Ty); |
| Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1434 | Value *Add = Builder.CreateAdd(Op1, N1); |
| Chris Lattner | 6b657ae | 2011-02-10 05:36:31 +0000 | [diff] [blame] | 1435 | return BinaryOperator::CreateAnd(Op0, Add); |
| 1436 | } |
| 1437 | |
| Nick Lewycky | 7459be6 | 2013-07-13 01:16:47 +0000 | [diff] [blame] | 1438 | // 1 urem X -> zext(X != 1) |
| Sanjay Patel | af4e599 | 2019-12-02 12:10:05 -0500 | [diff] [blame] | 1439 | if (match(Op0, m_One())) { |
| 1440 | Value *Cmp = Builder.CreateICmpNE(Op1, ConstantInt::get(Ty, 1)); |
| 1441 | return CastInst::CreateZExtOrBitCast(Cmp, Ty); |
| 1442 | } |
| Nick Lewycky | 7459be6 | 2013-07-13 01:16:47 +0000 | [diff] [blame] | 1443 | |
| Sanjay Patel | 30ef70b | 2016-09-22 22:36:26 +0000 | [diff] [blame] | 1444 | // X urem C -> X < C ? X : X - C, where C >= signbit. |
| Simon Pilgrim | 1889f26 | 2018-02-08 18:36:01 +0000 | [diff] [blame] | 1445 | if (match(Op1, m_Negative())) { |
| Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1446 | Value *Cmp = Builder.CreateICmpULT(Op0, Op1); |
| 1447 | Value *Sub = Builder.CreateSub(Op0, Op1); |
| Sanjay Patel | 30ef70b | 2016-09-22 22:36:26 +0000 | [diff] [blame] | 1448 | return SelectInst::Create(Cmp, Op0, Sub); |
| 1449 | } |
| 1450 | |
| Sanjay Patel | 3575f0c | 2018-06-26 16:30:00 +0000 | [diff] [blame] | 1451 | // If the divisor is a sext of a boolean, then the divisor must be max |
| 1452 | // unsigned value (-1). Therefore, the remainder is Op0 unless Op0 is also |
| 1453 | // max unsigned value. In that case, the remainder is 0: |
| 1454 | // urem Op0, (sext i1 X) --> (Op0 == -1) ? 0 : Op0 |
| 1455 | Value *X; |
| 1456 | if (match(Op1, m_SExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1)) { |
| 1457 | Value *Cmp = Builder.CreateICmpEQ(Op0, ConstantInt::getAllOnesValue(Ty)); |
| 1458 | return SelectInst::Create(Cmp, ConstantInt::getNullValue(Ty), Op0); |
| 1459 | } |
| 1460 | |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1461 | return nullptr; |
| Chris Lattner | dc054bf | 2010-01-05 06:09:35 +0000 | [diff] [blame] | 1462 | } |
| 1463 | |
/// Visit a signed remainder instruction: try instruction simplification, the
/// common integer-rem transforms, and several srem-specific folds (divisor
/// negation, srem->urem strength reduction, constant-vector canonicalization).
Instruction *InstCombiner::visitSRem(BinaryOperator &I) {
  if (Value *V = SimplifySRemInst(I.getOperand(0), I.getOperand(1),
                                  SQ.getWithInstruction(&I)))
    return replaceInstUsesWith(I, V);

  if (Instruction *X = foldVectorBinop(I))
    return X;

  // Handle the integer rem common cases
  if (Instruction *Common = commonIRemTransforms(I))
    return Common;

  Value *Op0 = I.getOperand(0), *Op1 = I.getOperand(1);
  {
    const APInt *Y;
    // X % -Y -> X % Y
    // Guard against INT_MIN: negating it would overflow back to itself and
    // this transform would make no progress.
    if (match(Op1, m_Negative(Y)) && !Y->isMinSignedValue())
      return replaceOperand(I, 1, ConstantInt::get(I.getType(), -*Y));
  }

  // -X srem Y --> -(X srem Y)
  // Requires nsw on the sub and one use so the negation can be sunk safely.
  Value *X, *Y;
  if (match(&I, m_SRem(m_OneUse(m_NSWSub(m_Zero(), m_Value(X))), m_Value(Y))))
    return BinaryOperator::CreateNSWNeg(Builder.CreateSRem(X, Y));

  // If the sign bits of both operands are zero (i.e. we can prove they are
  // unsigned inputs), turn this into a urem.
  APInt Mask(APInt::getSignMask(I.getType()->getScalarSizeInBits()));
  if (MaskedValueIsZero(Op1, Mask, 0, &I) &&
      MaskedValueIsZero(Op0, Mask, 0, &I)) {
    // X srem Y -> X urem Y, iff X and Y don't have sign bit set
    return BinaryOperator::CreateURem(Op0, Op1, I.getName());
  }

  // If it's a constant vector, flip any negative values positive.
  if (isa<ConstantVector>(Op1) || isa<ConstantDataVector>(Op1)) {
    Constant *C = cast<Constant>(Op1);
    unsigned VWidth = cast<VectorType>(C->getType())->getNumElements();

    // First scan: decide whether any element is negative, and bail out of
    // the rewrite if any element cannot be extracted.
    bool hasNegative = false;
    bool hasMissing = false;
    for (unsigned i = 0; i != VWidth; ++i) {
      Constant *Elt = C->getAggregateElement(i);
      if (!Elt) {
        hasMissing = true;
        break;
      }

      if (ConstantInt *RHS = dyn_cast<ConstantInt>(Elt))
        if (RHS->isNegative())
          hasNegative = true;
    }

    if (hasNegative && !hasMissing) {
      // Second pass: rebuild the divisor vector with each negative integer
      // element negated; non-integer elements are copied through unchanged.
      SmallVector<Constant *, 16> Elts(VWidth);
      for (unsigned i = 0; i != VWidth; ++i) {
        Elts[i] = C->getAggregateElement(i); // Handle undef, etc.
        if (ConstantInt *RHS = dyn_cast<ConstantInt>(Elts[i])) {
          if (RHS->isNegative())
            Elts[i] = cast<ConstantInt>(ConstantExpr::getNeg(RHS));
        }
      }

      // Constants are uniqued, so a pointer compare detects "no change"
      // (e.g. every negative element was INT_MIN, which negates to itself).
      Constant *NewRHSV = ConstantVector::get(Elts);
      if (NewRHSV != C) // Don't loop on -MININT
        return replaceOperand(I, 1, NewRHSV);
    }
  }

  return nullptr;
}
| 1535 | |
| 1536 | Instruction *InstCombiner::visitFRem(BinaryOperator &I) { |
| Sanjay Patel | 7b0fc75 | 2018-06-21 17:06:36 +0000 | [diff] [blame] | 1537 | if (Value *V = SimplifyFRemInst(I.getOperand(0), I.getOperand(1), |
| 1538 | I.getFastMathFlags(), |
| Craig Topper | a420562 | 2017-06-09 03:21:29 +0000 | [diff] [blame] | 1539 | SQ.getWithInstruction(&I))) |
| Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1540 | return replaceInstUsesWith(I, V); |
| Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1541 | |
| Sanjay Patel | 79dceb2 | 2018-10-03 15:20:58 +0000 | [diff] [blame] | 1542 | if (Instruction *X = foldVectorBinop(I)) |
| Sanjay Patel | bbc6d60 | 2018-06-02 16:27:44 +0000 | [diff] [blame] | 1543 | return X; |
| 1544 | |
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1545 | return nullptr; |
| Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1546 | } |