//===- InstructionSimplify.cpp - Fold instruction operands ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements routines for folding instructions into simpler forms
// that do not require creating new instructions. This does constant folding
// ("add i32 1, 1" -> "2") but can also handle non-constant operands, either
// returning a constant ("and i32 %x, 0" -> "0") or an already existing value
// ("and i32 %x, %x" -> "%x"). All operands are assumed to have already been
// simplified: this is usually true and assuming it simplifies the logic (if
// they have not been simplified then the results are correct but possibly
// suboptimal).
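//
// A typical caller reaches these routines through the public entry points
// declared in InstructionSimplify.h, roughly like this (an illustrative
// sketch, not code from this file):
//
//   if (Value *V = SimplifyInstruction(&I, SQ.getWithInstruction(&I)))
//     I.replaceAllUsesWith(V);
//
// where SQ is a SimplifyQuery carrying the DataLayout and whatever optional
// analyses (dominator tree, assumption cache, ...) the caller has available.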
Chris Lattner | 084a1b5 | 2009-11-09 22:57:59 +0000 | [diff] [blame] | 16 | // |
| 17 | //===----------------------------------------------------------------------===// |

#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/CmpInstAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/LoopAnalysisManager.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Analysis/VectorUtils.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Support/KnownBits.h"
#include <algorithm>
using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "instsimplify"

enum { RecursionLimit = 3 };

STATISTIC(NumExpand, "Number of expansions");
STATISTIC(NumReassoc, "Number of reassociations");

static Value *SimplifyAndInst(Value *, Value *, const SimplifyQuery &, unsigned);
static Value *simplifyUnOp(unsigned, Value *, const SimplifyQuery &, unsigned);
static Value *simplifyFPUnOp(unsigned, Value *, const FastMathFlags &,
                             const SimplifyQuery &, unsigned);
static Value *SimplifyBinOp(unsigned, Value *, Value *, const SimplifyQuery &,
                            unsigned);
static Value *SimplifyBinOp(unsigned, Value *, Value *, const FastMathFlags &,
                            const SimplifyQuery &, unsigned);
static Value *SimplifyCmpInst(unsigned, Value *, Value *, const SimplifyQuery &,
                              unsigned);
static Value *SimplifyICmpInst(unsigned Predicate, Value *LHS, Value *RHS,
                               const SimplifyQuery &Q, unsigned MaxRecurse);
static Value *SimplifyOrInst(Value *, Value *, const SimplifyQuery &, unsigned);
static Value *SimplifyXorInst(Value *, Value *, const SimplifyQuery &, unsigned);
static Value *SimplifyCastInst(unsigned, Value *, Type *,
                               const SimplifyQuery &, unsigned);
static Value *SimplifyGEPInst(Type *, ArrayRef<Value *>, const SimplifyQuery &,
                              unsigned);
static Value *foldSelectWithBinaryOp(Value *Cond, Value *TrueVal,
                                     Value *FalseVal) {
  BinaryOperator::BinaryOps BinOpCode;
  if (auto *BO = dyn_cast<BinaryOperator>(Cond))
    BinOpCode = BO->getOpcode();
  else
    return nullptr;

  CmpInst::Predicate ExpectedPred, Pred1, Pred2;
  if (BinOpCode == BinaryOperator::Or) {
    ExpectedPred = ICmpInst::ICMP_NE;
  } else if (BinOpCode == BinaryOperator::And) {
    ExpectedPred = ICmpInst::ICMP_EQ;
  } else
    return nullptr;

  // %A = icmp eq %TV, %FV
  // %B = icmp eq %X, %Y (and one of these is a select operand)
  // %C = and %A, %B
  // %D = select %C, %TV, %FV
  // -->
  // %FV

  // %A = icmp ne %TV, %FV
  // %B = icmp ne %X, %Y (and one of these is a select operand)
  // %C = or %A, %B
  // %D = select %C, %TV, %FV
  // -->
  // %TV
  Value *X, *Y;
  if (!match(Cond, m_c_BinOp(m_c_ICmp(Pred1, m_Specific(TrueVal),
                                      m_Specific(FalseVal)),
                             m_ICmp(Pred2, m_Value(X), m_Value(Y)))) ||
      Pred1 != Pred2 || Pred1 != ExpectedPred)
    return nullptr;

  if (X == TrueVal || X == FalseVal || Y == TrueVal || Y == FalseVal)
    return BinOpCode == BinaryOperator::Or ? TrueVal : FalseVal;

  return nullptr;
}

/// For a boolean type or a vector of boolean type, return false or a vector
/// with every element false.
static Constant *getFalse(Type *Ty) {
  return ConstantInt::getFalse(Ty);
}

/// For a boolean type or a vector of boolean type, return true or a vector
/// with every element true.
static Constant *getTrue(Type *Ty) {
  return ConstantInt::getTrue(Ty);
}

/// isSameCompare - Is V equivalent to the comparison "LHS Pred RHS"?
static bool isSameCompare(Value *V, CmpInst::Predicate Pred, Value *LHS,
                          Value *RHS) {
  CmpInst *Cmp = dyn_cast<CmpInst>(V);
  if (!Cmp)
    return false;
  CmpInst::Predicate CPred = Cmp->getPredicate();
  Value *CLHS = Cmp->getOperand(0), *CRHS = Cmp->getOperand(1);
  if (CPred == Pred && CLHS == LHS && CRHS == RHS)
    return true;
  return CPred == CmpInst::getSwappedPredicate(Pred) && CLHS == RHS &&
         CRHS == LHS;
}

/// Does the given value dominate the specified phi node?
static bool valueDominatesPHI(Value *V, PHINode *P, const DominatorTree *DT) {
  Instruction *I = dyn_cast<Instruction>(V);
  if (!I)
    // Arguments and constants dominate all instructions.
    return true;

  // If we are processing instructions (and/or basic blocks) that have not been
  // fully added to a function, the parent nodes may still be null. Simply
  // return the conservative answer in these cases.
  if (!I->getParent() || !P->getParent() || !I->getFunction())
    return false;

  // If we have a DominatorTree then do a precise test.
  if (DT)
    return DT->dominates(I, P);

  // Otherwise, if the instruction is in the entry block and is not an invoke,
  // then it obviously dominates all phi nodes.
  if (I->getParent() == &I->getFunction()->getEntryBlock() &&
      !isa<InvokeInst>(I))
    return true;

  return false;
}

/// Simplify "A op (B op' C)" by distributing op over op', turning it into
/// "(A op B) op' (A op C)". Here "op" is given by Opcode and "op'" is
/// given by OpcodeToExpand, while "A" corresponds to LHS and "B op' C" to RHS.
/// Also performs the transform "(A op' B) op C" -> "(A op C) op' (B op C)".
/// Returns the simplified value, or null if no simplification was performed.
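///
/// For example, with Opcode == And and OpcodeToExpand == Or, this tries to
/// rewrite "(A | B) & C" as "(A & C) | (B & C)", keeping the result only when
/// both halves simplify and the recombined expression either simplifies or is
/// the original value.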
static Value *ExpandBinOp(Instruction::BinaryOps Opcode, Value *LHS, Value *RHS,
                          Instruction::BinaryOps OpcodeToExpand,
                          const SimplifyQuery &Q, unsigned MaxRecurse) {
  // Recursion is always used, so bail out at once if we already hit the limit.
  if (!MaxRecurse--)
    return nullptr;

  // Check whether the expression has the form "(A op' B) op C".
  if (BinaryOperator *Op0 = dyn_cast<BinaryOperator>(LHS))
    if (Op0->getOpcode() == OpcodeToExpand) {
      // It does!  Try turning it into "(A op C) op' (B op C)".
      Value *A = Op0->getOperand(0), *B = Op0->getOperand(1), *C = RHS;
      // Do "A op C" and "B op C" both simplify?
      if (Value *L = SimplifyBinOp(Opcode, A, C, Q, MaxRecurse))
        if (Value *R = SimplifyBinOp(Opcode, B, C, Q, MaxRecurse)) {
          // They do! Return "L op' R" if it simplifies or is already available.
          // If "L op' R" equals "A op' B" then "L op' R" is just the LHS.
          if ((L == A && R == B) || (Instruction::isCommutative(OpcodeToExpand)
                                     && L == B && R == A)) {
            ++NumExpand;
            return LHS;
          }
          // Otherwise return "L op' R" if it simplifies.
          if (Value *V = SimplifyBinOp(OpcodeToExpand, L, R, Q, MaxRecurse)) {
            ++NumExpand;
            return V;
          }
        }
    }

  // Check whether the expression has the form "A op (B op' C)".
  if (BinaryOperator *Op1 = dyn_cast<BinaryOperator>(RHS))
    if (Op1->getOpcode() == OpcodeToExpand) {
      // It does!  Try turning it into "(A op B) op' (A op C)".
      Value *A = LHS, *B = Op1->getOperand(0), *C = Op1->getOperand(1);
      // Do "A op B" and "A op C" both simplify?
      if (Value *L = SimplifyBinOp(Opcode, A, B, Q, MaxRecurse))
        if (Value *R = SimplifyBinOp(Opcode, A, C, Q, MaxRecurse)) {
          // They do! Return "L op' R" if it simplifies or is already available.
          // If "L op' R" equals "B op' C" then "L op' R" is just the RHS.
          if ((L == B && R == C) || (Instruction::isCommutative(OpcodeToExpand)
                                     && L == C && R == B)) {
            ++NumExpand;
            return RHS;
          }
          // Otherwise return "L op' R" if it simplifies.
          if (Value *V = SimplifyBinOp(OpcodeToExpand, L, R, Q, MaxRecurse)) {
            ++NumExpand;
            return V;
          }
        }
    }

  return nullptr;
}

/// Generic simplifications for associative binary operations.
/// Returns the simpler value, or null if none was found.
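///
/// For example, given "(A & B) & B", the inner query "B & B" simplifies to B,
/// and "A & B" is already available as the original left-hand operand, so the
/// whole expression folds to "A & B" without creating a new instruction.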
static Value *SimplifyAssociativeBinOp(Instruction::BinaryOps Opcode,
                                       Value *LHS, Value *RHS,
                                       const SimplifyQuery &Q,
                                       unsigned MaxRecurse) {
  assert(Instruction::isAssociative(Opcode) && "Not an associative operation!");

  // Recursion is always used, so bail out at once if we already hit the limit.
  if (!MaxRecurse--)
    return nullptr;

  BinaryOperator *Op0 = dyn_cast<BinaryOperator>(LHS);
  BinaryOperator *Op1 = dyn_cast<BinaryOperator>(RHS);

  // Transform: "(A op B) op C" ==> "A op (B op C)" if it simplifies completely.
  if (Op0 && Op0->getOpcode() == Opcode) {
    Value *A = Op0->getOperand(0);
    Value *B = Op0->getOperand(1);
    Value *C = RHS;

    // Does "B op C" simplify?
    if (Value *V = SimplifyBinOp(Opcode, B, C, Q, MaxRecurse)) {
      // It does!  Return "A op V" if it simplifies or is already available.
      // If V equals B then "A op V" is just the LHS.
      if (V == B) return LHS;
      // Otherwise return "A op V" if it simplifies.
      if (Value *W = SimplifyBinOp(Opcode, A, V, Q, MaxRecurse)) {
        ++NumReassoc;
        return W;
      }
    }
  }

  // Transform: "A op (B op C)" ==> "(A op B) op C" if it simplifies completely.
  if (Op1 && Op1->getOpcode() == Opcode) {
    Value *A = LHS;
    Value *B = Op1->getOperand(0);
    Value *C = Op1->getOperand(1);

    // Does "A op B" simplify?
    if (Value *V = SimplifyBinOp(Opcode, A, B, Q, MaxRecurse)) {
      // It does!  Return "V op C" if it simplifies or is already available.
      // If V equals B then "V op C" is just the RHS.
      if (V == B) return RHS;
      // Otherwise return "V op C" if it simplifies.
      if (Value *W = SimplifyBinOp(Opcode, V, C, Q, MaxRecurse)) {
        ++NumReassoc;
        return W;
      }
    }
  }

  // The remaining transforms require commutativity as well as associativity.
  if (!Instruction::isCommutative(Opcode))
    return nullptr;

  // Transform: "(A op B) op C" ==> "(C op A) op B" if it simplifies completely.
  if (Op0 && Op0->getOpcode() == Opcode) {
    Value *A = Op0->getOperand(0);
    Value *B = Op0->getOperand(1);
    Value *C = RHS;

    // Does "C op A" simplify?
    if (Value *V = SimplifyBinOp(Opcode, C, A, Q, MaxRecurse)) {
      // It does!  Return "V op B" if it simplifies or is already available.
      // If V equals A then "V op B" is just the LHS.
      if (V == A) return LHS;
      // Otherwise return "V op B" if it simplifies.
      if (Value *W = SimplifyBinOp(Opcode, V, B, Q, MaxRecurse)) {
        ++NumReassoc;
        return W;
      }
    }
  }

  // Transform: "A op (B op C)" ==> "B op (C op A)" if it simplifies completely.
  if (Op1 && Op1->getOpcode() == Opcode) {
    Value *A = LHS;
    Value *B = Op1->getOperand(0);
    Value *C = Op1->getOperand(1);

    // Does "C op A" simplify?
    if (Value *V = SimplifyBinOp(Opcode, C, A, Q, MaxRecurse)) {
      // It does!  Return "B op V" if it simplifies or is already available.
      // If V equals C then "B op V" is just the RHS.
      if (V == C) return RHS;
      // Otherwise return "B op V" if it simplifies.
      if (Value *W = SimplifyBinOp(Opcode, B, V, Q, MaxRecurse)) {
        ++NumReassoc;
        return W;
      }
    }
  }

  return nullptr;
}

/// In the case of a binary operation with a select instruction as an operand,
/// try to simplify the binop by seeing whether evaluating it on both branches
/// of the select results in the same value. Returns the common value if so,
/// otherwise returns null.
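///
/// For example, "mul (select %c, i32 7, i32 0), 0" evaluates to 0 on both
/// arms of the select, so the common value 0 is returned for the multiply.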
static Value *ThreadBinOpOverSelect(Instruction::BinaryOps Opcode, Value *LHS,
                                    Value *RHS, const SimplifyQuery &Q,
                                    unsigned MaxRecurse) {
  // Recursion is always used, so bail out at once if we already hit the limit.
  if (!MaxRecurse--)
    return nullptr;

  SelectInst *SI;
  if (isa<SelectInst>(LHS)) {
    SI = cast<SelectInst>(LHS);
  } else {
    assert(isa<SelectInst>(RHS) && "No select instruction operand!");
    SI = cast<SelectInst>(RHS);
  }

  // Evaluate the BinOp on the true and false branches of the select.
  Value *TV;
  Value *FV;
  if (SI == LHS) {
    TV = SimplifyBinOp(Opcode, SI->getTrueValue(), RHS, Q, MaxRecurse);
    FV = SimplifyBinOp(Opcode, SI->getFalseValue(), RHS, Q, MaxRecurse);
  } else {
    TV = SimplifyBinOp(Opcode, LHS, SI->getTrueValue(), Q, MaxRecurse);
    FV = SimplifyBinOp(Opcode, LHS, SI->getFalseValue(), Q, MaxRecurse);
  }

  // If they simplified to the same value, then return the common value.
  // If they both failed to simplify then return null.
  if (TV == FV)
    return TV;

  // If one branch simplified to undef, return the other one.
  if (TV && isa<UndefValue>(TV))
    return FV;
  if (FV && isa<UndefValue>(FV))
    return TV;

  // If applying the operation did not change the true and false select values,
  // then the result of the binop is the select itself.
  if (TV == SI->getTrueValue() && FV == SI->getFalseValue())
    return SI;

  // If one branch simplified and the other did not, and the simplified
  // value is equal to the unsimplified one, return the simplified value.
  // For example, select (cond, X, X & Z) & Z -> X & Z.
  if ((FV && !TV) || (TV && !FV)) {
    // Check that the simplified value has the form "X op Y" where "op" is the
    // same as the original operation.
    Instruction *Simplified = dyn_cast<Instruction>(FV ? FV : TV);
    if (Simplified && Simplified->getOpcode() == unsigned(Opcode)) {
      // The value that didn't simplify is "UnsimplifiedLHS op UnsimplifiedRHS".
      // We already know that "op" is the same as for the simplified value.  See
      // if the operands match too.  If so, return the simplified value.
      Value *UnsimplifiedBranch = FV ? SI->getTrueValue() : SI->getFalseValue();
      Value *UnsimplifiedLHS = SI == LHS ? UnsimplifiedBranch : LHS;
      Value *UnsimplifiedRHS = SI == LHS ? RHS : UnsimplifiedBranch;
      if (Simplified->getOperand(0) == UnsimplifiedLHS &&
          Simplified->getOperand(1) == UnsimplifiedRHS)
        return Simplified;
      if (Simplified->isCommutative() &&
          Simplified->getOperand(1) == UnsimplifiedLHS &&
          Simplified->getOperand(0) == UnsimplifiedRHS)
        return Simplified;
    }
  }

  return nullptr;
}

/// In the case of a comparison with a select instruction, try to simplify the
/// comparison by seeing whether both branches of the select result in the same
/// value. Returns the common value if so, otherwise returns null.
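///
/// For example, "icmp ult (select %c, i32 3, i32 5), 10" compares true on
/// both arms of the select, so the whole comparison folds to true.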
static Value *ThreadCmpOverSelect(CmpInst::Predicate Pred, Value *LHS,
                                  Value *RHS, const SimplifyQuery &Q,
                                  unsigned MaxRecurse) {
  // Recursion is always used, so bail out at once if we already hit the limit.
  if (!MaxRecurse--)
    return nullptr;

  // Make sure the select is on the LHS.
  if (!isa<SelectInst>(LHS)) {
    std::swap(LHS, RHS);
    Pred = CmpInst::getSwappedPredicate(Pred);
  }
  assert(isa<SelectInst>(LHS) && "Not comparing with a select instruction!");
  SelectInst *SI = cast<SelectInst>(LHS);
  Value *Cond = SI->getCondition();
  Value *TV = SI->getTrueValue();
  Value *FV = SI->getFalseValue();

  // Now that we have "cmp select(Cond, TV, FV), RHS", analyse it.
  // Does "cmp TV, RHS" simplify?
  Value *TCmp = SimplifyCmpInst(Pred, TV, RHS, Q, MaxRecurse);
  if (TCmp == Cond) {
    // It not only simplified, it simplified to the select condition.  Replace
    // it with 'true'.
    TCmp = getTrue(Cond->getType());
  } else if (!TCmp) {
    // It didn't simplify.  However if "cmp TV, RHS" is equal to the select
    // condition then we can replace it with 'true'.  Otherwise give up.
    if (!isSameCompare(Cond, Pred, TV, RHS))
      return nullptr;
    TCmp = getTrue(Cond->getType());
  }

  // Does "cmp FV, RHS" simplify?
  Value *FCmp = SimplifyCmpInst(Pred, FV, RHS, Q, MaxRecurse);
  if (FCmp == Cond) {
    // It not only simplified, it simplified to the select condition.  Replace
    // it with 'false'.
    FCmp = getFalse(Cond->getType());
  } else if (!FCmp) {
    // It didn't simplify.  However if "cmp FV, RHS" is equal to the select
    // condition then we can replace it with 'false'.  Otherwise give up.
    if (!isSameCompare(Cond, Pred, FV, RHS))
      return nullptr;
    FCmp = getFalse(Cond->getType());
  }

  // If both sides simplified to the same value, then use it as the result of
  // the original comparison.
  if (TCmp == FCmp)
    return TCmp;

  // The remaining cases only make sense if the select condition has the same
  // type as the result of the comparison, so bail out if this is not so.
  if (Cond->getType()->isVectorTy() != RHS->getType()->isVectorTy())
    return nullptr;
  // If the false value simplified to false, then the result of the compare
  // is equal to "Cond && TCmp".  This also catches the case when the false
  // value simplified to false and the true value to true, returning "Cond".
  if (match(FCmp, m_Zero()))
    if (Value *V = SimplifyAndInst(Cond, TCmp, Q, MaxRecurse))
      return V;
  // If the true value simplified to true, then the result of the compare
  // is equal to "Cond || FCmp".
  if (match(TCmp, m_One()))
    if (Value *V = SimplifyOrInst(Cond, FCmp, Q, MaxRecurse))
      return V;
  // Finally, if the false value simplified to true and the true value to
  // false, then the result of the compare is equal to "!Cond".
  if (match(FCmp, m_One()) && match(TCmp, m_Zero()))
    if (Value *V =
          SimplifyXorInst(Cond, Constant::getAllOnesValue(Cond->getType()),
                          Q, MaxRecurse))
      return V;

  return nullptr;
}

/// In the case of a binary operation with an operand that is a PHI instruction,
/// try to simplify the binop by seeing whether evaluating it on the incoming
/// phi values yields the same result for every value. If so returns the common
/// value, otherwise returns null.
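///
/// For example, if %p is "phi i32 [ %x, %a ], [ 0, %b ]", then "or %p, %x"
/// simplifies to %x for each incoming value, so the common value %x is
/// returned.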
static Value *ThreadBinOpOverPHI(Instruction::BinaryOps Opcode, Value *LHS,
                                 Value *RHS, const SimplifyQuery &Q,
                                 unsigned MaxRecurse) {
  // Recursion is always used, so bail out at once if we already hit the limit.
  if (!MaxRecurse--)
    return nullptr;

  PHINode *PI;
  if (isa<PHINode>(LHS)) {
    PI = cast<PHINode>(LHS);
    // Bail out if RHS and the phi may be mutually interdependent due to a loop.
    if (!valueDominatesPHI(RHS, PI, Q.DT))
      return nullptr;
  } else {
    assert(isa<PHINode>(RHS) && "No PHI instruction operand!");
    PI = cast<PHINode>(RHS);
    // Bail out if LHS and the phi may be mutually interdependent due to a loop.
    if (!valueDominatesPHI(LHS, PI, Q.DT))
      return nullptr;
  }

  // Evaluate the BinOp on the incoming phi values.
  Value *CommonValue = nullptr;
  for (Value *Incoming : PI->incoming_values()) {
    // If the incoming value is the phi node itself, it can safely be skipped.
    if (Incoming == PI) continue;
    Value *V = PI == LHS ?
      SimplifyBinOp(Opcode, Incoming, RHS, Q, MaxRecurse) :
      SimplifyBinOp(Opcode, LHS, Incoming, Q, MaxRecurse);
    // If the operation failed to simplify, or simplified to a different value
    // than it did for a previous incoming value, then give up.
    if (!V || (CommonValue && V != CommonValue))
      return nullptr;
    CommonValue = V;
  }

  return CommonValue;
}

/// In the case of a comparison with a PHI instruction, try to simplify the
/// comparison by seeing whether comparing with all of the incoming phi values
/// yields the same result every time. If so returns the common result,
/// otherwise returns null.
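///
/// For example, if %p is "phi i32 [ 1, %a ], [ 2, %b ]", then "icmp ult %p, 4"
/// folds to true for every incoming value, so the common result true is
/// returned.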
static Value *ThreadCmpOverPHI(CmpInst::Predicate Pred, Value *LHS, Value *RHS,
                               const SimplifyQuery &Q, unsigned MaxRecurse) {
  // Recursion is always used, so bail out at once if we already hit the limit.
  if (!MaxRecurse--)
    return nullptr;

  // Make sure the phi is on the LHS.
  if (!isa<PHINode>(LHS)) {
    std::swap(LHS, RHS);
    Pred = CmpInst::getSwappedPredicate(Pred);
  }
  assert(isa<PHINode>(LHS) && "Not comparing with a phi instruction!");
  PHINode *PI = cast<PHINode>(LHS);

  // Bail out if RHS and the phi may be mutually interdependent due to a loop.
  if (!valueDominatesPHI(RHS, PI, Q.DT))
    return nullptr;

  // Evaluate the comparison on the incoming phi values.
  Value *CommonValue = nullptr;
  for (Value *Incoming : PI->incoming_values()) {
    // If the incoming value is the phi node itself, it can safely be skipped.
    if (Incoming == PI) continue;
    Value *V = SimplifyCmpInst(Pred, Incoming, RHS, Q, MaxRecurse);
    // If the operation failed to simplify, or simplified to a different value
    // than it did for a previous incoming value, then give up.
    if (!V || (CommonValue && V != CommonValue))
      return nullptr;
    CommonValue = V;
  }

  return CommonValue;
}

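/// If both operands are constant, constant fold the binary operation;
/// otherwise, for a commutative opcode, canonicalize a constant Op0 over to
/// Op1 so that later pattern matches only have to check the right-hand side.
/// For example, "add i32 7, %x" is handled as "add i32 %x, 7". Returns null
/// when no constant folding was done.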
static Constant *foldOrCommuteConstant(Instruction::BinaryOps Opcode,
                                       Value *&Op0, Value *&Op1,
                                       const SimplifyQuery &Q) {
  if (auto *CLHS = dyn_cast<Constant>(Op0)) {
    if (auto *CRHS = dyn_cast<Constant>(Op1))
      return ConstantFoldBinaryOpOperands(Opcode, CLHS, CRHS, Q.DL);

    // Canonicalize the constant to the RHS if this is a commutative operation.
    if (Instruction::isCommutative(Opcode))
      std::swap(Op0, Op1);
  }
  return nullptr;
}

/// Given operands for an Add, see if we can fold the result.
/// If not, this returns null.
static Value *SimplifyAddInst(Value *Op0, Value *Op1, bool IsNSW, bool IsNUW,
                              const SimplifyQuery &Q, unsigned MaxRecurse) {
  if (Constant *C = foldOrCommuteConstant(Instruction::Add, Op0, Op1, Q))
    return C;

  // X + undef -> undef
  if (match(Op1, m_Undef()))
    return Op1;

  // X + 0 -> X
  if (match(Op1, m_Zero()))
    return Op0;

  // If the two operands are known to be negations of each other, return 0.
  if (isKnownNegation(Op0, Op1))
    return Constant::getNullValue(Op0->getType());

  // X + (Y - X) -> Y
  // (Y - X) + X -> Y
  // Eg: X + -X -> 0
  Value *Y = nullptr;
  if (match(Op1, m_Sub(m_Value(Y), m_Specific(Op0))) ||
      match(Op0, m_Sub(m_Value(Y), m_Specific(Op1))))
    return Y;

  // X + ~X -> -1   since   ~X = -X-1
  Type *Ty = Op0->getType();
  if (match(Op0, m_Not(m_Specific(Op1))) ||
      match(Op1, m_Not(m_Specific(Op0))))
    return Constant::getAllOnesValue(Ty);

  // add nsw/nuw (xor Y, signmask), signmask --> Y
  // The no-wrapping add guarantees that the top bit will be set by the add.
  // Therefore, the xor must be clearing the already set sign bit of Y.
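  // For example (i8, nuw case): "add nuw (xor %y, 128), 128" can only avoid
  // unsigned overflow if the top bit of "%y ^ 128" is clear, meaning the sign
  // bit of %y was set, so the xor and the add cancel and the result is %y.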
  if ((IsNSW || IsNUW) && match(Op1, m_SignMask()) &&
      match(Op0, m_Xor(m_Value(Y), m_SignMask())))
    return Y;

  // add nuw %x, -1  ->  -1, because %x can only be 0.
  if (IsNUW && match(Op1, m_AllOnes()))
    return Op1; // Which is -1.

  // i1 add -> xor.
  if (MaxRecurse && Op0->getType()->isIntOrIntVectorTy(1))
    if (Value *V = SimplifyXorInst(Op0, Op1, Q, MaxRecurse-1))
      return V;

  // Try some generic simplifications for associative operations.
  if (Value *V = SimplifyAssociativeBinOp(Instruction::Add, Op0, Op1, Q,
                                          MaxRecurse))
    return V;

  // Threading Add over selects and phi nodes is pointless, so don't bother.
  // Threading over the select in "A + select(cond, B, C)" means evaluating
  // "A+B" and "A+C" and seeing if they are equal; but they are equal if and
  // only if B and C are equal.  If B and C are equal then (since we assume
  // that operands have already been simplified) "select(cond, B, C)" should
  // have been simplified to the common value of B and C already.  Analysing
  // "A+B" and "A+C" thus gains nothing, but costs compile time.  Similarly
  // for threading over phi nodes.

  return nullptr;
}

Value *llvm::SimplifyAddInst(Value *Op0, Value *Op1, bool IsNSW, bool IsNUW,
                             const SimplifyQuery &Query) {
  return ::SimplifyAddInst(Op0, Op1, IsNSW, IsNUW, Query, RecursionLimit);
}

/// Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns 0 if V is not a pointer, and returns the constant '0' if there are
/// no constant offsets applied.
///
/// This is very similar to GetPointerBaseWithConstantOffset except it doesn't
/// follow non-inbounds geps. This allows it to remain usable for icmp ult/etc.
/// folding.
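///
/// For example, if V is "getelementptr inbounds i32, i32* %base, i64 3", V is
/// rewritten to %base and the returned constant is the byte offset 12 (three
/// 4-byte i32 elements).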
static Constant *stripAndComputeConstantOffsets(const DataLayout &DL, Value *&V,
                                                bool AllowNonInbounds = false) {
  assert(V->getType()->isPtrOrPtrVectorTy());

  Type *IntPtrTy = DL.getIntPtrType(V->getType())->getScalarType();
  APInt Offset = APInt::getNullValue(IntPtrTy->getIntegerBitWidth());

  V = V->stripAndAccumulateConstantOffsets(DL, Offset, AllowNonInbounds);
  // As that strip may trace through `addrspacecast`, need to sext or trunc
  // the offset calculated.
  IntPtrTy = DL.getIntPtrType(V->getType())->getScalarType();
  Offset = Offset.sextOrTrunc(IntPtrTy->getIntegerBitWidth());

  Constant *OffsetIntPtr = ConstantInt::get(IntPtrTy, Offset);
  if (V->getType()->isVectorTy())
    return ConstantVector::getSplat(V->getType()->getVectorNumElements(),
                                    OffsetIntPtr);
  return OffsetIntPtr;
}

/// Compute the constant difference between two pointer values.
/// If the difference is not a constant, returns zero.
static Constant *computePointerDifference(const DataLayout &DL, Value *LHS,
                                          Value *RHS) {
  Constant *LHSOffset = stripAndComputeConstantOffsets(DL, LHS);
  Constant *RHSOffset = stripAndComputeConstantOffsets(DL, RHS);

  // If LHS and RHS are not related via constant offsets to the same base
  // value, there is nothing we can do here.
  if (LHS != RHS)
    return nullptr;

  // Otherwise, the difference of LHS - RHS can be computed as:
  //    LHS - RHS
  //  = (LHSOffset + Base) - (RHSOffset + Base)
  //  = LHSOffset - RHSOffset
  return ConstantExpr::getSub(LHSOffset, RHSOffset);
}

/// Given operands for a Sub, see if we can fold the result.
/// If not, this returns null.
static Value *SimplifySubInst(Value *Op0, Value *Op1, bool isNSW, bool isNUW,
                              const SimplifyQuery &Q, unsigned MaxRecurse) {
  if (Constant *C = foldOrCommuteConstant(Instruction::Sub, Op0, Op1, Q))
    return C;

  // X - undef -> undef
  // undef - X -> undef
  if (match(Op0, m_Undef()) || match(Op1, m_Undef()))
    return UndefValue::get(Op0->getType());

  // X - 0 -> X
  if (match(Op1, m_Zero()))
    return Op0;

  // X - X -> 0
  if (Op0 == Op1)
    return Constant::getNullValue(Op0->getType());

  // Is this a negation?
  if (match(Op0, m_Zero())) {
    // 0 - X -> 0 if the sub is NUW.
    if (isNUW)
      return Constant::getNullValue(Op0->getType());

    KnownBits Known = computeKnownBits(Op1, Q.DL, 0, Q.AC, Q.CxtI, Q.DT);
    if (Known.Zero.isMaxSignedValue()) {
      // Op1 is either 0 or the minimum signed value. If the sub is NSW, then
      // Op1 must be 0 because negating the minimum signed value is undefined.
      if (isNSW)
        return Constant::getNullValue(Op0->getType());

      // 0 - X -> X if X is 0 or the minimum signed value.
      return Op1;
    }
  }

  // (X + Y) - Z -> X + (Y - Z) or Y + (X - Z) if everything simplifies.
  // For example, (X + Y) - Y -> X; (Y + X) - Y -> X
  Value *X = nullptr, *Y = nullptr, *Z = Op1;
  if (MaxRecurse && match(Op0, m_Add(m_Value(X), m_Value(Y)))) { // (X + Y) - Z
    // See if "V === Y - Z" simplifies.
    if (Value *V = SimplifyBinOp(Instruction::Sub, Y, Z, Q, MaxRecurse-1))
      // It does!  Now see if "X + V" simplifies.
      if (Value *W = SimplifyBinOp(Instruction::Add, X, V, Q, MaxRecurse-1)) {
        // It does, we successfully reassociated!
        ++NumReassoc;
        return W;
      }
    // See if "V === X - Z" simplifies.
    if (Value *V = SimplifyBinOp(Instruction::Sub, X, Z, Q, MaxRecurse-1))
      // It does!  Now see if "Y + V" simplifies.
      if (Value *W = SimplifyBinOp(Instruction::Add, Y, V, Q, MaxRecurse-1)) {
        // It does, we successfully reassociated!
        ++NumReassoc;
        return W;
      }
  }

  // X - (Y + Z) -> (X - Y) - Z or (X - Z) - Y if everything simplifies.
  // For example, X - (X + 1) -> -1
  X = Op0;
  if (MaxRecurse && match(Op1, m_Add(m_Value(Y), m_Value(Z)))) { // X - (Y + Z)
    // See if "V === X - Y" simplifies.
    if (Value *V = SimplifyBinOp(Instruction::Sub, X, Y, Q, MaxRecurse-1))
      // It does!  Now see if "V - Z" simplifies.
      if (Value *W = SimplifyBinOp(Instruction::Sub, V, Z, Q, MaxRecurse-1)) {
        // It does, we successfully reassociated!
        ++NumReassoc;
        return W;
      }
    // See if "V === X - Z" simplifies.
    if (Value *V = SimplifyBinOp(Instruction::Sub, X, Z, Q, MaxRecurse-1))
      // It does!  Now see if "V - Y" simplifies.
      if (Value *W = SimplifyBinOp(Instruction::Sub, V, Y, Q, MaxRecurse-1)) {
        // It does, we successfully reassociated!
        ++NumReassoc;
        return W;
      }
  }

  // Z - (X - Y) -> (Z - X) + Y if everything simplifies.
  // For example, X - (X - Y) -> Y.
| 778 | Z = Op0; |
Duncan Sands | d6f1a95 | 2011-01-14 15:26:10 +0000 | [diff] [blame] | 779 | if (MaxRecurse && match(Op1, m_Sub(m_Value(X), m_Value(Y)))) // Z - (X - Y) |
| 780 | // See if "V === Z - X" simplifies. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 781 | if (Value *V = SimplifyBinOp(Instruction::Sub, Z, X, Q, MaxRecurse-1)) |
Duncan Sands | 99589d0 | 2011-01-18 11:50:19 +0000 | [diff] [blame] | 782 | // It does! Now see if "V + Y" simplifies. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 783 | if (Value *W = SimplifyBinOp(Instruction::Add, V, Y, Q, MaxRecurse-1)) { |
Duncan Sands | d6f1a95 | 2011-01-14 15:26:10 +0000 | [diff] [blame] | 784 | // It does, we successfully reassociated! |
| 785 | ++NumReassoc; |
| 786 | return W; |
| 787 | } |
| 788 | |
Duncan Sands | 395ac42d | 2012-03-13 14:07:05 +0000 | [diff] [blame] | 789 | // trunc(X) - trunc(Y) -> trunc(X - Y) if everything simplifies. |
| 790 | if (MaxRecurse && match(Op0, m_Trunc(m_Value(X))) && |
| 791 | match(Op1, m_Trunc(m_Value(Y)))) |
| 792 | if (X->getType() == Y->getType()) |
| 793 | // See if "V === X - Y" simplifies. |
| 794 | if (Value *V = SimplifyBinOp(Instruction::Sub, X, Y, Q, MaxRecurse-1)) |
| 795 | // It does! Now see if "trunc V" simplifies. |
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 796 | if (Value *W = SimplifyCastInst(Instruction::Trunc, V, Op0->getType(), |
| 797 | Q, MaxRecurse - 1)) |
Duncan Sands | 395ac42d | 2012-03-13 14:07:05 +0000 | [diff] [blame] | 798 | // It does, return the simplified "trunc V". |
| 799 | return W; |
| 800 | |
| 801 | // Variations on GEP(base, I, ...) - GEP(base, i, ...) -> GEP(null, I-i, ...). |
Dan Gohman | 18c77a1 | 2013-01-31 02:50:36 +0000 | [diff] [blame] | 802 | if (match(Op0, m_PtrToInt(m_Value(X))) && |
Duncan Sands | 395ac42d | 2012-03-13 14:07:05 +0000 | [diff] [blame] | 803 | match(Op1, m_PtrToInt(m_Value(Y)))) |
Rafael Espindola | 37dc9e1 | 2014-02-21 00:06:31 +0000 | [diff] [blame] | 804 | if (Constant *Result = computePointerDifference(Q.DL, X, Y)) |
Duncan Sands | 395ac42d | 2012-03-13 14:07:05 +0000 | [diff] [blame] | 805 | return ConstantExpr::getIntegerCast(Result, Op0->getType(), true); |
| 806 | |
Duncan Sands | 99589d0 | 2011-01-18 11:50:19 +0000 | [diff] [blame] | 807 | // i1 sub -> xor. |
Craig Topper | fde4723 | 2017-07-09 07:04:03 +0000 | [diff] [blame] | 808 | if (MaxRecurse && Op0->getType()->isIntOrIntVectorTy(1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 809 | if (Value *V = SimplifyXorInst(Op0, Op1, Q, MaxRecurse-1)) |
Duncan Sands | 99589d0 | 2011-01-18 11:50:19 +0000 | [diff] [blame] | 810 | return V; |
| 811 | |
Duncan Sands | 0a2c4168 | 2010-12-15 14:07:39 +0000 | [diff] [blame] | 812 | // Threading Sub over selects and phi nodes is pointless, so don't bother. |
| 813 | // Threading over the select in "A - select(cond, B, C)" means evaluating |
| 814 | // "A-B" and "A-C" and seeing if they are equal; but they are equal if and |
| 815 | // only if B and C are equal. If B and C are equal then (since we assume |
| 816 | // that operands have already been simplified) "select(cond, B, C)" should |
| 817 | // have been simplified to the common value of B and C already. Analysing |
| 818 | // "A-B" and "A-C" thus gains nothing, but costs compile time. Similarly |
| 819 | // for threading over phi nodes. |
| 820 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 821 | return nullptr; |
Duncan Sands | 0a2c4168 | 2010-12-15 14:07:39 +0000 | [diff] [blame] | 822 | } |
| 823 | |
Duncan Sands | ed6d6c3 | 2010-12-20 14:47:04 +0000 | [diff] [blame] | 824 | Value *llvm::SimplifySubInst(Value *Op0, Value *Op1, bool isNSW, bool isNUW, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 825 | const SimplifyQuery &Q) { |
| 826 | return ::SimplifySubInst(Op0, Op1, isNSW, isNUW, Q, RecursionLimit); |
| 827 | } |
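| | // Illustrative usage sketch (not part of the original source): a transform such
| | // as InstCombine typically tries this query-based wrapper before creating new
| | // instructions, roughly as follows (assuming an InstCombine-style caller with a
| | // SimplifyQuery member SQ and a replaceInstUsesWith helper):
| | //   if (Value *V = SimplifySubInst(I.getOperand(0), I.getOperand(1),
| | //                                  I.hasNoSignedWrap(), I.hasNoUnsignedWrap(),
| | //                                  SQ.getWithInstruction(&I)))
| | //     return replaceInstUsesWith(I, V);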
| 828 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 829 | /// Given operands for a Mul, see if we can fold the result. |
| 830 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 831 | static Value *SimplifyMulInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 832 | unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 833 | if (Constant *C = foldOrCommuteConstant(Instruction::Mul, Op0, Op1, Q)) |
| 834 | return C; |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 835 | |
| 836 | // X * undef -> 0 |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 837 | // X * 0 -> 0 |
Sanjay Patel | 30be665 | 2018-04-22 17:07:44 +0000 | [diff] [blame] | 838 | if (match(Op1, m_CombineOr(m_Undef(), m_Zero()))) |
| 839 | return Constant::getNullValue(Op0->getType()); |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 840 | |
| 841 | // X * 1 -> X |
| 842 | if (match(Op1, m_One())) |
| 843 | return Op0; |
| 844 | |
Duncan Sands | b67edc6 | 2011-01-30 18:03:50 +0000 | [diff] [blame] | 845 | // (X / Y) * Y -> X if the division is exact. |
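| | // For example: %q = udiv exact i32 %x, %y; mul i32 %q, %y -> %x.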
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 846 | Value *X = nullptr; |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 847 | if (Q.IIQ.UseInstrInfo && |
| 848 | (match(Op0, |
| 849 | m_Exact(m_IDiv(m_Value(X), m_Specific(Op1)))) || // (X / Y) * Y |
| 850 | match(Op1, m_Exact(m_IDiv(m_Value(X), m_Specific(Op0)))))) // Y * (X / Y) |
Benjamin Kramer | 9442cd0 | 2012-01-01 17:55:30 +0000 | [diff] [blame] | 851 | return X; |
Duncan Sands | b67edc6 | 2011-01-30 18:03:50 +0000 | [diff] [blame] | 852 | |
Nick Lewycky | b89d9a4 | 2011-01-29 19:55:23 +0000 | [diff] [blame] | 853 | // i1 mul -> and. |
Craig Topper | fde4723 | 2017-07-09 07:04:03 +0000 | [diff] [blame] | 854 | if (MaxRecurse && Op0->getType()->isIntOrIntVectorTy(1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 855 | if (Value *V = SimplifyAndInst(Op0, Op1, Q, MaxRecurse-1)) |
Duncan Sands | fecc642 | 2010-12-21 15:03:43 +0000 | [diff] [blame] | 856 | return V; |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 857 | |
| 858 | // Try some generic simplifications for associative operations. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 859 | if (Value *V = SimplifyAssociativeBinOp(Instruction::Mul, Op0, Op1, Q, |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 860 | MaxRecurse)) |
| 861 | return V; |
| 862 | |
Dmitry Venikov | d2257be | 2018-01-02 05:47:42 +0000 | [diff] [blame] | 863 | // Mul distributes over Add. Try some generic simplifications based on this. |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 864 | if (Value *V = ExpandBinOp(Instruction::Mul, Op0, Op1, Instruction::Add, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 865 | Q, MaxRecurse)) |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 866 | return V; |
| 867 | |
| 868 | // If the operation is with the result of a select instruction, check whether |
| 869 | // operating on either branch of the select always yields the same value. |
| 870 | if (isa<SelectInst>(Op0) || isa<SelectInst>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 871 | if (Value *V = ThreadBinOpOverSelect(Instruction::Mul, Op0, Op1, Q, |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 872 | MaxRecurse)) |
| 873 | return V; |
| 874 | |
| 875 | // If the operation is with the result of a phi instruction, check whether |
| 876 | // operating on all incoming values of the phi always yields the same value. |
| 877 | if (isa<PHINode>(Op0) || isa<PHINode>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 878 | if (Value *V = ThreadBinOpOverPHI(Instruction::Mul, Op0, Op1, Q, |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 879 | MaxRecurse)) |
| 880 | return V; |
| 881 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 882 | return nullptr; |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 883 | } |
| 884 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 885 | Value *llvm::SimplifyMulInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 886 | return ::SimplifyMulInst(Op0, Op1, Q, RecursionLimit); |
| 887 | } |
| 888 | |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 889 | /// Check for common or similar folds of integer division or integer remainder. |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 890 | /// This applies to all 4 opcodes (sdiv/udiv/srem/urem). |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 891 | static Value *simplifyDivRem(Value *Op0, Value *Op1, bool IsDiv) { |
| 892 | Type *Ty = Op0->getType(); |
| 893 | |
| 894 | // X / undef -> undef |
| 895 | // X % undef -> undef |
| 896 | if (match(Op1, m_Undef())) |
| 897 | return Op1; |
| 898 | |
| 899 | // X / 0 -> undef |
| 900 | // X % 0 -> undef |
| 901 | // We don't need to preserve faults! |
| 902 | if (match(Op1, m_Zero())) |
| 903 | return UndefValue::get(Ty); |
| 904 | |
Zvi Rackover | 51f0d64 | 2018-01-24 17:22:00 +0000 | [diff] [blame] | 905 | // If any element of a constant divisor vector is zero or undef, the whole op |
| 906 | // is undef. |
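| | // For example, udiv <2 x i32> %x, <i32 7, i32 0> -> undef.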
Sanjay Patel | 2b1f6f4 | 2017-03-09 16:20:52 +0000 | [diff] [blame] | 907 | auto *Op1C = dyn_cast<Constant>(Op1); |
| 908 | if (Op1C && Ty->isVectorTy()) { |
| 909 | unsigned NumElts = Ty->getVectorNumElements(); |
| 910 | for (unsigned i = 0; i != NumElts; ++i) { |
| 911 | Constant *Elt = Op1C->getAggregateElement(i); |
Zvi Rackover | 51f0d64 | 2018-01-24 17:22:00 +0000 | [diff] [blame] | 912 | if (Elt && (Elt->isNullValue() || isa<UndefValue>(Elt))) |
Sanjay Patel | 2b1f6f4 | 2017-03-09 16:20:52 +0000 | [diff] [blame] | 913 | return UndefValue::get(Ty); |
| 914 | } |
| 915 | } |
| 916 | |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 917 | // undef / X -> 0 |
| 918 | // undef % X -> 0 |
| 919 | if (match(Op0, m_Undef())) |
| 920 | return Constant::getNullValue(Ty); |
| 921 | |
| 922 | // 0 / X -> 0 |
| 923 | // 0 % X -> 0 |
| 924 | if (match(Op0, m_Zero())) |
Sanjay Patel | 30be665 | 2018-04-22 17:07:44 +0000 | [diff] [blame] | 925 | return Constant::getNullValue(Op0->getType()); |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 926 | |
| 927 | // X / X -> 1 |
| 928 | // X % X -> 0 |
| 929 | if (Op0 == Op1) |
| 930 | return IsDiv ? ConstantInt::get(Ty, 1) : Constant::getNullValue(Ty); |
| 931 | |
| 932 | // X / 1 -> X |
| 933 | // X % 1 -> 0 |
Sanjay Patel | 962a843 | 2017-03-09 21:56:03 +0000 | [diff] [blame] | 934 | // If this is a boolean op (single-bit element type), we can't have |
| 935 | // division-by-zero or remainder-by-zero, so assume the divisor is 1. |
Sanjay Patel | 1e911fa | 2018-06-25 18:51:21 +0000 | [diff] [blame] | 936 | // Similarly, if we're zero-extending a boolean divisor, assume it's 1.
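| | // For example, udiv i1 %x, %y -> %x and urem i32 %x, (zext i1 %b to i32) -> 0.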
| 937 | Value *X; |
| 938 | if (match(Op1, m_One()) || Ty->isIntOrIntVectorTy(1) || |
| 939 | (match(Op1, m_ZExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1))) |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 940 | return IsDiv ? Op0 : Constant::getNullValue(Ty); |
| 941 | |
| 942 | return nullptr; |
| 943 | } |
| 944 | |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 945 | /// Given a predicate and two operands, return true if the comparison is true. |
| 946 | /// This is a helper for div/rem simplification where we return some other value |
| 947 | /// when we can prove a relationship between the operands. |
| 948 | static bool isICmpTrue(ICmpInst::Predicate Pred, Value *LHS, Value *RHS, |
| 949 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 950 | Value *V = SimplifyICmpInst(Pred, LHS, RHS, Q, MaxRecurse); |
| 951 | Constant *C = dyn_cast_or_null<Constant>(V); |
| 952 | return (C && C->isAllOnesValue()); |
| 953 | } |
| 954 | |
| 955 | /// Return true if we can simplify X / Y to 0. Remainder can adapt that answer |
| 956 | /// to simplify X % Y to X. |
Sanjay Patel | 0d4fd5b | 2017-09-14 14:59:07 +0000 | [diff] [blame] | 957 | static bool isDivZero(Value *X, Value *Y, const SimplifyQuery &Q, |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 958 | unsigned MaxRecurse, bool IsSigned) { |
| 959 | // Recursion is always used, so bail out at once if we already hit the limit. |
| 960 | if (!MaxRecurse--) |
| 961 | return false; |
| 962 | |
| 963 | if (IsSigned) { |
Sanjay Patel | 0d4fd5b | 2017-09-14 14:59:07 +0000 | [diff] [blame] | 964 | // |X| / |Y| --> 0 |
| 965 | // |
| 966 | // We require that 1 operand is a simple constant. That could be extended to |
| 967 | // 2 variables if we computed the sign bit for each. |
| 968 | // |
| 969 | // Make sure that a constant is not the minimum signed value because taking |
| 970 | // the abs() of that is undefined. |
| 971 | Type *Ty = X->getType(); |
| 972 | const APInt *C; |
| 973 | if (match(X, m_APInt(C)) && !C->isMinSignedValue()) { |
| 974 | // Is the variable divisor magnitude always greater than the constant |
| 975 | // dividend magnitude? |
| 976 | // |Y| > |C| --> Y < -abs(C) or Y > abs(C) |
| 977 | Constant *PosDividendC = ConstantInt::get(Ty, C->abs()); |
| 978 | Constant *NegDividendC = ConstantInt::get(Ty, -C->abs()); |
| 979 | if (isICmpTrue(CmpInst::ICMP_SLT, Y, NegDividendC, Q, MaxRecurse) || |
| 980 | isICmpTrue(CmpInst::ICMP_SGT, Y, PosDividendC, Q, MaxRecurse)) |
| 981 | return true; |
| 982 | } |
| 983 | if (match(Y, m_APInt(C))) { |
| 984 | // Special-case: we can't take the abs() of a minimum signed value. If |
| 985 | // that's the divisor, then all we have to do is prove that the dividend |
| 986 | // is also not the minimum signed value. |
| 987 | if (C->isMinSignedValue()) |
| 988 | return isICmpTrue(CmpInst::ICMP_NE, X, Y, Q, MaxRecurse); |
| 989 | |
| 990 | // Is the variable dividend magnitude always less than the constant |
| 991 | // divisor magnitude? |
| 992 | // |X| < |C| --> X > -abs(C) and X < abs(C) |
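| | // For example, if %x is known to lie in (-10, 10), then sdiv i8 %x, 10 -> 0
| | // (and the rem callers use the same fact to fold srem i8 %x, 10 -> %x).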
| 993 | Constant *PosDivisorC = ConstantInt::get(Ty, C->abs()); |
| 994 | Constant *NegDivisorC = ConstantInt::get(Ty, -C->abs()); |
| 995 | if (isICmpTrue(CmpInst::ICMP_SGT, X, NegDivisorC, Q, MaxRecurse) && |
| 996 | isICmpTrue(CmpInst::ICMP_SLT, X, PosDivisorC, Q, MaxRecurse)) |
| 997 | return true; |
| 998 | } |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 999 | return false; |
| 1000 | } |
| 1001 | |
| 1002 | // IsSigned == false. |
Sanjay Patel | 0d4fd5b | 2017-09-14 14:59:07 +0000 | [diff] [blame] | 1003 | // Is the dividend unsigned less than the divisor? |
| 1004 | return isICmpTrue(ICmpInst::ICMP_ULT, X, Y, Q, MaxRecurse); |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1005 | } |
| 1006 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 1007 | /// These are simplifications common to SDiv and UDiv. |
| 1008 | static Value *simplifyDiv(Instruction::BinaryOps Opcode, Value *Op0, Value *Op1, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1009 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 1010 | if (Constant *C = foldOrCommuteConstant(Opcode, Op0, Op1, Q)) |
| 1011 | return C; |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 1012 | |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 1013 | if (Value *V = simplifyDivRem(Op0, Op1, true)) |
| 1014 | return V; |
| 1015 | |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1016 | bool IsSigned = Opcode == Instruction::SDiv; |
Duncan Sands | 65995fa | 2011-01-28 18:50:50 +0000 | [diff] [blame] | 1017 | |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 1018 | // (X * Y) / Y -> X if the multiplication does not overflow. |
Sanjay Patel | 33cb845 | 2018-01-19 16:12:55 +0000 | [diff] [blame] | 1019 | Value *X; |
| 1020 | if (match(Op0, m_c_Mul(m_Value(X), m_Specific(Op1)))) { |
| 1021 | auto *Mul = cast<OverflowingBinaryOperator>(Op0); |
| 1022 | // If the Mul does not overflow, then we are good to go. |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1023 | if ((IsSigned && Q.IIQ.hasNoSignedWrap(Mul)) || |
| 1024 | (!IsSigned && Q.IIQ.hasNoUnsignedWrap(Mul))) |
Duncan Sands | 5747aba | 2011-02-02 20:52:00 +0000 | [diff] [blame] | 1025 | return X; |
Sanjay Patel | 33cb845 | 2018-01-19 16:12:55 +0000 | [diff] [blame] | 1026 | // If X has the form X = A / Y, then X * Y cannot overflow. |
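| | // ((A / Y) * Y rounds A toward zero, so the product's magnitude never
| | // exceeds that of A and the multiply cannot wrap.)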
| 1027 | if ((IsSigned && match(X, m_SDiv(m_Value(), m_Specific(Op1)))) || |
| 1028 | (!IsSigned && match(X, m_UDiv(m_Value(), m_Specific(Op1))))) |
| 1029 | return X; |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 1030 | } |
| 1031 | |
Duncan Sands | 65995fa | 2011-01-28 18:50:50 +0000 | [diff] [blame] | 1032 | // (X rem Y) / Y -> 0 |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1033 | if ((IsSigned && match(Op0, m_SRem(m_Value(), m_Specific(Op1)))) || |
| 1034 | (!IsSigned && match(Op0, m_URem(m_Value(), m_Specific(Op1))))) |
Duncan Sands | 65995fa | 2011-01-28 18:50:50 +0000 | [diff] [blame] | 1035 | return Constant::getNullValue(Op0->getType()); |
| 1036 | |
David Majnemer | cb9d596 | 2014-10-11 10:20:01 +0000 | [diff] [blame] | 1037 | // (X /u C1) /u C2 -> 0 if C1 * C2 overflow |
| 1038 | ConstantInt *C1, *C2; |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1039 | if (!IsSigned && match(Op0, m_UDiv(m_Value(X), m_ConstantInt(C1))) && |
David Majnemer | cb9d596 | 2014-10-11 10:20:01 +0000 | [diff] [blame] | 1040 | match(Op1, m_ConstantInt(C2))) { |
| 1041 | bool Overflow; |
Craig Topper | 9b71a40 | 2017-04-19 21:09:45 +0000 | [diff] [blame] | 1042 | (void)C1->getValue().umul_ov(C2->getValue(), Overflow); |
David Majnemer | cb9d596 | 2014-10-11 10:20:01 +0000 | [diff] [blame] | 1043 | if (Overflow) |
| 1044 | return Constant::getNullValue(Op0->getType()); |
| 1045 | } |
| 1046 | |
Duncan Sands | 65995fa | 2011-01-28 18:50:50 +0000 | [diff] [blame] | 1047 | // If the operation is with the result of a select instruction, check whether |
| 1048 | // operating on either branch of the select always yields the same value. |
| 1049 | if (isa<SelectInst>(Op0) || isa<SelectInst>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1050 | if (Value *V = ThreadBinOpOverSelect(Opcode, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | 65995fa | 2011-01-28 18:50:50 +0000 | [diff] [blame] | 1051 | return V; |
| 1052 | |
| 1053 | // If the operation is with the result of a phi instruction, check whether |
| 1054 | // operating on all incoming values of the phi always yields the same value. |
| 1055 | if (isa<PHINode>(Op0) || isa<PHINode>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1056 | if (Value *V = ThreadBinOpOverPHI(Opcode, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | 65995fa | 2011-01-28 18:50:50 +0000 | [diff] [blame] | 1057 | return V; |
| 1058 | |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1059 | if (isDivZero(Op0, Op1, Q, MaxRecurse, IsSigned)) |
| 1060 | return Constant::getNullValue(Op0->getType()); |
| 1061 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1062 | return nullptr; |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 1063 | } |
| 1064 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 1065 | /// These are simplifications common to SRem and URem. |
| 1066 | static Value *simplifyRem(Instruction::BinaryOps Opcode, Value *Op0, Value *Op1, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1067 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 1068 | if (Constant *C = foldOrCommuteConstant(Opcode, Op0, Op1, Q)) |
| 1069 | return C; |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1070 | |
Sanjay Patel | 0cb2ee9 | 2017-03-06 19:08:35 +0000 | [diff] [blame] | 1071 | if (Value *V = simplifyDivRem(Op0, Op1, false)) |
| 1072 | return V; |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1073 | |
David Majnemer | b435a42 | 2014-09-17 04:16:35 +0000 | [diff] [blame] | 1074 | // (X % Y) % Y -> X % Y |
| 1075 | if ((Opcode == Instruction::SRem && |
| 1076 | match(Op0, m_SRem(m_Value(), m_Specific(Op1)))) || |
| 1077 | (Opcode == Instruction::URem && |
| 1078 | match(Op0, m_URem(m_Value(), m_Specific(Op1))))) |
David Majnemer | ac717f0 | 2014-09-17 03:34:34 +0000 | [diff] [blame] | 1079 | return Op0; |
David Majnemer | ac717f0 | 2014-09-17 03:34:34 +0000 | [diff] [blame] | 1080 | |
Anton Bikineev | 82f6115 | 2018-01-23 09:27:47 +0000 | [diff] [blame] | 1081 | // (X << Y) % X -> 0 |
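| | // For example, urem i32 (shl nuw i32 %x, %y), %x -> 0, because the
| | // non-wrapping shift is exactly %x * (1 << %y), a multiple of %x.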
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1082 | if (Q.IIQ.UseInstrInfo && |
| 1083 | ((Opcode == Instruction::SRem && |
| 1084 | match(Op0, m_NSWShl(m_Specific(Op1), m_Value()))) || |
| 1085 | (Opcode == Instruction::URem && |
| 1086 | match(Op0, m_NUWShl(m_Specific(Op1), m_Value()))))) |
Anton Bikineev | 82f6115 | 2018-01-23 09:27:47 +0000 | [diff] [blame] | 1087 | return Constant::getNullValue(Op0->getType()); |
| 1088 | |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1089 | // If the operation is with the result of a select instruction, check whether |
| 1090 | // operating on either branch of the select always yields the same value. |
| 1091 | if (isa<SelectInst>(Op0) || isa<SelectInst>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1092 | if (Value *V = ThreadBinOpOverSelect(Opcode, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1093 | return V; |
| 1094 | |
| 1095 | // If the operation is with the result of a phi instruction, check whether |
| 1096 | // operating on all incoming values of the phi always yields the same value. |
| 1097 | if (isa<PHINode>(Op0) || isa<PHINode>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1098 | if (Value *V = ThreadBinOpOverPHI(Opcode, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1099 | return V; |
| 1100 | |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1101 | // If X / Y == 0, then X % Y == X. |
| 1102 | if (isDivZero(Op0, Op1, Q, MaxRecurse, Opcode == Instruction::SRem)) |
| 1103 | return Op0; |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 1104 | |
| 1105 | return nullptr; |
| 1106 | } |
| 1107 | |
| 1108 | /// Given operands for an SDiv, see if we can fold the result. |
| 1109 | /// If not, this returns null. |
| 1110 | static Value *SimplifySDivInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
| 1111 | unsigned MaxRecurse) { |
Chen Zheng | 69bb064 | 2018-07-21 12:27:54 +0000 | [diff] [blame] | 1112 | // If the operands are negations of each other (with no signed overflow), return -1.
| 1113 | if (isKnownNegation(Op0, Op1, /*NeedNSW=*/true)) |
| 1114 | return Constant::getAllOnesValue(Op0->getType()); |
| 1115 | |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1116 | return simplifyDiv(Instruction::SDiv, Op0, Op1, Q, MaxRecurse); |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 1117 | } |
| 1118 | |
| 1119 | Value *llvm::SimplifySDivInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 1120 | return ::SimplifySDivInst(Op0, Op1, Q, RecursionLimit); |
| 1121 | } |
| 1122 | |
| 1123 | /// Given operands for a UDiv, see if we can fold the result. |
| 1124 | /// If not, this returns null. |
| 1125 | static Value *SimplifyUDivInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
| 1126 | unsigned MaxRecurse) { |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1127 | return simplifyDiv(Instruction::UDiv, Op0, Op1, Q, MaxRecurse); |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 1128 | } |
| 1129 | |
| 1130 | Value *llvm::SimplifyUDivInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 1131 | return ::SimplifyUDivInst(Op0, Op1, Q, RecursionLimit); |
| 1132 | } |
| 1133 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1134 | /// Given operands for an SRem, see if we can fold the result. |
| 1135 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1136 | static Value *SimplifySRemInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1137 | unsigned MaxRecurse) { |
Sanjay Patel | 2b7e310 | 2018-06-26 15:32:54 +0000 | [diff] [blame] | 1138 | // If the divisor is 0, the result is undefined, so assume the divisor is -1. |
| 1139 | // srem Op0, (sext i1 X) --> srem Op0, -1 --> 0 |
| 1140 | Value *X; |
| 1141 | if (match(Op1, m_SExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1)) |
| 1142 | return ConstantInt::getNullValue(Op0->getType()); |
| 1143 | |
Chen Zheng | f801d0f | 2018-07-20 13:00:47 +0000 | [diff] [blame] | 1144 | // If the two operands are negations of each other, return 0.
| 1145 | if (isKnownNegation(Op0, Op1)) |
Chen Zheng | 69bb064 | 2018-07-21 12:27:54 +0000 | [diff] [blame] | 1146 | return ConstantInt::getNullValue(Op0->getType()); |
Chen Zheng | f801d0f | 2018-07-20 13:00:47 +0000 | [diff] [blame] | 1147 | |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1148 | return simplifyRem(Instruction::SRem, Op0, Op1, Q, MaxRecurse); |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1149 | } |
| 1150 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1151 | Value *llvm::SimplifySRemInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 1152 | return ::SimplifySRemInst(Op0, Op1, Q, RecursionLimit); |
| 1153 | } |
| 1154 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1155 | /// Given operands for a URem, see if we can fold the result. |
| 1156 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1157 | static Value *SimplifyURemInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
Chad Rosier | c24b86f | 2011-12-01 03:08:23 +0000 | [diff] [blame] | 1158 | unsigned MaxRecurse) { |
Sanjay Patel | cca8f78 | 2017-09-14 14:09:11 +0000 | [diff] [blame] | 1159 | return simplifyRem(Instruction::URem, Op0, Op1, Q, MaxRecurse); |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 1160 | } |
| 1161 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1162 | Value *llvm::SimplifyURemInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 1163 | return ::SimplifyURemInst(Op0, Op1, Q, RecursionLimit); |
| 1164 | } |
| 1165 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1166 | /// Returns true if a shift by \c Amount always yields undef. |
Benjamin Kramer | 5e1794e | 2014-01-24 17:09:53 +0000 | [diff] [blame] | 1167 | static bool isUndefShift(Value *Amount) { |
| 1168 | Constant *C = dyn_cast<Constant>(Amount); |
| 1169 | if (!C) |
| 1170 | return false; |
| 1171 | |
| 1172 | // X shift by undef -> undef because it may shift by the bitwidth. |
| 1173 | if (isa<UndefValue>(C)) |
| 1174 | return true; |
| 1175 | |
| 1176 | // Shifting by the bitwidth or more is undefined. |
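| | // For example, shl i32 %x, 32 and lshr i8 %x, 8 both fold to undef.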
| 1177 | if (ConstantInt *CI = dyn_cast<ConstantInt>(C)) |
| 1178 | if (CI->getValue().getLimitedValue() >= |
| 1179 | CI->getType()->getScalarSizeInBits()) |
| 1180 | return true; |
| 1181 | |
| 1182 | // If all lanes of a vector shift are undefined the whole shift is. |
| 1183 | if (isa<ConstantVector>(C) || isa<ConstantDataVector>(C)) { |
| 1184 | for (unsigned I = 0, E = C->getType()->getVectorNumElements(); I != E; ++I) |
| 1185 | if (!isUndefShift(C->getAggregateElement(I))) |
| 1186 | return false; |
| 1187 | return true; |
| 1188 | } |
| 1189 | |
| 1190 | return false; |
| 1191 | } |
| 1192 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1193 | /// Given operands for an Shl, LShr or AShr, see if we can fold the result. |
| 1194 | /// If not, this returns null. |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 1195 | static Value *SimplifyShift(Instruction::BinaryOps Opcode, Value *Op0, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1196 | Value *Op1, const SimplifyQuery &Q, unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 1197 | if (Constant *C = foldOrCommuteConstant(Opcode, Op0, Op1, Q)) |
| 1198 | return C; |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1199 | |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1200 | // 0 shift by X -> 0 |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1201 | if (match(Op0, m_Zero())) |
Sanjay Patel | 30be665 | 2018-04-22 17:07:44 +0000 | [diff] [blame] | 1202 | return Constant::getNullValue(Op0->getType()); |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1203 | |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1204 | // X shift by 0 -> X |
Sanjay Patel | ad0bfb8 | 2018-06-26 17:31:38 +0000 | [diff] [blame] | 1205 | // Shift-by-sign-extended bool must be shift-by-0 because shift-by-all-ones |
| 1206 | // would be poison. |
| 1207 | Value *X; |
| 1208 | if (match(Op1, m_Zero()) || |
| 1209 | (match(Op1, m_SExt(m_Value(X))) && X->getType()->isIntOrIntVectorTy(1))) |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1210 | return Op0; |
| 1211 | |
Benjamin Kramer | 5e1794e | 2014-01-24 17:09:53 +0000 | [diff] [blame] | 1212 | // Fold undefined shifts. |
| 1213 | if (isUndefShift(Op1)) |
| 1214 | return UndefValue::get(Op0->getType()); |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1215 | |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1216 | // If the operation is with the result of a select instruction, check whether |
| 1217 | // operating on either branch of the select always yields the same value. |
| 1218 | if (isa<SelectInst>(Op0) || isa<SelectInst>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1219 | if (Value *V = ThreadBinOpOverSelect(Opcode, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1220 | return V; |
| 1221 | |
| 1222 | // If the operation is with the result of a phi instruction, check whether |
| 1223 | // operating on all incoming values of the phi always yields the same value. |
| 1224 | if (isa<PHINode>(Op0) || isa<PHINode>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1225 | if (Value *V = ThreadBinOpOverPHI(Opcode, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1226 | return V; |
| 1227 | |
Sanjay Patel | 6786bc5 | 2016-05-10 20:46:54 +0000 | [diff] [blame] | 1228 | // If any bits in the shift amount make that value greater than or equal to |
| 1229 | // the number of bits in the type, the shift is undefined. |
Craig Topper | 8205a1a | 2017-05-24 16:53:07 +0000 | [diff] [blame] | 1230 | KnownBits Known = computeKnownBits(Op1, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 1231 | if (Known.One.getLimitedValue() >= Known.getBitWidth()) |
Sanjay Patel | 6786bc5 | 2016-05-10 20:46:54 +0000 | [diff] [blame] | 1232 | return UndefValue::get(Op0->getType()); |
| 1233 | |
| 1234 | // If all valid bits in the shift amount are known zero, the first operand is |
| 1235 | // unchanged. |
Craig Topper | 8205a1a | 2017-05-24 16:53:07 +0000 | [diff] [blame] | 1236 | unsigned NumValidShiftBits = Log2_32_Ceil(Known.getBitWidth()); |
Craig Topper | 8df66c6 | 2017-05-12 17:20:30 +0000 | [diff] [blame] | 1237 | if (Known.countMinTrailingZeros() >= NumValidShiftBits) |
Sanjay Patel | 6786bc5 | 2016-05-10 20:46:54 +0000 | [diff] [blame] | 1238 | return Op0; |
| 1239 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1240 | return nullptr; |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1241 | } |
| 1242 | |
Adrian Prantl | 5f8f34e4 | 2018-05-01 15:54:18 +0000 | [diff] [blame] | 1243 | /// Given operands for an Shl, LShr or AShr, see if we can |
David Majnemer | bf7550e | 2014-11-05 00:59:59 +0000 | [diff] [blame] | 1244 | /// fold the result. If not, this returns null. |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 1245 | static Value *SimplifyRightShift(Instruction::BinaryOps Opcode, Value *Op0, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1246 | Value *Op1, bool isExact, const SimplifyQuery &Q, |
David Majnemer | bf7550e | 2014-11-05 00:59:59 +0000 | [diff] [blame] | 1247 | unsigned MaxRecurse) { |
| 1248 | if (Value *V = SimplifyShift(Opcode, Op0, Op1, Q, MaxRecurse)) |
| 1249 | return V; |
| 1250 | |
| 1251 | // X >> X -> 0 |
| 1252 | if (Op0 == Op1) |
| 1253 | return Constant::getNullValue(Op0->getType()); |
| 1254 | |
David Majnemer | 65c52ae | 2014-12-17 01:54:33 +0000 | [diff] [blame] | 1255 | // undef >> X -> 0 |
| 1256 | // undef >> X -> undef (if it's exact) |
| 1257 | if (match(Op0, m_Undef())) |
| 1258 | return isExact ? Op0 : Constant::getNullValue(Op0->getType()); |
| 1259 | |
David Majnemer | bf7550e | 2014-11-05 00:59:59 +0000 | [diff] [blame] | 1260 | // The low bit cannot be shifted out of an exact shift if it is set. |
| 1261 | if (isExact) { |
Craig Topper | 8205a1a | 2017-05-24 16:53:07 +0000 | [diff] [blame] | 1262 | KnownBits Op0Known = computeKnownBits(Op0, Q.DL, /*Depth=*/0, Q.AC, Q.CxtI, Q.DT); |
Craig Topper | b45eabc | 2017-04-26 16:39:58 +0000 | [diff] [blame] | 1263 | if (Op0Known.One[0]) |
David Majnemer | bf7550e | 2014-11-05 00:59:59 +0000 | [diff] [blame] | 1264 | return Op0; |
| 1265 | } |
| 1266 | |
| 1267 | return nullptr; |
| 1268 | } |
| 1269 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1270 | /// Given operands for an Shl, see if we can fold the result. |
| 1271 | /// If not, this returns null. |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1272 | static Value *SimplifyShlInst(Value *Op0, Value *Op1, bool isNSW, bool isNUW, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1273 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1274 | if (Value *V = SimplifyShift(Instruction::Shl, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1275 | return V; |
| 1276 | |
| 1277 | // undef << X -> 0 |
David Majnemer | 65c52ae | 2014-12-17 01:54:33 +0000 | [diff] [blame] | 1278 | // undef << X -> undef (if it's NSW/NUW)
Duncan Sands | a29ea9a | 2011-02-01 09:06:20 +0000 | [diff] [blame] | 1279 | if (match(Op0, m_Undef())) |
David Majnemer | 65c52ae | 2014-12-17 01:54:33 +0000 | [diff] [blame] | 1280 | return isNSW || isNUW ? Op0 : Constant::getNullValue(Op0->getType()); |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1281 | |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1282 | // (X >> A) << A -> X |
| 1283 | Value *X; |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1284 | if (Q.IIQ.UseInstrInfo && |
| 1285 | match(Op0, m_Exact(m_Shr(m_Value(X), m_Specific(Op1))))) |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1286 | return X; |
Roman Lebedev | 2683802 | 2018-06-07 20:03:45 +0000 | [diff] [blame] | 1287 | |
| 1288 | // shl nuw i8 C, %x -> C iff C has sign bit set. |
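| | // (Any non-zero shift amount would shift out the set sign bit and violate
| | // nuw, so C is the only non-poison result; e.g. shl nuw i8 -128, %x -> -128.)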
| 1289 | if (isNUW && match(Op0, m_Negative())) |
| 1290 | return Op0; |
| 1291 | // NOTE: could use computeKnownBits() / LazyValueInfo, |
| 1292 | // but the cost-benefit analysis suggests it isn't worth it. |
| 1293 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1294 | return nullptr; |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1295 | } |
| 1296 | |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1297 | Value *llvm::SimplifyShlInst(Value *Op0, Value *Op1, bool isNSW, bool isNUW, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1298 | const SimplifyQuery &Q) { |
| 1299 | return ::SimplifyShlInst(Op0, Op1, isNSW, isNUW, Q, RecursionLimit); |
| 1300 | } |
| 1301 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1302 | /// Given operands for an LShr, see if we can fold the result. |
| 1303 | /// If not, this returns null. |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1304 | static Value *SimplifyLShrInst(Value *Op0, Value *Op1, bool isExact, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1305 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
David Majnemer | bf7550e | 2014-11-05 00:59:59 +0000 | [diff] [blame] | 1306 | if (Value *V = SimplifyRightShift(Instruction::LShr, Op0, Op1, isExact, Q, |
| 1307 | MaxRecurse)) |
| 1308 | return V; |
David Majnemer | a80fed7 | 2013-07-09 22:01:22 +0000 | [diff] [blame] | 1309 | |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1310 | // (X << A) >> A -> X |
| 1311 | Value *X; |
David Majnemer | 4f43837 | 2014-11-04 17:38:50 +0000 | [diff] [blame] | 1312 | if (match(Op0, m_NUWShl(m_Value(X), m_Specific(Op1)))) |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1313 | return X; |
Duncan Sands | d114ab3 | 2011-02-13 17:15:40 +0000 | [diff] [blame] | 1314 | |
Hiroshi Inoue | 02f79ea | 2018-08-01 04:40:32 +0000 | [diff] [blame] | 1315 | // ((X << A) | Y) >> A -> X if effective width of Y is not larger than A. |
| 1316 | // We can return X as we do in the above case since OR alters no bits in X. |
| 1317 | // SimplifyDemandedBits in InstCombine can do more general optimization for |
| 1318 | // bit manipulation. This pattern aims to provide opportunities for other |
| 1319 | // optimizers by supporting a simple but common case in InstSimplify. |
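| | // For example, if %y is known to fit in the low 8 bits:
| | //   lshr i32 (or i32 (shl nuw i32 %x, 8), %y), 8 -> %x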
| 1320 | Value *Y; |
| 1321 | const APInt *ShRAmt, *ShLAmt; |
| 1322 | if (match(Op1, m_APInt(ShRAmt)) && |
| 1323 | match(Op0, m_c_Or(m_NUWShl(m_Value(X), m_APInt(ShLAmt)), m_Value(Y))) && |
| 1324 | *ShRAmt == *ShLAmt) { |
| 1325 | const KnownBits YKnown = computeKnownBits(Y, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 1326 | const unsigned Width = Op0->getType()->getScalarSizeInBits(); |
| 1327 | const unsigned EffWidthY = Width - YKnown.countMinLeadingZeros(); |
Benjamin Kramer | bae6aab | 2018-08-12 11:43:03 +0000 | [diff] [blame] | 1328 | if (ShRAmt->uge(EffWidthY)) |
Hiroshi Inoue | 02f79ea | 2018-08-01 04:40:32 +0000 | [diff] [blame] | 1329 | return X; |
| 1330 | } |
| 1331 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1332 | return nullptr; |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1333 | } |
| 1334 | |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1335 | Value *llvm::SimplifyLShrInst(Value *Op0, Value *Op1, bool isExact, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1336 | const SimplifyQuery &Q) { |
| 1337 | return ::SimplifyLShrInst(Op0, Op1, isExact, Q, RecursionLimit); |
| 1338 | } |
| 1339 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1340 | /// Given operands for an AShr, see if we can fold the result. |
| 1341 | /// If not, this returns null. |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1342 | static Value *SimplifyAShrInst(Value *Op0, Value *Op1, bool isExact, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1343 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
David Majnemer | bf7550e | 2014-11-05 00:59:59 +0000 | [diff] [blame] | 1344 | if (Value *V = SimplifyRightShift(Instruction::AShr, Op0, Op1, isExact, Q, |
| 1345 | MaxRecurse)) |
Duncan Sands | 571fd9a | 2011-01-14 14:44:12 +0000 | [diff] [blame] | 1346 | return V; |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1347 | |
Sanjay Patel | adf6e88 | 2018-02-18 18:05:08 +0000 | [diff] [blame] | 1348 | // all ones >>a X -> -1 |
| 1349 | // Do not return Op0 because it may contain undef elements if it's a vector. |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1350 | if (match(Op0, m_AllOnes())) |
Sanjay Patel | adf6e88 | 2018-02-18 18:05:08 +0000 | [diff] [blame] | 1351 | return Constant::getAllOnesValue(Op0->getType()); |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1352 | |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1353 | // (X << A) >> A -> X |
| 1354 | Value *X; |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1355 | if (Q.IIQ.UseInstrInfo && match(Op0, m_NSWShl(m_Value(X), m_Specific(Op1)))) |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1356 | return X; |
Duncan Sands | d114ab3 | 2011-02-13 17:15:40 +0000 | [diff] [blame] | 1357 | |
Suyog Sarda | 6886241 | 2014-07-17 06:28:15 +0000 | [diff] [blame] | 1358 | // Arithmetic shifting an all-sign-bit value is a no-op. |
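| | // For example, if %v = ashr i32 %x, 31 then %v has 32 sign bits (it is
| | // either 0 or -1), so ashr i32 %v, %y -> %v.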
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1359 | unsigned NumSignBits = ComputeNumSignBits(Op0, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
Suyog Sarda | 6886241 | 2014-07-17 06:28:15 +0000 | [diff] [blame] | 1360 | if (NumSignBits == Op0->getType()->getScalarSizeInBits()) |
| 1361 | return Op0; |
| 1362 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 1363 | return nullptr; |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 1364 | } |
| 1365 | |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1366 | Value *llvm::SimplifyAShrInst(Value *Op0, Value *Op1, bool isExact, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1367 | const SimplifyQuery &Q) { |
| 1368 | return ::SimplifyAShrInst(Op0, Op1, isExact, Q, RecursionLimit); |
| 1369 | } |
| 1370 | |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1371 | /// Commuted variants are assumed to be handled by calling this function again |
| 1372 | /// with the parameters swapped. |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1373 | static Value *simplifyUnsignedRangeCheck(ICmpInst *ZeroICmp, |
Roman Lebedev | 6e2c5c8 | 2019-09-08 20:14:15 +0000 | [diff] [blame] | 1374 | ICmpInst *UnsignedICmp, bool IsAnd, |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1375 | const SimplifyQuery &Q) { |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1376 | Value *X, *Y; |
| 1377 | |
| 1378 | ICmpInst::Predicate EqPred; |
David Majnemer | d5b3aa4 | 2014-12-08 18:30:43 +0000 | [diff] [blame] | 1379 | if (!match(ZeroICmp, m_ICmp(EqPred, m_Value(Y), m_Zero())) || |
| 1380 | !ICmpInst::isEquality(EqPred)) |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1381 | return nullptr; |
| 1382 | |
| 1383 | ICmpInst::Predicate UnsignedPred; |
Roman Lebedev | f128662 | 2019-09-12 09:26:17 +0000 | [diff] [blame^] | 1384 | |
| 1385 | // Y = (A - B); Y >= A && Y != 0 --> Y >= A iff B != 0 |
| 1386 | // Y = (A - B); Y < A || Y == 0 --> Y < A iff B != 0 |
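| | // (With B != 0, Y u>= A can only hold if the subtraction wrapped, and a
| | // wrapped A - B is never 0, so the extra test against 0 is redundant; the
| | // 'or' form is the dual: Y == 0 would force A == B != 0, which already
| | // satisfies Y < A.)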
| 1387 | Value *A, *B; |
| 1388 | if (match(Y, m_Sub(m_Value(A), m_Value(B))) && |
| 1389 | match(UnsignedICmp, |
| 1390 | m_c_ICmp(UnsignedPred, m_Specific(Y), m_Specific(A)))) { |
| 1391 | if (UnsignedICmp->getOperand(0) != Y) |
| 1392 | UnsignedPred = ICmpInst::getSwappedPredicate(UnsignedPred); |
| 1393 | |
| 1394 | if (UnsignedPred == ICmpInst::ICMP_UGE && IsAnd && |
| 1395 | EqPred == ICmpInst::ICMP_NE && |
| 1396 | isKnownNonZero(B, Q.DL, /*Depth=*/0, Q.AC, Q.CxtI, Q.DT)) |
| 1397 | return UnsignedICmp; |
| 1398 | if (UnsignedPred == ICmpInst::ICMP_ULT && !IsAnd && |
| 1399 | EqPred == ICmpInst::ICMP_EQ && |
| 1400 | isKnownNonZero(B, Q.DL, /*Depth=*/0, Q.AC, Q.CxtI, Q.DT)) |
| 1401 | return UnsignedICmp; |
| 1402 | } |
| 1403 | |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1404 | if (match(UnsignedICmp, m_ICmp(UnsignedPred, m_Value(X), m_Specific(Y))) && |
| 1405 | ICmpInst::isUnsigned(UnsignedPred)) |
| 1406 | ; |
| 1407 | else if (match(UnsignedICmp, |
Sanjay Patel | 0c57de4 | 2018-06-20 14:22:49 +0000 | [diff] [blame] | 1408 | m_ICmp(UnsignedPred, m_Specific(Y), m_Value(X))) && |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1409 | ICmpInst::isUnsigned(UnsignedPred)) |
| 1410 | UnsignedPred = ICmpInst::getSwappedPredicate(UnsignedPred); |
| 1411 | else |
| 1412 | return nullptr; |
| 1413 | |
| 1414 | // X < Y && Y != 0 --> X < Y |
| 1415 | // X < Y || Y != 0 --> Y != 0 |
| 1416 | if (UnsignedPred == ICmpInst::ICMP_ULT && EqPred == ICmpInst::ICMP_NE) |
| 1417 | return IsAnd ? UnsignedICmp : ZeroICmp; |
| 1418 | |
Roman Lebedev | 6e2c5c8 | 2019-09-08 20:14:15 +0000 | [diff] [blame] | 1419 | // X <= Y && Y != 0 --> X <= Y iff X != 0 |
| 1420 | // X <= Y || Y != 0 --> Y != 0 iff X != 0 |
| 1421 | if (UnsignedPred == ICmpInst::ICMP_ULE && EqPred == ICmpInst::ICMP_NE && |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1422 | isKnownNonZero(X, Q.DL, /*Depth=*/0, Q.AC, Q.CxtI, Q.DT)) |
Roman Lebedev | 6e2c5c8 | 2019-09-08 20:14:15 +0000 | [diff] [blame] | 1423 | return IsAnd ? UnsignedICmp : ZeroICmp; |
| 1424 | |
| 1425 | // X > Y && Y == 0 --> Y == 0 iff X != 0 |
| 1426 | // X > Y || Y == 0 --> X > Y iff X != 0 |
| 1427 | if (UnsignedPred == ICmpInst::ICMP_UGT && EqPred == ICmpInst::ICMP_EQ && |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1428 | isKnownNonZero(X, Q.DL, /*Depth=*/0, Q.AC, Q.CxtI, Q.DT)) |
Roman Lebedev | 6e2c5c8 | 2019-09-08 20:14:15 +0000 | [diff] [blame] | 1429 | return IsAnd ? ZeroICmp : UnsignedICmp; |
| 1430 | |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1431 | // X >= Y || Y != 0 --> true |
| 1432 | // X >= Y || Y == 0 --> X >= Y |
| 1433 | if (UnsignedPred == ICmpInst::ICMP_UGE && !IsAnd) { |
| 1434 | if (EqPred == ICmpInst::ICMP_NE) |
| 1435 | return getTrue(UnsignedICmp->getType()); |
| 1436 | return UnsignedICmp; |
| 1437 | } |
| 1438 | |
David Majnemer | d5b3aa4 | 2014-12-08 18:30:43 +0000 | [diff] [blame] | 1439 | // X < Y && Y == 0 --> false |
| 1440 | if (UnsignedPred == ICmpInst::ICMP_ULT && EqPred == ICmpInst::ICMP_EQ && |
| 1441 | IsAnd) |
| 1442 | return getFalse(UnsignedICmp->getType()); |
| 1443 | |
David Majnemer | 1af36e5 | 2014-12-06 10:51:40 +0000 | [diff] [blame] | 1444 | return nullptr; |
| 1445 | } |
| 1446 | |
Sanjay Patel | 9b1b2de | 2016-12-06 19:05:46 +0000 | [diff] [blame] | 1447 | /// Commuted variants are assumed to be handled by calling this function again |
| 1448 | /// with the parameters swapped. |
| 1449 | static Value *simplifyAndOfICmpsWithSameOperands(ICmpInst *Op0, ICmpInst *Op1) { |
| 1450 | ICmpInst::Predicate Pred0, Pred1; |
| 1451 | Value *A, *B;
Sanjay Patel | 5369775 | 2016-12-06 22:09:52 +0000 | [diff] [blame] | 1452 | if (!match(Op0, m_ICmp(Pred0, m_Value(A), m_Value(B))) || |
| 1453 | !match(Op1, m_ICmp(Pred1, m_Specific(A), m_Specific(B)))) |
Sanjay Patel | 9b1b2de | 2016-12-06 19:05:46 +0000 | [diff] [blame] | 1454 | return nullptr; |
| 1455 | |
| 1456 | // We have (icmp Pred0, A, B) & (icmp Pred1, A, B). |
| 1457 | // If Op1 is always implied true by Op0, then Op0 is a subset of Op1, and we |
| 1458 | // can eliminate Op1 from this 'and'. |
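| | // For example, (icmp ult %a, %b) & (icmp ule %a, %b) -> icmp ult %a, %b.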
| 1459 | if (ICmpInst::isImpliedTrueByMatchingCmp(Pred0, Pred1)) |
| 1460 | return Op0; |
| 1461 | |
| 1462 | // Check for any combination of predicates that are guaranteed to be disjoint. |
| 1463 | if ((Pred0 == ICmpInst::getInversePredicate(Pred1)) || |
| 1464 | (Pred0 == ICmpInst::ICMP_EQ && ICmpInst::isFalseWhenEqual(Pred1)) || |
| 1465 | (Pred0 == ICmpInst::ICMP_SLT && Pred1 == ICmpInst::ICMP_SGT) || |
| 1466 | (Pred0 == ICmpInst::ICMP_ULT && Pred1 == ICmpInst::ICMP_UGT)) |
| 1467 | return getFalse(Op0->getType()); |
| 1468 | |
| 1469 | return nullptr; |
| 1470 | } |
| 1471 | |
| 1472 | /// Commuted variants are assumed to be handled by calling this function again |
| 1473 | /// with the parameters swapped. |
Sanjay Patel | 142cb83 | 2017-05-04 18:19:17 +0000 | [diff] [blame] | 1474 | static Value *simplifyOrOfICmpsWithSameOperands(ICmpInst *Op0, ICmpInst *Op1) { |
| 1475 | ICmpInst::Predicate Pred0, Pred1; |
| 1476 | Value *A, *B;
| 1477 | if (!match(Op0, m_ICmp(Pred0, m_Value(A), m_Value(B))) || |
| 1478 | !match(Op1, m_ICmp(Pred1, m_Specific(A), m_Specific(B)))) |
| 1479 | return nullptr; |
| 1480 | |
| 1481 | // We have (icmp Pred0, A, B) | (icmp Pred1, A, B). |
| 1482 | // If Op1 is always implied true by Op0, then Op0 is a subset of Op1, and we |
| 1483 | // can eliminate Op0 from this 'or'. |
| 1484 | if (ICmpInst::isImpliedTrueByMatchingCmp(Pred0, Pred1)) |
| 1485 | return Op1; |
| 1486 | |
| 1487 | // Check for any combination of predicates that cover the entire range of |
| 1488 | // possibilities. |
| 1489 | if ((Pred0 == ICmpInst::getInversePredicate(Pred1)) || |
| 1490 | (Pred0 == ICmpInst::ICMP_NE && ICmpInst::isTrueWhenEqual(Pred1)) || |
| 1491 | (Pred0 == ICmpInst::ICMP_SLE && Pred1 == ICmpInst::ICMP_SGE) || |
| 1492 | (Pred0 == ICmpInst::ICMP_ULE && Pred1 == ICmpInst::ICMP_UGE)) |
| 1493 | return getTrue(Op0->getType()); |
| 1494 | |
| 1495 | return nullptr; |
| 1496 | } |
| 1497 | |
Sanjay Patel | 599e65b | 2017-05-07 15:11:40 +0000 | [diff] [blame] | 1498 | /// Test if a pair of compares with a shared operand and 2 constants has an |
| 1499 | /// empty set intersection, full set union, or if one compare is a superset of |
| 1500 | /// the other. |
| 1501 | static Value *simplifyAndOrOfICmpsWithConstants(ICmpInst *Cmp0, ICmpInst *Cmp1, |
| 1502 | bool IsAnd) { |
| 1503 | // Look for this pattern: {and/or} (icmp X, C0), (icmp X, C1)). |
| 1504 | if (Cmp0->getOperand(0) != Cmp1->getOperand(0)) |
| 1505 | return nullptr; |
| 1506 | |
| 1507 | const APInt *C0, *C1; |
| 1508 | if (!match(Cmp0->getOperand(1), m_APInt(C0)) || |
| 1509 | !match(Cmp1->getOperand(1), m_APInt(C1))) |
| 1510 | return nullptr; |
| 1511 | |
| 1512 | auto Range0 = ConstantRange::makeExactICmpRegion(Cmp0->getPredicate(), *C0); |
| 1513 | auto Range1 = ConstantRange::makeExactICmpRegion(Cmp1->getPredicate(), *C1); |
| 1514 | |
Sanjay Patel | 6745447 | 2017-05-08 16:35:02 +0000 | [diff] [blame] | 1515 | // For and-of-compares, check if the intersection is empty: |
Sanjay Patel | 599e65b | 2017-05-07 15:11:40 +0000 | [diff] [blame] | 1516 | // (icmp X, C0) && (icmp X, C1) --> empty set --> false |
| 1517 | if (IsAnd && Range0.intersectWith(Range1).isEmptySet()) |
| 1518 | return getFalse(Cmp0->getType()); |
| 1519 | |
| 1520 | // For or-of-compares, check if the union is full: |
| 1521 | // (icmp X, C0) || (icmp X, C1) --> full set --> true |
| 1522 | if (!IsAnd && Range0.unionWith(Range1).isFullSet()) |
| 1523 | return getTrue(Cmp0->getType()); |
| 1524 | |
| 1525 | // Is one range a superset of the other? |
| 1526 | // If this is and-of-compares, take the smaller set: |
| 1527 | // (icmp sgt X, 4) && (icmp sgt X, 42) --> icmp sgt X, 42 |
| 1528 | // If this is or-of-compares, take the larger set: |
| 1529 | // (icmp sgt X, 4) || (icmp sgt X, 42) --> icmp sgt X, 4 |
| 1530 | if (Range0.contains(Range1)) |
| 1531 | return IsAnd ? Cmp1 : Cmp0; |
| 1532 | if (Range1.contains(Range0)) |
| 1533 | return IsAnd ? Cmp0 : Cmp1; |
| 1534 | |
| 1535 | return nullptr; |
| 1536 | } |
| 1537 | |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1538 | static Value *simplifyAndOrOfICmpsWithZero(ICmpInst *Cmp0, ICmpInst *Cmp1, |
| 1539 | bool IsAnd) { |
| 1540 | ICmpInst::Predicate P0 = Cmp0->getPredicate(), P1 = Cmp1->getPredicate(); |
| 1541 | if (!match(Cmp0->getOperand(1), m_Zero()) || |
| 1542 | !match(Cmp1->getOperand(1), m_Zero()) || P0 != P1) |
| 1543 | return nullptr; |
| 1544 | |
| 1545 | if ((IsAnd && P0 != ICmpInst::ICMP_NE) || (!IsAnd && P1 != ICmpInst::ICMP_EQ)) |
| 1546 | return nullptr; |
| 1547 | |
Sanjay Patel | 4158eff | 2018-01-13 15:44:44 +0000 | [diff] [blame] | 1548 | // We have either "(X == 0 || Y == 0)" or "(X != 0 && Y != 0)". |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1549 | Value *X = Cmp0->getOperand(0); |
| 1550 | Value *Y = Cmp1->getOperand(0); |
| 1551 | |
| 1552 | // If one of the compares is a masked version of a (not) null check, then |
Sanjay Patel | 4158eff | 2018-01-13 15:44:44 +0000 | [diff] [blame] | 1553 | // that compare implies the other, so we eliminate the other. Optionally, look |
| 1554 | // through a pointer-to-int cast to match a null check of a pointer type. |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1555 | |
Sanjay Patel | 9568f42 | 2018-01-14 15:58:18 +0000 | [diff] [blame] | 1556 | // (X == 0) || (([ptrtoint] X & ?) == 0) --> ([ptrtoint] X & ?) == 0 |
| 1557 | // (X == 0) || ((? & [ptrtoint] X) == 0) --> (? & [ptrtoint] X) == 0 |
| 1558 | // (X != 0) && (([ptrtoint] X & ?) != 0) --> ([ptrtoint] X & ?) != 0 |
| 1559 | // (X != 0) && ((? & [ptrtoint] X) != 0) --> (? & [ptrtoint] X) != 0 |
Sanjay Patel | 4158eff | 2018-01-13 15:44:44 +0000 | [diff] [blame] | 1560 | if (match(Y, m_c_And(m_Specific(X), m_Value())) || |
| 1561 | match(Y, m_c_And(m_PtrToInt(m_Specific(X)), m_Value()))) |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1562 | return Cmp1; |
| 1563 | |
Sanjay Patel | 9568f42 | 2018-01-14 15:58:18 +0000 | [diff] [blame] | 1564 | // (([ptrtoint] Y & ?) == 0) || (Y == 0) --> ([ptrtoint] Y & ?) == 0 |
| 1565 | // ((? & [ptrtoint] Y) == 0) || (Y == 0) --> (? & [ptrtoint] Y) == 0 |
| 1566 | // (([ptrtoint] Y & ?) != 0) && (Y != 0) --> ([ptrtoint] Y & ?) != 0 |
| 1567 | // ((? & [ptrtoint] Y) != 0) && (Y != 0) --> (? & [ptrtoint] Y) != 0 |
Sanjay Patel | 4158eff | 2018-01-13 15:44:44 +0000 | [diff] [blame] | 1568 | if (match(X, m_c_And(m_Specific(Y), m_Value())) || |
| 1569 | match(X, m_c_And(m_PtrToInt(m_Specific(Y)), m_Value()))) |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1570 | return Cmp0; |
| 1571 | |
| 1572 | return nullptr; |
| 1573 | } |
| 1574 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1575 | static Value *simplifyAndOfICmpsWithAdd(ICmpInst *Op0, ICmpInst *Op1, |
| 1576 | const InstrInfoQuery &IIQ) { |
Sanjay Patel | 599e65b | 2017-05-07 15:11:40 +0000 | [diff] [blame] | 1577 | // (icmp (add V, C0), C1) & (icmp V, C0) |
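| | // E.g., with C0 = 1 and C1 = 3 (Delta == 2):
| | //   (icmp ult (add V, 1), 3) & (icmp sgt V, 1) --> false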
Sanjay Patel | b2332e1 | 2016-09-20 14:36:14 +0000 | [diff] [blame] | 1578 | ICmpInst::Predicate Pred0, Pred1; |
Sanjay Patel | 9ad8fb6 | 2016-06-20 20:59:59 +0000 | [diff] [blame] | 1579 | const APInt *C0, *C1; |
Sanjay Patel | b2332e1 | 2016-09-20 14:36:14 +0000 | [diff] [blame] | 1580 | Value *V; |
Sanjay Patel | 1b312ad | 2016-09-28 13:53:13 +0000 | [diff] [blame] | 1581 | if (!match(Op0, m_ICmp(Pred0, m_Add(m_Value(V), m_APInt(C0)), m_APInt(C1)))) |
Sanjay Patel | f8ee0e0 | 2016-06-19 17:20:27 +0000 | [diff] [blame] | 1582 | return nullptr; |
David Majnemer | a315bd8 | 2014-09-15 08:15:28 +0000 | [diff] [blame] | 1583 | |
Sanjay Patel | 1b312ad | 2016-09-28 13:53:13 +0000 | [diff] [blame] | 1584 | if (!match(Op1, m_ICmp(Pred1, m_Specific(V), m_Value()))) |
David Majnemer | a315bd8 | 2014-09-15 08:15:28 +0000 | [diff] [blame] | 1585 | return nullptr; |
| 1586 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1587 | auto *AddInst = cast<OverflowingBinaryOperator>(Op0->getOperand(0)); |
Sanjay Patel | 1b312ad | 2016-09-28 13:53:13 +0000 | [diff] [blame] | 1588 | if (AddInst->getOperand(1) != Op1->getOperand(1)) |
| 1589 | return nullptr; |
| 1590 | |
Craig Topper | 9bce1ad | 2017-05-26 19:04:02 +0000 | [diff] [blame] | 1591 | Type *ITy = Op0->getType(); |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1592 | bool isNSW = IIQ.hasNoSignedWrap(AddInst); |
| 1593 | bool isNUW = IIQ.hasNoUnsignedWrap(AddInst); |
David Majnemer | a315bd8 | 2014-09-15 08:15:28 +0000 | [diff] [blame] | 1594 | |
Sanjay Patel | 1b312ad | 2016-09-28 13:53:13 +0000 | [diff] [blame] | 1595 | const APInt Delta = *C1 - *C0; |
| 1596 | if (C0->isStrictlyPositive()) { |
David Majnemer | a315bd8 | 2014-09-15 08:15:28 +0000 | [diff] [blame] | 1597 | if (Delta == 2) { |
| 1598 | if (Pred0 == ICmpInst::ICMP_ULT && Pred1 == ICmpInst::ICMP_SGT) |
| 1599 | return getFalse(ITy); |
| 1600 | if (Pred0 == ICmpInst::ICMP_SLT && Pred1 == ICmpInst::ICMP_SGT && isNSW) |
| 1601 | return getFalse(ITy); |
| 1602 | } |
| 1603 | if (Delta == 1) { |
| 1604 | if (Pred0 == ICmpInst::ICMP_ULE && Pred1 == ICmpInst::ICMP_SGT) |
| 1605 | return getFalse(ITy); |
| 1606 | if (Pred0 == ICmpInst::ICMP_SLE && Pred1 == ICmpInst::ICMP_SGT && isNSW) |
| 1607 | return getFalse(ITy); |
| 1608 | } |
| 1609 | } |
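| | // getBoolValue() is just a non-zero test: with 'nuw' on the add, the
| | // unsigned cases below hold for any non-zero C0.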
Sanjay Patel | 1b312ad | 2016-09-28 13:53:13 +0000 | [diff] [blame] | 1610 | if (C0->getBoolValue() && isNUW) { |
David Majnemer | a315bd8 | 2014-09-15 08:15:28 +0000 | [diff] [blame] | 1611 | if (Delta == 2) |
| 1612 | if (Pred0 == ICmpInst::ICMP_ULT && Pred1 == ICmpInst::ICMP_UGT) |
| 1613 | return getFalse(ITy); |
| 1614 | if (Delta == 1) |
| 1615 | if (Pred0 == ICmpInst::ICMP_ULE && Pred1 == ICmpInst::ICMP_UGT) |
| 1616 | return getFalse(ITy); |
| 1617 | } |
| 1618 | |
| 1619 | return nullptr; |
| 1620 | } |
| 1621 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1622 | static Value *simplifyAndOfICmps(ICmpInst *Op0, ICmpInst *Op1, |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1623 | const SimplifyQuery &Q) { |
| 1624 | if (Value *X = simplifyUnsignedRangeCheck(Op0, Op1, /*IsAnd=*/true, Q)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1625 | return X; |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1626 | if (Value *X = simplifyUnsignedRangeCheck(Op1, Op0, /*IsAnd=*/true, Q)) |
Sanjay Patel | 142cb83 | 2017-05-04 18:19:17 +0000 | [diff] [blame] | 1627 | return X; |
| 1628 | |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1629 | if (Value *X = simplifyAndOfICmpsWithSameOperands(Op0, Op1)) |
| 1630 | return X; |
| 1631 | if (Value *X = simplifyAndOfICmpsWithSameOperands(Op1, Op0)) |
Sanjay Patel | 142cb83 | 2017-05-04 18:19:17 +0000 | [diff] [blame] | 1632 | return X; |
| 1633 | |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1634 | if (Value *X = simplifyAndOrOfICmpsWithConstants(Op0, Op1, true)) |
Sanjay Patel | 599e65b | 2017-05-07 15:11:40 +0000 | [diff] [blame] | 1635 | return X; |
| 1636 | |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1637 | if (Value *X = simplifyAndOrOfICmpsWithZero(Op0, Op1, true)) |
| 1638 | return X; |
| 1639 | |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1640 | if (Value *X = simplifyAndOfICmpsWithAdd(Op0, Op1, Q.IIQ)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1641 | return X; |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1642 | if (Value *X = simplifyAndOfICmpsWithAdd(Op1, Op0, Q.IIQ)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1643 | return X; |
| 1644 | |
| 1645 | return nullptr; |
| 1646 | } |
| 1647 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1648 | static Value *simplifyOrOfICmpsWithAdd(ICmpInst *Op0, ICmpInst *Op1, |
| 1649 | const InstrInfoQuery &IIQ) { |
Sanjay Patel | 142cb83 | 2017-05-04 18:19:17 +0000 | [diff] [blame] | 1650 | // (icmp (add V, C0), C1) | (icmp V, C0) |
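| | // E.g., with C0 = 1 and C1 = 3 (Delta == 2):
| | //   (icmp uge (add V, 1), 3) | (icmp sle V, 1) --> true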
| 1651 | ICmpInst::Predicate Pred0, Pred1; |
| 1652 | const APInt *C0, *C1; |
| 1653 | Value *V; |
| 1654 | if (!match(Op0, m_ICmp(Pred0, m_Add(m_Value(V), m_APInt(C0)), m_APInt(C1)))) |
| 1655 | return nullptr; |
| 1656 | |
| 1657 | if (!match(Op1, m_ICmp(Pred1, m_Specific(V), m_Value()))) |
| 1658 | return nullptr; |
| 1659 | |
| 1660 | auto *AddInst = cast<BinaryOperator>(Op0->getOperand(0)); |
| 1661 | if (AddInst->getOperand(1) != Op1->getOperand(1)) |
| 1662 | return nullptr; |
| 1663 | |
| 1664 | Type *ITy = Op0->getType(); |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1665 | bool isNSW = IIQ.hasNoSignedWrap(AddInst); |
| 1666 | bool isNUW = IIQ.hasNoUnsignedWrap(AddInst); |
Sanjay Patel | 142cb83 | 2017-05-04 18:19:17 +0000 | [diff] [blame] | 1667 | |
| 1668 | const APInt Delta = *C1 - *C0; |
| 1669 | if (C0->isStrictlyPositive()) { |
| 1670 | if (Delta == 2) { |
| 1671 | if (Pred0 == ICmpInst::ICMP_UGE && Pred1 == ICmpInst::ICMP_SLE) |
| 1672 | return getTrue(ITy); |
| 1673 | if (Pred0 == ICmpInst::ICMP_SGE && Pred1 == ICmpInst::ICMP_SLE && isNSW) |
| 1674 | return getTrue(ITy); |
| 1675 | } |
| 1676 | if (Delta == 1) { |
| 1677 | if (Pred0 == ICmpInst::ICMP_UGT && Pred1 == ICmpInst::ICMP_SLE) |
| 1678 | return getTrue(ITy); |
| 1679 | if (Pred0 == ICmpInst::ICMP_SGT && Pred1 == ICmpInst::ICMP_SLE && isNSW) |
| 1680 | return getTrue(ITy); |
| 1681 | } |
| 1682 | } |
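| | // As above, getBoolValue() is just a non-zero test: with 'nuw' on the add,
| | // the unsigned cases below hold for any non-zero C0.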
| 1683 | if (C0->getBoolValue() && isNUW) { |
| 1684 | if (Delta == 2) |
| 1685 | if (Pred0 == ICmpInst::ICMP_UGE && Pred1 == ICmpInst::ICMP_ULE) |
| 1686 | return getTrue(ITy); |
| 1687 | if (Delta == 1) |
| 1688 | if (Pred0 == ICmpInst::ICMP_UGT && Pred1 == ICmpInst::ICMP_ULE) |
| 1689 | return getTrue(ITy); |
| 1690 | } |
| 1691 | |
| 1692 | return nullptr; |
| 1693 | } |
| 1694 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1695 | static Value *simplifyOrOfICmps(ICmpInst *Op0, ICmpInst *Op1, |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1696 | const SimplifyQuery &Q) { |
| 1697 | if (Value *X = simplifyUnsignedRangeCheck(Op0, Op1, /*IsAnd=*/false, Q)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1698 | return X; |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1699 | if (Value *X = simplifyUnsignedRangeCheck(Op1, Op0, /*IsAnd=*/false, Q)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1700 | return X; |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1701 | |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1702 | if (Value *X = simplifyOrOfICmpsWithSameOperands(Op0, Op1)) |
| 1703 | return X; |
| 1704 | if (Value *X = simplifyOrOfICmpsWithSameOperands(Op1, Op0)) |
| 1705 | return X; |
| 1706 | |
| 1707 | if (Value *X = simplifyAndOrOfICmpsWithConstants(Op0, Op1, false)) |
| 1708 | return X; |
| 1709 | |
Sanjay Patel | 6ef6aa9 | 2018-01-11 23:27:37 +0000 | [diff] [blame] | 1710 | if (Value *X = simplifyAndOrOfICmpsWithZero(Op0, Op1, false)) |
| 1711 | return X; |
| 1712 | |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1713 | if (Value *X = simplifyOrOfICmpsWithAdd(Op0, Op1, Q.IIQ)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1714 | return X; |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1715 | if (Value *X = simplifyOrOfICmpsWithAdd(Op1, Op0, Q.IIQ)) |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1716 | return X; |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1717 | |
| 1718 | return nullptr; |
| 1719 | } |
| 1720 | |
Matt Arsenault | d54b7f0 | 2018-08-09 22:40:08 +0000 | [diff] [blame] | 1721 | static Value *simplifyAndOrOfFCmps(const TargetLibraryInfo *TLI, |
| 1722 | FCmpInst *LHS, FCmpInst *RHS, bool IsAnd) { |
Sanjay Patel | eb731b0 | 2017-11-19 15:34:27 +0000 | [diff] [blame] | 1723 | Value *LHS0 = LHS->getOperand(0), *LHS1 = LHS->getOperand(1); |
| 1724 | Value *RHS0 = RHS->getOperand(0), *RHS1 = RHS->getOperand(1); |
| 1725 | if (LHS0->getType() != RHS0->getType()) |
| 1726 | return nullptr; |
| 1727 | |
| 1728 | FCmpInst::Predicate PredL = LHS->getPredicate(), PredR = RHS->getPredicate(); |
| 1729 | if ((PredL == FCmpInst::FCMP_ORD && PredR == FCmpInst::FCMP_ORD && IsAnd) || |
| 1730 | (PredL == FCmpInst::FCMP_UNO && PredR == FCmpInst::FCMP_UNO && !IsAnd)) { |
| 1731 | // (fcmp ord NNAN, X) & (fcmp ord X, Y) --> fcmp ord X, Y |
| 1732 | // (fcmp ord NNAN, X) & (fcmp ord Y, X) --> fcmp ord Y, X |
| 1733 | // (fcmp ord X, NNAN) & (fcmp ord X, Y) --> fcmp ord X, Y |
| 1734 | // (fcmp ord X, NNAN) & (fcmp ord Y, X) --> fcmp ord Y, X |
| 1735 | // (fcmp uno NNAN, X) | (fcmp uno X, Y) --> fcmp uno X, Y |
| 1736 | // (fcmp uno NNAN, X) | (fcmp uno Y, X) --> fcmp uno Y, X |
| 1737 | // (fcmp uno X, NNAN) | (fcmp uno X, Y) --> fcmp uno X, Y |
| 1738 | // (fcmp uno X, NNAN) | (fcmp uno Y, X) --> fcmp uno Y, X |
Matt Arsenault | d54b7f0 | 2018-08-09 22:40:08 +0000 | [diff] [blame] | 1739 | if ((isKnownNeverNaN(LHS0, TLI) && (LHS1 == RHS0 || LHS1 == RHS1)) || |
| 1740 | (isKnownNeverNaN(LHS1, TLI) && (LHS0 == RHS0 || LHS0 == RHS1))) |
Sanjay Patel | eb731b0 | 2017-11-19 15:34:27 +0000 | [diff] [blame] | 1741 | return RHS; |
| 1742 | |
| 1743 | // (fcmp ord X, Y) & (fcmp ord NNAN, X) --> fcmp ord X, Y |
| 1744 | // (fcmp ord Y, X) & (fcmp ord NNAN, X) --> fcmp ord Y, X |
| 1745 | // (fcmp ord X, Y) & (fcmp ord X, NNAN) --> fcmp ord X, Y |
| 1746 | // (fcmp ord Y, X) & (fcmp ord X, NNAN) --> fcmp ord Y, X |
| 1747 | // (fcmp uno X, Y) | (fcmp uno NNAN, X) --> fcmp uno X, Y |
| 1748 | // (fcmp uno Y, X) | (fcmp uno NNAN, X) --> fcmp uno Y, X |
| 1749 | // (fcmp uno X, Y) | (fcmp uno X, NNAN) --> fcmp uno X, Y |
| 1750 | // (fcmp uno Y, X) | (fcmp uno X, NNAN) --> fcmp uno Y, X |
Matt Arsenault | d54b7f0 | 2018-08-09 22:40:08 +0000 | [diff] [blame] | 1751 | if ((isKnownNeverNaN(RHS0, TLI) && (RHS1 == LHS0 || RHS1 == LHS1)) || |
| 1752 | (isKnownNeverNaN(RHS1, TLI) && (RHS0 == LHS0 || RHS0 == LHS1))) |
Sanjay Patel | eb731b0 | 2017-11-19 15:34:27 +0000 | [diff] [blame] | 1753 | return LHS; |
| 1754 | } |
| 1755 | |
| 1756 | return nullptr; |
| 1757 | } |
| 1758 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1759 | static Value *simplifyAndOrOfCmps(const SimplifyQuery &Q, |
Matt Arsenault | d54b7f0 | 2018-08-09 22:40:08 +0000 | [diff] [blame] | 1760 | Value *Op0, Value *Op1, bool IsAnd) { |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1761 | // Look through casts of the 'and' operands to find compares. |
| 1762 | auto *Cast0 = dyn_cast<CastInst>(Op0); |
| 1763 | auto *Cast1 = dyn_cast<CastInst>(Op1); |
| 1764 | if (Cast0 && Cast1 && Cast0->getOpcode() == Cast1->getOpcode() && |
| 1765 | Cast0->getSrcTy() == Cast1->getSrcTy()) { |
| 1766 | Op0 = Cast0->getOperand(0); |
| 1767 | Op1 = Cast1->getOperand(0); |
| 1768 | } |
| 1769 | |
Sanjay Patel | eb731b0 | 2017-11-19 15:34:27 +0000 | [diff] [blame] | 1770 | Value *V = nullptr; |
| 1771 | auto *ICmp0 = dyn_cast<ICmpInst>(Op0); |
| 1772 | auto *ICmp1 = dyn_cast<ICmpInst>(Op1); |
| 1773 | if (ICmp0 && ICmp1) |
Roman Lebedev | 00c1ee4 | 2019-09-11 15:32:46 +0000 | [diff] [blame] | 1774 | V = IsAnd ? simplifyAndOfICmps(ICmp0, ICmp1, Q) |
| 1775 | : simplifyOrOfICmps(ICmp0, ICmp1, Q); |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1776 | |
Sanjay Patel | eb731b0 | 2017-11-19 15:34:27 +0000 | [diff] [blame] | 1777 | auto *FCmp0 = dyn_cast<FCmpInst>(Op0); |
| 1778 | auto *FCmp1 = dyn_cast<FCmpInst>(Op1); |
| 1779 | if (FCmp0 && FCmp1) |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1780 | V = simplifyAndOrOfFCmps(Q.TLI, FCmp0, FCmp1, IsAnd); |
Sanjay Patel | eb731b0 | 2017-11-19 15:34:27 +0000 | [diff] [blame] | 1781 | |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1782 | if (!V) |
| 1783 | return nullptr; |
| 1784 | if (!Cast0) |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1785 | return V; |
Craig Topper | 348314d | 2017-05-26 22:42:34 +0000 | [diff] [blame] | 1786 | |
| 1787 | // If we looked through casts, we can only handle a constant simplification |
| 1788 | // because we are not allowed to create a cast instruction here. |
| 1789 | if (auto *C = dyn_cast<Constant>(V)) |
| 1790 | return ConstantExpr::getCast(Cast0->getOpcode(), C, Cast0->getType()); |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1791 | |
| 1792 | return nullptr; |
| 1793 | } |
| 1794 | |
Roman Lebedev | c584786 | 2019-08-29 12:48:04 +0000 | [diff] [blame] | 1795 | /// Check that Op1 is in the expected form, i.e.:
| 1796 | /// %Agg = tail call { i4, i1 } @llvm.[us]mul.with.overflow.i4(i4 %X, i4 %???) |
| 1797 | /// %Op1 = extractvalue { i4, i1 } %Agg, 1 |
| 1798 | static bool omitCheckForZeroBeforeMulWithOverflowInternal(Value *Op1, |
| 1799 | Value *X) { |
| 1800 | auto *Extract = dyn_cast<ExtractValueInst>(Op1); |
| 1801 | // We should only be extracting the overflow bit. |
| 1802 | if (!Extract || !Extract->getIndices().equals(1)) |
| 1803 | return false; |
| 1804 | Value *Agg = Extract->getAggregateOperand(); |
| 1805 | // This should be a multiplication-with-overflow intrinsic. |
| 1806 | if (!match(Agg, m_CombineOr(m_Intrinsic<Intrinsic::umul_with_overflow>(), |
| 1807 | m_Intrinsic<Intrinsic::smul_with_overflow>()))) |
| 1808 | return false; |
| 1809 | // One of its multipliers should be the value we checked for zero before. |
| 1810 | if (!match(Agg, m_CombineOr(m_Argument<0>(m_Specific(X)), |
| 1811 | m_Argument<1>(m_Specific(X))))) |
| 1812 | return false; |
| 1813 | return true; |
| 1814 | } |
| 1815 | |
Roman Lebedev | aaf6ab4 | 2019-08-29 12:47:50 +0000 | [diff] [blame] | 1816 | /// The @llvm.[us]mul.with.overflow intrinsic could have been folded from some |
| 1817 | /// other form of check, e.g. one that was using division; it may have been |
| 1818 | /// guarded against division-by-zero. We can drop that check now. |
| 1819 | /// Look for: |
| 1820 | /// %Op0 = icmp ne i4 %X, 0 |
| 1821 | /// %Agg = tail call { i4, i1 } @llvm.[us]mul.with.overflow.i4(i4 %X, i4 %???) |
| 1822 | /// %Op1 = extractvalue { i4, i1 } %Agg, 1 |
| 1823 | /// %??? = and i1 %Op0, %Op1 |
| 1824 | /// We can just return %Op1 |
| 1825 | static Value *omitCheckForZeroBeforeMulWithOverflow(Value *Op0, Value *Op1) { |
| 1826 | ICmpInst::Predicate Pred; |
| 1827 | Value *X; |
| 1828 | if (!match(Op0, m_ICmp(Pred, m_Value(X), m_Zero())) || |
| 1829 | Pred != ICmpInst::Predicate::ICMP_NE) |
| 1830 | return nullptr; |
Roman Lebedev | c584786 | 2019-08-29 12:48:04 +0000 | [diff] [blame] | 1831 | // Is Op1 in expected form? |
| 1832 | if (!omitCheckForZeroBeforeMulWithOverflowInternal(Op1, X)) |
Roman Lebedev | aaf6ab4 | 2019-08-29 12:47:50 +0000 | [diff] [blame] | 1833 | return nullptr; |
| 1834 | // Can omit 'and', and just return the overflow bit. |
| 1835 | return Op1; |
| 1836 | } |
| 1837 | |
Roman Lebedev | c584786 | 2019-08-29 12:48:04 +0000 | [diff] [blame] | 1838 | /// The @llvm.[us]mul.with.overflow intrinsic could have been folded from some |
| 1839 | /// other form of check, e.g. one that was using division; it may have been |
| 1840 | /// guarded against division-by-zero. We can drop that check now. |
| 1841 | /// Look for: |
| 1842 | /// %Op0 = icmp eq i4 %X, 0 |
| 1843 | /// %Agg = tail call { i4, i1 } @llvm.[us]mul.with.overflow.i4(i4 %X, i4 %???) |
| 1844 | /// %Op1 = extractvalue { i4, i1 } %Agg, 1 |
| 1845 | /// %NotOp1 = xor i1 %Op1, true |
| 1846 | /// %or = or i1 %Op0, %NotOp1 |
| 1847 | /// We can just return %NotOp1 |
| 1848 | static Value *omitCheckForZeroBeforeInvertedMulWithOverflow(Value *Op0, |
| 1849 | Value *NotOp1) { |
| 1850 | ICmpInst::Predicate Pred; |
| 1851 | Value *X; |
| 1852 | if (!match(Op0, m_ICmp(Pred, m_Value(X), m_Zero())) || |
| 1853 | Pred != ICmpInst::Predicate::ICMP_EQ) |
| 1854 | return nullptr; |
| 1855 | // We expect the other hand of an 'or' to be a 'not'. |
| 1856 | Value *Op1; |
| 1857 | if (!match(NotOp1, m_Not(m_Value(Op1)))) |
| 1858 | return nullptr; |
| 1859 | // Is Op1 in expected form? |
| 1860 | if (!omitCheckForZeroBeforeMulWithOverflowInternal(Op1, X)) |
| 1861 | return nullptr; |
| 1862 | // Can omit 'or', and just return the inverted overflow bit.
| 1863 | return NotOp1; |
| 1864 | } |
| 1865 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 1866 | /// Given operands for an And, see if we can fold the result. |
| 1867 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 1868 | static Value *SimplifyAndInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
Chad Rosier | c24b86f | 2011-12-01 03:08:23 +0000 | [diff] [blame] | 1869 | unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 1870 | if (Constant *C = foldOrCommuteConstant(Instruction::And, Op0, Op1, Q)) |
| 1871 | return C; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1872 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1873 | // X & undef -> 0 |
Duncan Sands | a29ea9a | 2011-02-01 09:06:20 +0000 | [diff] [blame] | 1874 | if (match(Op1, m_Undef())) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1875 | return Constant::getNullValue(Op0->getType()); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1876 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1877 | // X & X = X |
Duncan Sands | 772749a | 2011-01-01 20:08:02 +0000 | [diff] [blame] | 1878 | if (Op0 == Op1) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1879 | return Op0; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1880 | |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 1881 | // X & 0 = 0 |
| 1882 | if (match(Op1, m_Zero())) |
Sanjay Patel | 30be665 | 2018-04-22 17:07:44 +0000 | [diff] [blame] | 1883 | return Constant::getNullValue(Op0->getType()); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1884 | |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 1885 | // X & -1 = X |
| 1886 | if (match(Op1, m_AllOnes())) |
| 1887 | return Op0; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1888 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1889 | // A & ~A = ~A & A = 0 |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 1890 | if (match(Op0, m_Not(m_Specific(Op1))) || |
| 1891 | match(Op1, m_Not(m_Specific(Op0)))) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1892 | return Constant::getNullValue(Op0->getType()); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1893 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1894 | // (A | ?) & A = A |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 1895 | if (match(Op0, m_c_Or(m_Specific(Op1), m_Value()))) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1896 | return Op1; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1897 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1898 | // A & (A | ?) = A |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 1899 | if (match(Op1, m_c_Or(m_Specific(Op0), m_Value()))) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 1900 | return Op0; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 1901 | |
Sanjay Patel | 877364f | 2017-05-16 21:51:04 +0000 | [diff] [blame] | 1902 | // A mask that only clears known zeros of a shifted value is a no-op. |
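| | // E.g., for i8: and (shl X, 3), -8 --> shl X, 3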
| 1903 | Value *X; |
| 1904 | const APInt *Mask; |
| 1905 | const APInt *ShAmt; |
| 1906 | if (match(Op1, m_APInt(Mask))) { |
| 1907 | // If all bits in the inverted and shifted mask are clear: |
| 1908 | // and (shl X, ShAmt), Mask --> shl X, ShAmt |
| 1909 | if (match(Op0, m_Shl(m_Value(X), m_APInt(ShAmt))) && |
| 1910 | (~(*Mask)).lshr(*ShAmt).isNullValue()) |
| 1911 | return Op0; |
| 1912 | |
| 1913 | // If all bits in the inverted and shifted mask are clear: |
| 1914 | // and (lshr X, ShAmt), Mask --> lshr X, ShAmt |
| 1915 | if (match(Op0, m_LShr(m_Value(X), m_APInt(ShAmt))) && |
| 1916 | (~(*Mask)).shl(*ShAmt).isNullValue()) |
| 1917 | return Op0; |
| 1918 | } |
| 1919 | |
Roman Lebedev | aaf6ab4 | 2019-08-29 12:47:50 +0000 | [diff] [blame] | 1920 | // If we have a multiplication overflow check that is being 'and'ed with a |
| 1921 | // check that one of the multipliers is not zero, we can omit the 'and', and |
| 1922 | // only keep the overflow check. |
| 1923 | if (Value *V = omitCheckForZeroBeforeMulWithOverflow(Op0, Op1)) |
| 1924 | return V; |
| 1925 | if (Value *V = omitCheckForZeroBeforeMulWithOverflow(Op1, Op0)) |
| 1926 | return V; |
| 1927 | |
Duncan Sands | ba286d7 | 2011-10-26 20:55:21 +0000 | [diff] [blame] | 1928 | // A & (-A) = A if A is a power of two or zero. |
| 1929 | if (match(Op0, m_Neg(m_Specific(Op1))) || |
| 1930 | match(Op1, m_Neg(m_Specific(Op0)))) { |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1931 | if (isKnownToBeAPowerOfTwo(Op0, Q.DL, /*OrZero*/ true, 0, Q.AC, Q.CxtI, |
| 1932 | Q.DT)) |
Duncan Sands | ba286d7 | 2011-10-26 20:55:21 +0000 | [diff] [blame] | 1933 | return Op0; |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1934 | if (isKnownToBeAPowerOfTwo(Op1, Q.DL, /*OrZero*/ true, 0, Q.AC, Q.CxtI, |
| 1935 | Q.DT)) |
Duncan Sands | ba286d7 | 2011-10-26 20:55:21 +0000 | [diff] [blame] | 1936 | return Op1; |
| 1937 | } |
| 1938 | |
Sanjay Patel | b342f02 | 2019-06-20 22:55:28 +0000 | [diff] [blame] | 1939 | // This is a similar pattern used for checking if a value is a power-of-2: |
| 1940 | // (A - 1) & A --> 0 (if A is a power-of-2 or 0) |
| 1941 | // A & (A - 1) --> 0 (if A is a power-of-2 or 0) |
| 1942 | if (match(Op0, m_Add(m_Specific(Op1), m_AllOnes())) && |
| 1943 | isKnownToBeAPowerOfTwo(Op1, Q.DL, /*OrZero*/ true, 0, Q.AC, Q.CxtI, Q.DT)) |
| 1944 | return Constant::getNullValue(Op1->getType()); |
| 1945 | if (match(Op1, m_Add(m_Specific(Op0), m_AllOnes())) && |
| 1946 | isKnownToBeAPowerOfTwo(Op0, Q.DL, /*OrZero*/ true, 0, Q.AC, Q.CxtI, Q.DT)) |
| 1947 | return Constant::getNullValue(Op0->getType()); |
| 1948 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 1949 | if (Value *V = simplifyAndOrOfCmps(Q, Op0, Op1, true)) |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 1950 | return V; |
Sanjay Patel | 9ad8fb6 | 2016-06-20 20:59:59 +0000 | [diff] [blame] | 1951 | |
Duncan Sands | 6c7a52c | 2010-12-21 08:49:00 +0000 | [diff] [blame] | 1952 | // Try some generic simplifications for associative operations. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1953 | if (Value *V = SimplifyAssociativeBinOp(Instruction::And, Op0, Op1, Q, |
| 1954 | MaxRecurse)) |
Duncan Sands | 6c7a52c | 2010-12-21 08:49:00 +0000 | [diff] [blame] | 1955 | return V; |
Benjamin Kramer | 8c35fb0 | 2010-09-10 22:39:55 +0000 | [diff] [blame] | 1956 | |
Duncan Sands | ee3ec6e | 2010-12-21 13:32:22 +0000 | [diff] [blame] | 1957 | // And distributes over Or. Try some generic simplifications based on this. |
| 1958 | if (Value *V = ExpandBinOp(Instruction::And, Op0, Op1, Instruction::Or, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1959 | Q, MaxRecurse)) |
Duncan Sands | ee3ec6e | 2010-12-21 13:32:22 +0000 | [diff] [blame] | 1960 | return V; |
| 1961 | |
| 1962 | // And distributes over Xor. Try some generic simplifications based on this. |
| 1963 | if (Value *V = ExpandBinOp(Instruction::And, Op0, Op1, Instruction::Xor, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1964 | Q, MaxRecurse)) |
Duncan Sands | ee3ec6e | 2010-12-21 13:32:22 +0000 | [diff] [blame] | 1965 | return V; |
| 1966 | |
Duncan Sands | b0579e9 | 2010-11-10 13:00:08 +0000 | [diff] [blame] | 1967 | // If the operation is with the result of a select instruction, check whether |
| 1968 | // operating on either branch of the select always yields the same value. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 1969 | if (isa<SelectInst>(Op0) || isa<SelectInst>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1970 | if (Value *V = ThreadBinOpOverSelect(Instruction::And, Op0, Op1, Q, |
| 1971 | MaxRecurse)) |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 1972 | return V; |
| 1973 | |
| 1974 | // If the operation is with the result of a phi instruction, check whether |
| 1975 | // operating on all incoming values of the phi always yields the same value. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 1976 | if (isa<PHINode>(Op0) || isa<PHINode>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 1977 | if (Value *V = ThreadBinOpOverPHI(Instruction::And, Op0, Op1, Q, |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 1978 | MaxRecurse)) |
Duncan Sands | b0579e9 | 2010-11-10 13:00:08 +0000 | [diff] [blame] | 1979 | return V; |
| 1980 | |
Hiroshi Inoue | 73f8b25 | 2018-08-03 05:39:48 +0000 | [diff] [blame] | 1981 | // Assuming the effective width of Y is not larger than A, i.e. all bits |
| 1982 | // from X and Y are disjoint in (X << A) | Y, |
| 1983 | // if the mask of this AND op covers all bits of X or Y, while it covers |
| 1984 | // no bits from the other, we can bypass this AND op. E.g., |
| 1985 | // ((X << A) | Y) & Mask -> Y, |
| 1986 | // if Mask = ((1 << effective_width_of(Y)) - 1) |
| 1987 | // ((X << A) | Y) & Mask -> X << A, |
| 1988 | // if Mask = ((1 << effective_width_of(X)) - 1) << A |
| 1989 | // SimplifyDemandedBits in InstCombine can optimize the general case. |
| 1990 | // This pattern aims to help other passes for a common case. |
| 1991 | Value *Y, *XShifted; |
| 1992 | if (match(Op1, m_APInt(Mask)) && |
| 1993 | match(Op0, m_c_Or(m_CombineAnd(m_NUWShl(m_Value(X), m_APInt(ShAmt)), |
| 1994 | m_Value(XShifted)), |
| 1995 | m_Value(Y)))) { |
Hiroshi Inoue | 73f8b25 | 2018-08-03 05:39:48 +0000 | [diff] [blame] | 1996 | const unsigned Width = Op0->getType()->getScalarSizeInBits(); |
Benjamin Kramer | bae6aab | 2018-08-12 11:43:03 +0000 | [diff] [blame] | 1997 | const unsigned ShftCnt = ShAmt->getLimitedValue(Width); |
| 1998 | const KnownBits YKnown = computeKnownBits(Y, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
Hiroshi Inoue | 73f8b25 | 2018-08-03 05:39:48 +0000 | [diff] [blame] | 1999 | const unsigned EffWidthY = Width - YKnown.countMinLeadingZeros(); |
| 2000 | if (EffWidthY <= ShftCnt) { |
| 2001 | const KnownBits XKnown = computeKnownBits(X, Q.DL, 0, Q.AC, Q.CxtI, |
| 2002 | Q.DT); |
| 2003 | const unsigned EffWidthX = Width - XKnown.countMinLeadingZeros(); |
| 2004 | const APInt EffBitsY = APInt::getLowBitsSet(Width, EffWidthY); |
| 2005 | const APInt EffBitsX = APInt::getLowBitsSet(Width, EffWidthX) << ShftCnt; |
| 2006 | // If the mask is extracting all bits from X or Y as is, we can skip |
| 2007 | // this AND op. |
| 2008 | if (EffBitsY.isSubsetOf(*Mask) && !EffBitsX.intersects(*Mask)) |
| 2009 | return Y; |
| 2010 | if (EffBitsX.isSubsetOf(*Mask) && !EffBitsY.intersects(*Mask)) |
| 2011 | return XShifted; |
| 2012 | } |
| 2013 | } |
| 2014 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2015 | return nullptr; |
Chris Lattner | 084a1b5 | 2009-11-09 22:57:59 +0000 | [diff] [blame] | 2016 | } |
| 2017 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2018 | Value *llvm::SimplifyAndInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 2019 | return ::SimplifyAndInst(Op0, Op1, Q, RecursionLimit); |
| 2020 | } |
| 2021 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 2022 | /// Given operands for an Or, see if we can fold the result. |
| 2023 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2024 | static Value *SimplifyOrInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2025 | unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 2026 | if (Constant *C = foldOrCommuteConstant(Instruction::Or, Op0, Op1, Q)) |
| 2027 | return C; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 2028 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2029 | // X | undef -> -1 |
Sanjay Patel | adf6e88 | 2018-02-18 18:05:08 +0000 | [diff] [blame] | 2030 | // X | -1 = -1 |
| 2031 | // Do not return Op1 because it may contain undef elements if it's a vector. |
| 2032 | if (match(Op1, m_Undef()) || match(Op1, m_AllOnes())) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2033 | return Constant::getAllOnesValue(Op0->getType()); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 2034 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2035 | // X | X = X |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2036 | // X | 0 = X |
Sanjay Patel | adf6e88 | 2018-02-18 18:05:08 +0000 | [diff] [blame] | 2037 | if (Op0 == Op1 || match(Op1, m_Zero())) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2038 | return Op0; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 2039 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2040 | // A | ~A = ~A | A = -1 |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 2041 | if (match(Op0, m_Not(m_Specific(Op1))) || |
| 2042 | match(Op1, m_Not(m_Specific(Op0)))) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2043 | return Constant::getAllOnesValue(Op0->getType()); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 2044 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2045 | // (A & ?) | A = A |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 2046 | if (match(Op0, m_c_And(m_Specific(Op1), m_Value()))) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2047 | return Op1; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 2048 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2049 | // A | (A & ?) = A |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 2050 | if (match(Op1, m_c_And(m_Specific(Op0), m_Value()))) |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2051 | return Op0; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 2052 | |
Benjamin Kramer | 5b7a4e0 | 2011-02-20 15:20:01 +0000 | [diff] [blame] | 2053 | // ~(A & ?) | A = -1 |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 2054 | if (match(Op0, m_Not(m_c_And(m_Specific(Op1), m_Value())))) |
Benjamin Kramer | 5b7a4e0 | 2011-02-20 15:20:01 +0000 | [diff] [blame] | 2055 | return Constant::getAllOnesValue(Op1->getType()); |
| 2056 | |
| 2057 | // A | ~(A & ?) = -1 |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 2058 | if (match(Op1, m_Not(m_c_And(m_Specific(Op0), m_Value()))))
Benjamin Kramer | 5b7a4e0 | 2011-02-20 15:20:01 +0000 | [diff] [blame] | 2059 | return Constant::getAllOnesValue(Op0->getType()); |
| 2060 | |
Craig Topper | dad7d8d | 2017-07-16 06:57:41 +0000 | [diff] [blame] | 2061 | Value *A, *B; |
Sanjay Patel | 0889225 | 2017-04-24 18:24:36 +0000 | [diff] [blame] | 2062 | // (A & ~B) | (A ^ B) -> (A ^ B) |
| 2063 | // (~B & A) | (A ^ B) -> (A ^ B) |
Craig Topper | 0b650d3 | 2017-04-25 17:01:32 +0000 | [diff] [blame] | 2064 | // (A & ~B) | (B ^ A) -> (B ^ A) |
| 2065 | // (~B & A) | (B ^ A) -> (B ^ A) |
| 2066 | if (match(Op1, m_Xor(m_Value(A), m_Value(B))) && |
| 2067 | (match(Op0, m_c_And(m_Specific(A), m_Not(m_Specific(B)))) || |
| 2068 | match(Op0, m_c_And(m_Not(m_Specific(A)), m_Specific(B))))) |
Sanjay Patel | 0889225 | 2017-04-24 18:24:36 +0000 | [diff] [blame] | 2069 | return Op1; |
| 2070 | |
| 2071 | // Commute the 'or' operands. |
| 2072 | // (A ^ B) | (A & ~B) -> (A ^ B) |
| 2073 | // (A ^ B) | (~B & A) -> (A ^ B) |
Craig Topper | 0b650d3 | 2017-04-25 17:01:32 +0000 | [diff] [blame] | 2074 | // (B ^ A) | (A & ~B) -> (B ^ A) |
| 2075 | // (B ^ A) | (~B & A) -> (B ^ A) |
| 2076 | if (match(Op0, m_Xor(m_Value(A), m_Value(B))) && |
| 2077 | (match(Op1, m_c_And(m_Specific(A), m_Not(m_Specific(B)))) || |
| 2078 | match(Op1, m_c_And(m_Not(m_Specific(A)), m_Specific(B))))) |
Sanjay Patel | 0889225 | 2017-04-24 18:24:36 +0000 | [diff] [blame] | 2079 | return Op0; |
| 2080 | |
Craig Topper | 479daaf | 2017-05-14 07:54:43 +0000 | [diff] [blame] | 2081 | // (A & B) | (~A ^ B) -> (~A ^ B) |
| 2082 | // (B & A) | (~A ^ B) -> (~A ^ B) |
| 2083 | // (A & B) | (B ^ ~A) -> (B ^ ~A) |
| 2084 | // (B & A) | (B ^ ~A) -> (B ^ ~A) |
| 2085 | if (match(Op0, m_And(m_Value(A), m_Value(B))) && |
| 2086 | (match(Op1, m_c_Xor(m_Specific(A), m_Not(m_Specific(B)))) || |
| 2087 | match(Op1, m_c_Xor(m_Not(m_Specific(A)), m_Specific(B))))) |
| 2088 | return Op1; |
| 2089 | |
| 2090 | // (~A ^ B) | (A & B) -> (~A ^ B) |
| 2091 | // (~A ^ B) | (B & A) -> (~A ^ B) |
| 2092 | // (B ^ ~A) | (A & B) -> (B ^ ~A) |
| 2093 | // (B ^ ~A) | (B & A) -> (B ^ ~A) |
| 2094 | if (match(Op1, m_And(m_Value(A), m_Value(B))) && |
| 2095 | (match(Op0, m_c_Xor(m_Specific(A), m_Not(m_Specific(B)))) || |
| 2096 | match(Op0, m_c_Xor(m_Not(m_Specific(A)), m_Specific(B))))) |
| 2097 | return Op0; |
| 2098 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2099 | if (Value *V = simplifyAndOrOfCmps(Q, Op0, Op1, false)) |
Sanjay Patel | e42b4d5 | 2017-05-04 19:51:34 +0000 | [diff] [blame] | 2100 | return V; |
David Majnemer | a315bd8 | 2014-09-15 08:15:28 +0000 | [diff] [blame] | 2101 | |
Roman Lebedev | c584786 | 2019-08-29 12:48:04 +0000 | [diff] [blame] | 2102 | // If we have an inverted multiplication overflow check that is being 'or'ed
| 2103 | // with a check that one of the multipliers is zero, we can omit the 'or', and
| 2104 | // only keep the inverted overflow check.
| 2105 | if (Value *V = omitCheckForZeroBeforeInvertedMulWithOverflow(Op0, Op1)) |
| 2106 | return V; |
| 2107 | if (Value *V = omitCheckForZeroBeforeInvertedMulWithOverflow(Op1, Op0)) |
| 2108 | return V; |
| 2109 | |
Duncan Sands | 6c7a52c | 2010-12-21 08:49:00 +0000 | [diff] [blame] | 2110 | // Try some generic simplifications for associative operations. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2111 | if (Value *V = SimplifyAssociativeBinOp(Instruction::Or, Op0, Op1, Q, |
| 2112 | MaxRecurse)) |
Duncan Sands | 6c7a52c | 2010-12-21 08:49:00 +0000 | [diff] [blame] | 2113 | return V; |
Benjamin Kramer | 8c35fb0 | 2010-09-10 22:39:55 +0000 | [diff] [blame] | 2114 | |
Duncan Sands | ee3ec6e | 2010-12-21 13:32:22 +0000 | [diff] [blame] | 2115 | // Or distributes over And. Try some generic simplifications based on this. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2116 | if (Value *V = ExpandBinOp(Instruction::Or, Op0, Op1, Instruction::And, Q, |
| 2117 | MaxRecurse)) |
Duncan Sands | ee3ec6e | 2010-12-21 13:32:22 +0000 | [diff] [blame] | 2118 | return V; |
| 2119 | |
Duncan Sands | b0579e9 | 2010-11-10 13:00:08 +0000 | [diff] [blame] | 2120 | // If the operation is with the result of a select instruction, check whether |
| 2121 | // operating on either branch of the select always yields the same value. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 2122 | if (isa<SelectInst>(Op0) || isa<SelectInst>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2123 | if (Value *V = ThreadBinOpOverSelect(Instruction::Or, Op0, Op1, Q, |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 2124 | MaxRecurse)) |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 2125 | return V; |
| 2126 | |
Craig Topper | 50500d5 | 2017-05-26 05:16:20 +0000 | [diff] [blame] | 2127 | // (A & C1)|(B & C2) |
Craig Topper | 1da22c3 | 2017-05-26 19:03:53 +0000 | [diff] [blame] | 2128 | const APInt *C1, *C2; |
| 2129 | if (match(Op0, m_And(m_Value(A), m_APInt(C1))) && |
| 2130 | match(Op1, m_And(m_Value(B), m_APInt(C2)))) { |
| 2131 | if (*C1 == ~*C2) { |
Nick Lewycky | 8561a49 | 2014-06-19 03:51:46 +0000 | [diff] [blame] | 2132 | // (A & C1)|(B & C2) |
| 2133 | // If we have: ((V + N) & C1) | (V & C2) |
| 2134 | // .. and C2 = ~C1 and C2 is 0+1+ and (N & C2) == 0 |
| 2135 | // replace with V+N. |
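| | // E.g., for i32: ((V + 256) & -256) | (V & 255) --> V + 256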
Craig Topper | c8bebb1 | 2017-05-26 19:03:59 +0000 | [diff] [blame] | 2136 | Value *N; |
Craig Topper | 1da22c3 | 2017-05-26 19:03:53 +0000 | [diff] [blame] | 2137 | if (C2->isMask() && // C2 == 0+1+ |
Craig Topper | c8bebb1 | 2017-05-26 19:03:59 +0000 | [diff] [blame] | 2138 | match(A, m_c_Add(m_Specific(B), m_Value(N)))) { |
Nick Lewycky | 8561a49 | 2014-06-19 03:51:46 +0000 | [diff] [blame] | 2139 | // Add commutes, try both ways. |
Craig Topper | c8bebb1 | 2017-05-26 19:03:59 +0000 | [diff] [blame] | 2140 | if (MaskedValueIsZero(N, *C2, Q.DL, 0, Q.AC, Q.CxtI, Q.DT)) |
Nick Lewycky | 8561a49 | 2014-06-19 03:51:46 +0000 | [diff] [blame] | 2141 | return A; |
| 2142 | } |
| 2143 | // Or commutes, try both ways. |
Craig Topper | 1da22c3 | 2017-05-26 19:03:53 +0000 | [diff] [blame] | 2144 | if (C1->isMask() && |
Craig Topper | c8bebb1 | 2017-05-26 19:03:59 +0000 | [diff] [blame] | 2145 | match(B, m_c_Add(m_Specific(A), m_Value(N)))) { |
Nick Lewycky | 8561a49 | 2014-06-19 03:51:46 +0000 | [diff] [blame] | 2146 | // Add commutes, try both ways. |
Craig Topper | c8bebb1 | 2017-05-26 19:03:59 +0000 | [diff] [blame] | 2147 | if (MaskedValueIsZero(N, *C1, Q.DL, 0, Q.AC, Q.CxtI, Q.DT)) |
Nick Lewycky | 8561a49 | 2014-06-19 03:51:46 +0000 | [diff] [blame] | 2148 | return B; |
| 2149 | } |
| 2150 | } |
| 2151 | } |
| 2152 | |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 2153 | // If the operation is with the result of a phi instruction, check whether |
| 2154 | // operating on all incoming values of the phi always yields the same value. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 2155 | if (isa<PHINode>(Op0) || isa<PHINode>(Op1)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2156 | if (Value *V = ThreadBinOpOverPHI(Instruction::Or, Op0, Op1, Q, MaxRecurse)) |
Duncan Sands | b0579e9 | 2010-11-10 13:00:08 +0000 | [diff] [blame] | 2157 | return V; |
| 2158 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2159 | return nullptr; |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 2160 | } |
| 2161 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2162 | Value *llvm::SimplifyOrInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 2163 | return ::SimplifyOrInst(Op0, Op1, Q, RecursionLimit); |
| 2164 | } |
| 2165 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 2166 | /// Given operands for a Xor, see if we can fold the result. |
| 2167 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2168 | static Value *SimplifyXorInst(Value *Op0, Value *Op1, const SimplifyQuery &Q, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2169 | unsigned MaxRecurse) { |
Sanjay Patel | 8b5ad3f | 2017-04-01 19:05:11 +0000 | [diff] [blame] | 2170 | if (Constant *C = foldOrCommuteConstant(Instruction::Xor, Op0, Op1, Q)) |
| 2171 | return C; |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2172 | |
| 2173 | // A ^ undef -> undef |
Duncan Sands | a29ea9a | 2011-02-01 09:06:20 +0000 | [diff] [blame] | 2174 | if (match(Op1, m_Undef())) |
Duncan Sands | 019a418 | 2010-12-15 11:02:22 +0000 | [diff] [blame] | 2175 | return Op1; |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2176 | |
| 2177 | // A ^ 0 = A |
| 2178 | if (match(Op1, m_Zero())) |
| 2179 | return Op0; |
| 2180 | |
Eli Friedman | ad3cfe7 | 2011-08-17 19:31:49 +0000 | [diff] [blame] | 2181 | // A ^ A = 0 |
| 2182 | if (Op0 == Op1) |
| 2183 | return Constant::getNullValue(Op0->getType()); |
| 2184 | |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2185 | // A ^ ~A = ~A ^ A = -1 |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 2186 | if (match(Op0, m_Not(m_Specific(Op1))) || |
| 2187 | match(Op1, m_Not(m_Specific(Op0)))) |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2188 | return Constant::getAllOnesValue(Op0->getType()); |
| 2189 | |
Duncan Sands | 6c7a52c | 2010-12-21 08:49:00 +0000 | [diff] [blame] | 2190 | // Try some generic simplifications for associative operations. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 2191 | if (Value *V = SimplifyAssociativeBinOp(Instruction::Xor, Op0, Op1, Q, |
| 2192 | MaxRecurse)) |
Duncan Sands | 6c7a52c | 2010-12-21 08:49:00 +0000 | [diff] [blame] | 2193 | return V; |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2194 | |
Duncan Sands | b238de0 | 2010-11-19 09:20:39 +0000 | [diff] [blame] | 2195 | // Threading Xor over selects and phi nodes is pointless, so don't bother. |
| 2196 | // Threading over the select in "A ^ select(cond, B, C)" means evaluating |
| 2197 | // "A^B" and "A^C" and seeing if they are equal; but they are equal if and |
| 2198 | // only if B and C are equal. If B and C are equal then (since we assume |
| 2199 | // that operands have already been simplified) "select(cond, B, C)" should |
| 2200 | // have been simplified to the common value of B and C already. Analysing |
| 2201 | // "A^B" and "A^C" thus gains nothing, but costs compile time. Similarly |
| 2202 | // for threading over phi nodes. |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2203 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2204 | return nullptr; |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 2205 | } |
| 2206 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2207 | Value *llvm::SimplifyXorInst(Value *Op0, Value *Op1, const SimplifyQuery &Q) { |
| 2208 | return ::SimplifyXorInst(Op0, Op1, Q, RecursionLimit); |
| 2209 | } |
| 2210 | |
| 2211 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 2212 | static Type *GetCompareTy(Value *Op) { |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 2213 | return CmpInst::makeCmpResultType(Op->getType()); |
| 2214 | } |
| 2215 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 2216 | /// Rummage around inside V looking for something equivalent to the comparison |
| 2217 | /// "LHS Pred RHS". Return such a value if found, otherwise return null. |
| 2218 | /// Helper function for analyzing max/min idioms. |
Duncan Sands | af32728 | 2011-05-07 16:56:49 +0000 | [diff] [blame] | 2219 | static Value *ExtractEquivalentCondition(Value *V, CmpInst::Predicate Pred, |
| 2220 | Value *LHS, Value *RHS) { |
| 2221 | SelectInst *SI = dyn_cast<SelectInst>(V); |
| 2222 | if (!SI) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2223 | return nullptr; |
Duncan Sands | af32728 | 2011-05-07 16:56:49 +0000 | [diff] [blame] | 2224 | CmpInst *Cmp = dyn_cast<CmpInst>(SI->getCondition()); |
| 2225 | if (!Cmp) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2226 | return nullptr; |
Duncan Sands | af32728 | 2011-05-07 16:56:49 +0000 | [diff] [blame] | 2227 | Value *CmpLHS = Cmp->getOperand(0), *CmpRHS = Cmp->getOperand(1); |
| 2228 | if (Pred == Cmp->getPredicate() && LHS == CmpLHS && RHS == CmpRHS) |
| 2229 | return Cmp; |
| 2230 | if (Pred == CmpInst::getSwappedPredicate(Cmp->getPredicate()) && |
| 2231 | LHS == CmpRHS && RHS == CmpLHS) |
| 2232 | return Cmp; |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2233 | return nullptr; |
Duncan Sands | af32728 | 2011-05-07 16:56:49 +0000 | [diff] [blame] | 2234 | } |
| 2235 | |
Dan Gohman | 9631d90 | 2013-02-01 00:49:06 +0000 | [diff] [blame] | 2236 | // A significant optimization not implemented here is assuming that alloca |
| 2237 | // addresses are not equal to incoming argument values. They don't *alias*, |
| 2238 | // as we say, but that doesn't mean they aren't equal, so we take a |
| 2239 | // conservative approach. |
| 2240 | // |
| 2241 | // This is inspired in part by C++11 5.10p1: |
| 2242 | // "Two pointers of the same type compare equal if and only if they are both |
| 2243 | // null, both point to the same function, or both represent the same |
| 2244 | // address." |
| 2245 | // |
| 2246 | // This is pretty permissive. |
| 2247 | // |
| 2248 | // It's also partly due to C11 6.5.9p6: |
| 2249 | // "Two pointers compare equal if and only if both are null pointers, both are |
| 2250 | // pointers to the same object (including a pointer to an object and a |
| 2251 | // subobject at its beginning) or function, both are pointers to one past the |
| 2252 | // last element of the same array object, or one is a pointer to one past the |
| 2253 | // end of one array object and the other is a pointer to the start of a |
NAKAMURA Takumi | 065fd35 | 2013-04-08 23:05:21 +0000 | [diff] [blame] | 2254 | // different array object that happens to immediately follow the first array |
Dan Gohman | 9631d90 | 2013-02-01 00:49:06 +0000 | [diff] [blame] | 2255 | // object in the address space.) |
| 2256 | // |
| 2257 | // C11's version is more restrictive; however, there's no reason why an argument
| 2258 | // couldn't be a one-past-the-end value for a stack object in the caller and be |
| 2259 | // equal to the beginning of a stack object in the callee. |
| 2260 | // |
| 2261 | // If the C and C++ standards are ever made sufficiently restrictive in this |
| 2262 | // area, it may be possible to update LLVM's semantics accordingly and reinstate |
| 2263 | // this optimization. |
Anna Thomas | 43d7e1c | 2016-05-03 14:58:21 +0000 | [diff] [blame] | 2264 | static Constant * |
| 2265 | computePointerICmp(const DataLayout &DL, const TargetLibraryInfo *TLI, |
| 2266 | const DominatorTree *DT, CmpInst::Predicate Pred, |
Nuno Lopes | 404f106 | 2017-09-09 18:23:11 +0000 | [diff] [blame] | 2267 | AssumptionCache *AC, const Instruction *CxtI, |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2268 | const InstrInfoQuery &IIQ, Value *LHS, Value *RHS) { |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2269 | // First, skip past any trivial no-ops. |
| 2270 | LHS = LHS->stripPointerCasts(); |
| 2271 | RHS = RHS->stripPointerCasts(); |
| 2272 | |
| 2273 | // A non-null pointer is not equal to a null pointer. |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2274 | if (llvm::isKnownNonZero(LHS, DL, 0, nullptr, nullptr, nullptr, |
| 2275 | IIQ.UseInstrInfo) && |
| 2276 | isa<ConstantPointerNull>(RHS) && |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2277 | (Pred == CmpInst::ICMP_EQ || Pred == CmpInst::ICMP_NE)) |
| 2278 | return ConstantInt::get(GetCompareTy(LHS), |
| 2279 | !CmpInst::isTrueWhenEqual(Pred)); |
| 2280 | |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 2281 | // We can only fold certain predicates on pointer comparisons. |
| 2282 | switch (Pred) { |
| 2283 | default: |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2284 | return nullptr; |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 2285 | |
| 2286 | // Equality comparisons are easy to fold.
| 2287 | case CmpInst::ICMP_EQ: |
| 2288 | case CmpInst::ICMP_NE: |
| 2289 | break; |
| 2290 | |
| 2291 | // We can only handle unsigned relational comparisons because 'inbounds' on |
| 2292 | // a GEP only protects against unsigned wrapping. |
| 2293 | case CmpInst::ICMP_UGT: |
| 2294 | case CmpInst::ICMP_UGE: |
| 2295 | case CmpInst::ICMP_ULT: |
| 2296 | case CmpInst::ICMP_ULE: |
| 2297 | // However, we have to switch them to their signed variants to handle |
| 2298 | // negative indices from the base pointer. |
| 2299 | Pred = ICmpInst::getSignedPredicate(Pred); |
| 2300 | break; |
| 2301 | } |
| 2302 | |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2303 | // Strip off any constant offsets so that we can reason about them. |
| 2304 | // It's tempting to use getUnderlyingObject or even just stripInBoundsOffsets |
| 2305 | // here and compare base addresses like AliasAnalysis does, however there are |
| 2306 | // numerous hazards. AliasAnalysis and its utilities rely on special rules |
| 2307 | // governing loads and stores which don't apply to icmps. Also, AliasAnalysis |
| 2308 | // doesn't need to guarantee pointer inequality when it says NoAlias. |
Rafael Espindola | 37dc9e1 | 2014-02-21 00:06:31 +0000 | [diff] [blame] | 2309 | Constant *LHSOffset = stripAndComputeConstantOffsets(DL, LHS); |
| 2310 | Constant *RHSOffset = stripAndComputeConstantOffsets(DL, RHS); |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 2311 | |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2312 | // If LHS and RHS are related via constant offsets to the same base |
| 2313 | // value, we can replace it with an icmp which just compares the offsets. |
| 2314 | if (LHS == RHS) |
| 2315 | return ConstantExpr::getICmp(Pred, LHSOffset, RHSOffset); |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 2316 | |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2317 | // Various optimizations for (in)equality comparisons. |
| 2318 | if (Pred == CmpInst::ICMP_EQ || Pred == CmpInst::ICMP_NE) { |
| 2319 | // Different non-empty allocations that exist at the same time have |
| 2320 | // different addresses (if the program can tell). Global variables always |
| 2321 | // exist, so they always exist during the lifetime of each other and all |
| 2322 | // allocas. Two different allocas usually have different addresses... |
| 2323 | // |
| 2324 | // However, if there's an @llvm.stackrestore dynamically in between two |
| 2325 | // allocas, they may have the same address. It's tempting to reduce the |
| 2326 | // scope of the problem by only looking at *static* allocas here. That would |
| 2327 | // cover the majority of allocas while significantly reducing the likelihood |
| 2328 | // of having an @llvm.stackrestore pop up in the middle. However, it's not |
| 2329 | // actually impossible for an @llvm.stackrestore to pop up in the middle of |
| 2330 | // an entry block. Also, if we have a block that's not attached to a |
| 2331 | // function, we can't tell if it's "static" under the current definition. |
| 2332 | // Theoretically, this problem could be fixed by creating a new kind of |
| 2333 | // instruction specifically for static allocas. Such a new instruction
| 2334 | // could be required to be at the top of the entry block, thus preventing it |
| 2335 | // from being subject to a @llvm.stackrestore. Instcombine could even |
| 2336 | // convert regular allocas into these special allocas. It'd be nifty. |
| 2337 | // However, until then, this problem remains open. |
| 2338 | // |
| 2339 | // So, we'll assume that two non-empty allocas have different addresses |
| 2340 | // for now. |
| 2341 | // |
| 2342 | // With all that, if the offsets are within the bounds of their allocations |
| 2343 | // (and not one-past-the-end! so we can't use inbounds!), and their |
| 2344 | // allocations aren't the same, the pointers are not equal. |
| 2345 | // |
| 2346 | // Note that it's not necessary to check for LHS being a global variable |
| 2347 | // address, due to canonicalization and constant folding. |
| 2348 | if (isa<AllocaInst>(LHS) && |
| 2349 | (isa<AllocaInst>(RHS) || isa<GlobalVariable>(RHS))) { |
Benjamin Kramer | c05aa95 | 2013-02-01 15:21:10 +0000 | [diff] [blame] | 2350 | ConstantInt *LHSOffsetCI = dyn_cast<ConstantInt>(LHSOffset); |
| 2351 | ConstantInt *RHSOffsetCI = dyn_cast<ConstantInt>(RHSOffset); |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2352 | uint64_t LHSSize, RHSSize; |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 2353 | ObjectSizeOpts Opts; |
| 2354 | Opts.NullIsUnknownSize = |
| 2355 | NullPointerIsDefined(cast<AllocaInst>(LHS)->getFunction()); |
Benjamin Kramer | c05aa95 | 2013-02-01 15:21:10 +0000 | [diff] [blame] | 2356 | if (LHSOffsetCI && RHSOffsetCI && |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 2357 | getObjectSize(LHS, LHSSize, DL, TLI, Opts) && |
| 2358 | getObjectSize(RHS, RHSSize, DL, TLI, Opts)) { |
Benjamin Kramer | c05aa95 | 2013-02-01 15:21:10 +0000 | [diff] [blame] | 2359 | const APInt &LHSOffsetValue = LHSOffsetCI->getValue(); |
| 2360 | const APInt &RHSOffsetValue = RHSOffsetCI->getValue(); |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2361 | if (!LHSOffsetValue.isNegative() && |
| 2362 | !RHSOffsetValue.isNegative() && |
| 2363 | LHSOffsetValue.ult(LHSSize) && |
| 2364 | RHSOffsetValue.ult(RHSSize)) { |
| 2365 | return ConstantInt::get(GetCompareTy(LHS), |
| 2366 | !CmpInst::isTrueWhenEqual(Pred)); |
| 2367 | } |
| 2368 | } |
| 2369 | |
| 2370 | // Repeat the above check but this time without depending on DataLayout |
| 2371 | // or being able to compute a precise size. |
| 2372 | if (!cast<PointerType>(LHS->getType())->isEmptyTy() && |
| 2373 | !cast<PointerType>(RHS->getType())->isEmptyTy() && |
| 2374 | LHSOffset->isNullValue() && |
| 2375 | RHSOffset->isNullValue()) |
| 2376 | return ConstantInt::get(GetCompareTy(LHS), |
| 2377 | !CmpInst::isTrueWhenEqual(Pred)); |
| 2378 | } |
Benjamin Kramer | 942dfe6 | 2013-09-23 14:16:38 +0000 | [diff] [blame] | 2379 | |
| 2380 | // Even if a non-inbounds GEP occurs along the path we can still optimize
| 2381 | // equality comparisons concerning the result. We avoid walking the whole
| 2382 | // chain again by starting where the last calls to
| 2383 | // stripAndComputeConstantOffsets left off and accumulating the offsets.
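| | // For instance (illustrative), an equality compare of two non-inbounds GEPs
| | // off the same base, 'gep i8, i8* %p, i64 4' and 'gep i8, i8* %p, i64 8',
| | // reduces to a compare of the accumulated constant offsets 4 and 8.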
Rafael Espindola | 37dc9e1 | 2014-02-21 00:06:31 +0000 | [diff] [blame] | 2384 | Constant *LHSNoBound = stripAndComputeConstantOffsets(DL, LHS, true); |
| 2385 | Constant *RHSNoBound = stripAndComputeConstantOffsets(DL, RHS, true); |
Benjamin Kramer | 942dfe6 | 2013-09-23 14:16:38 +0000 | [diff] [blame] | 2386 | if (LHS == RHS) |
| 2387 | return ConstantExpr::getICmp(Pred, |
| 2388 | ConstantExpr::getAdd(LHSOffset, LHSNoBound), |
| 2389 | ConstantExpr::getAdd(RHSOffset, RHSNoBound)); |
Hal Finkel | afcd8db | 2014-12-01 23:38:06 +0000 | [diff] [blame] | 2390 | |
| 2391 | // If one side of the equality comparison must come from a noalias call |
| 2392 | // (meaning a system memory allocation function), and the other side must |
| 2393 | // come from a pointer that cannot overlap with dynamically-allocated |
| 2394 | // memory within the lifetime of the current function (allocas, byval |
| 2395 | // arguments, globals), then determine the comparison result here. |
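| | // Illustrative example: if %m is the result of a noalias call such as malloc
| | // and the other operand is based on an internal, non-thread-local global or a
| | // static alloca, 'icmp eq' of the two folds to false here.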
Bjorn Pettersson | 71e8c6f | 2019-04-24 06:55:50 +0000 | [diff] [blame] | 2396 | SmallVector<const Value *, 8> LHSUObjs, RHSUObjs; |
Hal Finkel | afcd8db | 2014-12-01 23:38:06 +0000 | [diff] [blame] | 2397 | GetUnderlyingObjects(LHS, LHSUObjs, DL); |
| 2398 | GetUnderlyingObjects(RHS, RHSUObjs, DL); |
| 2399 | |
| 2400 | // Is the set of underlying objects all noalias calls? |
Bjorn Pettersson | 71e8c6f | 2019-04-24 06:55:50 +0000 | [diff] [blame] | 2401 | auto IsNAC = [](ArrayRef<const Value *> Objects) { |
David Majnemer | 0a16c22 | 2016-08-11 21:15:00 +0000 | [diff] [blame] | 2402 | return all_of(Objects, isNoAliasCall); |
Hal Finkel | afcd8db | 2014-12-01 23:38:06 +0000 | [diff] [blame] | 2403 | }; |
| 2404 | |
| 2405 | // Is the set of underlying objects all things which must be disjoint from |
Hal Finkel | aa19baf | 2014-12-04 17:45:19 +0000 | [diff] [blame] | 2406 | // noalias calls? For allocas, we consider only static ones (dynamic allocas
| 2407 | // might be transformed into calls to malloc that are not simultaneously
| 2408 | // live with the compared-to allocation). For globals, we exclude symbols
| 2409 | // that might be resolved lazily to symbols in another dynamically-loaded
| 2410 | // library (and, thus, could be malloc'ed by the implementation).
Bjorn Pettersson | 71e8c6f | 2019-04-24 06:55:50 +0000 | [diff] [blame] | 2411 | auto IsAllocDisjoint = [](ArrayRef<const Value *> Objects) { |
| 2412 | return all_of(Objects, [](const Value *V) { |
Sanjay Patel | 34ea70a | 2016-01-11 22:24:35 +0000 | [diff] [blame] | 2413 | if (const AllocaInst *AI = dyn_cast<AllocaInst>(V)) |
| 2414 | return AI->getParent() && AI->getFunction() && AI->isStaticAlloca(); |
| 2415 | if (const GlobalValue *GV = dyn_cast<GlobalValue>(V)) |
| 2416 | return (GV->hasLocalLinkage() || GV->hasHiddenVisibility() || |
Peter Collingbourne | 96efdd6 | 2016-06-14 21:01:22 +0000 | [diff] [blame] | 2417 | GV->hasProtectedVisibility() || GV->hasGlobalUnnamedAddr()) && |
Sanjay Patel | 34ea70a | 2016-01-11 22:24:35 +0000 | [diff] [blame] | 2418 | !GV->isThreadLocal(); |
| 2419 | if (const Argument *A = dyn_cast<Argument>(V)) |
| 2420 | return A->hasByValAttr(); |
| 2421 | return false; |
| 2422 | }); |
Hal Finkel | afcd8db | 2014-12-01 23:38:06 +0000 | [diff] [blame] | 2423 | }; |
| 2424 | |
| 2425 | if ((IsNAC(LHSUObjs) && IsAllocDisjoint(RHSUObjs)) || |
| 2426 | (IsNAC(RHSUObjs) && IsAllocDisjoint(LHSUObjs))) |
| 2427 | return ConstantInt::get(GetCompareTy(LHS), |
| 2428 | !CmpInst::isTrueWhenEqual(Pred)); |
Anna Thomas | 43d7e1c | 2016-05-03 14:58:21 +0000 | [diff] [blame] | 2429 | |
| 2430 | // Fold comparisons for a non-escaping pointer even if the allocation call
| 2431 | // cannot be elided. We cannot fold a malloc comparison to null. Also, the
| 2432 | // dynamic allocation call could be either of the operands. |
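| | // Illustrative example: if %m = call i8* @malloc(i64 1) is never captured and
| | // %p is known non-null, 'icmp eq i8* %m, %p' folds to false here (and 'ne'
| | // folds to true), even though the allocation call itself stays.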
| 2433 | Value *MI = nullptr; |
Nuno Lopes | 404f106 | 2017-09-09 18:23:11 +0000 | [diff] [blame] | 2434 | if (isAllocLikeFn(LHS, TLI) && |
| 2435 | llvm::isKnownNonZero(RHS, DL, 0, nullptr, CxtI, DT)) |
Anna Thomas | 43d7e1c | 2016-05-03 14:58:21 +0000 | [diff] [blame] | 2436 | MI = LHS; |
Nuno Lopes | 404f106 | 2017-09-09 18:23:11 +0000 | [diff] [blame] | 2437 | else if (isAllocLikeFn(RHS, TLI) && |
| 2438 | llvm::isKnownNonZero(LHS, DL, 0, nullptr, CxtI, DT)) |
Anna Thomas | 43d7e1c | 2016-05-03 14:58:21 +0000 | [diff] [blame] | 2439 | MI = RHS; |
| 2440 | // FIXME: We should also fold the compare when the pointer escapes, but the |
| 2441 | // compare dominates the pointer escape.
| 2442 | if (MI && !PointerMayBeCaptured(MI, true, true)) |
| 2443 | return ConstantInt::get(GetCompareTy(LHS), |
| 2444 | CmpInst::isFalseWhenEqual(Pred)); |
Dan Gohman | b3e2d3a | 2013-02-01 00:11:13 +0000 | [diff] [blame] | 2445 | } |
| 2446 | |
| 2447 | // Otherwise, fail. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 2448 | return nullptr; |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 2449 | } |
Chris Lattner | 01990f0 | 2012-02-24 19:01:58 +0000 | [diff] [blame] | 2450 | |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2451 | /// Fold an icmp when its operands have i1 scalar type. |
| 2452 | static Value *simplifyICmpOfBools(CmpInst::Predicate Pred, Value *LHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2453 | Value *RHS, const SimplifyQuery &Q) { |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2454 | Type *ITy = GetCompareTy(LHS); // The return type. |
| 2455 | Type *OpTy = LHS->getType(); // The operand type. |
Craig Topper | fde4723 | 2017-07-09 07:04:03 +0000 | [diff] [blame] | 2456 | if (!OpTy->isIntOrIntVectorTy(1)) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2457 | return nullptr; |
| 2458 | |
Sanjay Patel | e2787b9 | 2017-05-17 20:27:55 +0000 | [diff] [blame] | 2459 | // A boolean compared to true/false can be simplified in 14 out of the 20 |
| 2460 | // (10 predicates * 2 constants) possible combinations. Cases not handled here |
| 2461 | // require a 'not' of the LHS, so those must be transformed in InstCombine. |
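| | // For example (illustrative): 'icmp ugt i1 %b, false' simplifies to %b, and
| | // 'icmp slt i1 %b, false' also simplifies to %b, because i1 true is -1 when
| | // interpreted as signed.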
| 2462 | if (match(RHS, m_Zero())) { |
| 2463 | switch (Pred) { |
| 2464 | case CmpInst::ICMP_NE: // X != 0 -> X |
| 2465 | case CmpInst::ICMP_UGT: // X >u 0 -> X |
| 2466 | case CmpInst::ICMP_SLT: // X <s 0 -> X |
| 2467 | return LHS; |
| 2468 | |
| 2469 | case CmpInst::ICMP_ULT: // X <u 0 -> false |
| 2470 | case CmpInst::ICMP_SGT: // X >s 0 -> false |
| 2471 | return getFalse(ITy); |
| 2472 | |
| 2473 | case CmpInst::ICMP_UGE: // X >=u 0 -> true |
| 2474 | case CmpInst::ICMP_SLE: // X <=s 0 -> true |
| 2475 | return getTrue(ITy); |
| 2476 | |
| 2477 | default: break; |
| 2478 | } |
| 2479 | } else if (match(RHS, m_One())) { |
| 2480 | switch (Pred) { |
| 2481 | case CmpInst::ICMP_EQ: // X == 1 -> X |
| 2482 | case CmpInst::ICMP_UGE: // X >=u 1 -> X |
| 2483 | case CmpInst::ICMP_SLE: // X <=s -1 -> X |
| 2484 | return LHS; |
| 2485 | |
| 2486 | case CmpInst::ICMP_UGT: // X >u 1 -> false |
| 2487 | case CmpInst::ICMP_SLT: // X <s -1 -> false |
| 2488 | return getFalse(ITy); |
| 2489 | |
| 2490 | case CmpInst::ICMP_ULE: // X <=u 1 -> true |
| 2491 | case CmpInst::ICMP_SGE: // X >=s -1 -> true |
| 2492 | return getTrue(ITy); |
| 2493 | |
| 2494 | default: break; |
| 2495 | } |
| 2496 | } |
| 2497 | |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2498 | switch (Pred) { |
| 2499 | default: |
| 2500 | break; |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2501 | case ICmpInst::ICMP_UGE: |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2502 | if (isImpliedCondition(RHS, LHS, Q.DL).getValueOr(false)) |
| 2503 | return getTrue(ITy); |
| 2504 | break; |
| 2505 | case ICmpInst::ICMP_SGE: |
| 2506 | /// For signed comparison, the values for an i1 are 0 and -1 |
| 2507 | /// respectively. This maps into a truth table of: |
| 2508 | /// LHS | RHS | LHS >=s RHS | LHS implies RHS |
| 2509 | /// 0 | 0 | 1 (0 >= 0) | 1 |
| 2510 | /// 0 | 1 | 1 (0 >= -1) | 1 |
| 2511 | /// 1 | 0 | 0 (-1 >= 0) | 0 |
| 2512 | /// 1 | 1 | 1 (-1 >= -1) | 1 |
| 2513 | if (isImpliedCondition(LHS, RHS, Q.DL).getValueOr(false)) |
| 2514 | return getTrue(ITy); |
| 2515 | break; |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2516 | case ICmpInst::ICMP_ULE: |
| 2517 | if (isImpliedCondition(LHS, RHS, Q.DL).getValueOr(false)) |
| 2518 | return getTrue(ITy); |
| 2519 | break; |
| 2520 | } |
| 2521 | |
| 2522 | return nullptr; |
| 2523 | } |
| 2524 | |
| 2525 | /// Try hard to fold icmp with zero RHS because this is a common case. |
| 2526 | static Value *simplifyICmpWithZero(CmpInst::Predicate Pred, Value *LHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2527 | Value *RHS, const SimplifyQuery &Q) { |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2528 | if (!match(RHS, m_Zero())) |
| 2529 | return nullptr; |
| 2530 | |
| 2531 | Type *ITy = GetCompareTy(LHS); // The return type. |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2532 | switch (Pred) { |
| 2533 | default: |
| 2534 | llvm_unreachable("Unknown ICmp predicate!"); |
| 2535 | case ICmpInst::ICMP_ULT: |
| 2536 | return getFalse(ITy); |
| 2537 | case ICmpInst::ICMP_UGE: |
| 2538 | return getTrue(ITy); |
| 2539 | case ICmpInst::ICMP_EQ: |
| 2540 | case ICmpInst::ICMP_ULE: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2541 | if (isKnownNonZero(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT, Q.IIQ.UseInstrInfo)) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2542 | return getFalse(ITy); |
| 2543 | break; |
| 2544 | case ICmpInst::ICMP_NE: |
| 2545 | case ICmpInst::ICMP_UGT: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2546 | if (isKnownNonZero(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT, Q.IIQ.UseInstrInfo)) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2547 | return getTrue(ITy); |
| 2548 | break; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2549 | case ICmpInst::ICMP_SLT: { |
| 2550 | KnownBits LHSKnown = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2551 | if (LHSKnown.isNegative()) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2552 | return getTrue(ITy); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2553 | if (LHSKnown.isNonNegative()) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2554 | return getFalse(ITy); |
| 2555 | break; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2556 | } |
| 2557 | case ICmpInst::ICMP_SLE: { |
| 2558 | KnownBits LHSKnown = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2559 | if (LHSKnown.isNegative()) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2560 | return getTrue(ITy); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2561 | if (LHSKnown.isNonNegative() && |
| 2562 | isKnownNonZero(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT)) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2563 | return getFalse(ITy); |
| 2564 | break; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2565 | } |
| 2566 | case ICmpInst::ICMP_SGE: { |
| 2567 | KnownBits LHSKnown = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2568 | if (LHSKnown.isNegative()) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2569 | return getFalse(ITy); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2570 | if (LHSKnown.isNonNegative()) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2571 | return getTrue(ITy); |
| 2572 | break; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2573 | } |
| 2574 | case ICmpInst::ICMP_SGT: { |
| 2575 | KnownBits LHSKnown = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2576 | if (LHSKnown.isNegative()) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2577 | return getFalse(ITy); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2578 | if (LHSKnown.isNonNegative() && |
| 2579 | isKnownNonZero(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT)) |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2580 | return getTrue(ITy); |
| 2581 | break; |
| 2582 | } |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2583 | } |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 2584 | |
| 2585 | return nullptr; |
| 2586 | } |
| 2587 | |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2588 | static Value *simplifyICmpWithConstant(CmpInst::Predicate Pred, Value *LHS, |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2589 | Value *RHS, const InstrInfoQuery &IIQ) { |
Roman Lebedev | 0c43d72 | 2018-03-15 16:17:40 +0000 | [diff] [blame] | 2590 | Type *ITy = GetCompareTy(RHS); // The return type. |
| 2591 | |
Roman Lebedev | 6aca335 | 2018-03-15 16:17:46 +0000 | [diff] [blame] | 2592 | Value *X; |
| 2593 | // Sign-bit checks can be optimized to true/false after unsigned |
| 2594 | // floating-point casts: |
| 2595 | // icmp slt (bitcast (uitofp X)), 0 --> false |
| 2596 | // icmp sgt (bitcast (uitofp X)), -1 --> true |
| 2597 | if (match(LHS, m_BitCast(m_UIToFP(m_Value(X))))) { |
| 2598 | if (Pred == ICmpInst::ICMP_SLT && match(RHS, m_Zero())) |
| 2599 | return ConstantInt::getFalse(ITy); |
| 2600 | if (Pred == ICmpInst::ICMP_SGT && match(RHS, m_AllOnes())) |
| 2601 | return ConstantInt::getTrue(ITy); |
| 2602 | } |
| 2603 | |
Sanjay Patel | 200e3cb | 2016-08-23 17:30:56 +0000 | [diff] [blame] | 2604 | const APInt *C; |
| 2605 | if (!match(RHS, m_APInt(C))) |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2606 | return nullptr; |
| 2607 | |
| 2608 | // Rule out tautological comparisons (e.g., ult 0 or uge 0).
Sanjoy Das | 1f7b813 | 2016-10-02 00:09:57 +0000 | [diff] [blame] | 2609 | ConstantRange RHS_CR = ConstantRange::makeExactICmpRegion(Pred, *C); |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2610 | if (RHS_CR.isEmptySet()) |
Roman Lebedev | 0c43d72 | 2018-03-15 16:17:40 +0000 | [diff] [blame] | 2611 | return ConstantInt::getFalse(ITy); |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2612 | if (RHS_CR.isFullSet()) |
Roman Lebedev | 0c43d72 | 2018-03-15 16:17:40 +0000 | [diff] [blame] | 2613 | return ConstantInt::getTrue(ITy); |
Sanjay Patel | 200e3cb | 2016-08-23 17:30:56 +0000 | [diff] [blame] | 2614 | |
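| | // If every value the LHS can take satisfies (or fails) the predicate against
| | // the constant, the compare folds. For example (illustrative, assuming the
| | // mask's range is recognized): 'icmp ult i32 (and i32 %x, 7), 8' folds to
| | // true.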
Nikita Popov | 4909759 | 2019-03-09 21:17:42 +0000 | [diff] [blame] | 2615 | ConstantRange LHS_CR = computeConstantRange(LHS, IIQ.UseInstrInfo); |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2616 | if (!LHS_CR.isFullSet()) { |
| 2617 | if (RHS_CR.contains(LHS_CR)) |
Roman Lebedev | 0c43d72 | 2018-03-15 16:17:40 +0000 | [diff] [blame] | 2618 | return ConstantInt::getTrue(ITy); |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2619 | if (RHS_CR.inverse().contains(LHS_CR)) |
Roman Lebedev | 0c43d72 | 2018-03-15 16:17:40 +0000 | [diff] [blame] | 2620 | return ConstantInt::getFalse(ITy); |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 2621 | } |
| 2622 | |
| 2623 | return nullptr; |
| 2624 | } |
| 2625 | |
Sanjay Patel | 2df38a8 | 2017-05-08 16:21:55 +0000 | [diff] [blame] | 2626 | /// TODO: A large part of this logic is duplicated in InstCombine's |
| 2627 | /// foldICmpBinOp(). We should be able to share that and avoid the code |
| 2628 | /// duplication. |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2629 | static Value *simplifyICmpWithBinOp(CmpInst::Predicate Pred, Value *LHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2630 | Value *RHS, const SimplifyQuery &Q, |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2631 | unsigned MaxRecurse) { |
| 2632 | Type *ITy = GetCompareTy(LHS); // The return type. |
| 2633 | |
| 2634 | BinaryOperator *LBO = dyn_cast<BinaryOperator>(LHS); |
| 2635 | BinaryOperator *RBO = dyn_cast<BinaryOperator>(RHS); |
| 2636 | if (MaxRecurse && (LBO || RBO)) { |
| 2637 | // Analyze the case when either LHS or RHS is an add instruction. |
| 2638 | Value *A = nullptr, *B = nullptr, *C = nullptr, *D = nullptr; |
| 2639 | // LHS = A + B (or A and B are null); RHS = C + D (or C and D are null). |
| 2640 | bool NoLHSWrapProblem = false, NoRHSWrapProblem = false; |
| 2641 | if (LBO && LBO->getOpcode() == Instruction::Add) { |
| 2642 | A = LBO->getOperand(0); |
| 2643 | B = LBO->getOperand(1); |
| 2644 | NoLHSWrapProblem = |
| 2645 | ICmpInst::isEquality(Pred) || |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2646 | (CmpInst::isUnsigned(Pred) && |
| 2647 | Q.IIQ.hasNoUnsignedWrap(cast<OverflowingBinaryOperator>(LBO))) || |
| 2648 | (CmpInst::isSigned(Pred) && |
| 2649 | Q.IIQ.hasNoSignedWrap(cast<OverflowingBinaryOperator>(LBO))); |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2650 | } |
| 2651 | if (RBO && RBO->getOpcode() == Instruction::Add) { |
| 2652 | C = RBO->getOperand(0); |
| 2653 | D = RBO->getOperand(1); |
| 2654 | NoRHSWrapProblem = |
| 2655 | ICmpInst::isEquality(Pred) || |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2656 | (CmpInst::isUnsigned(Pred) && |
| 2657 | Q.IIQ.hasNoUnsignedWrap(cast<OverflowingBinaryOperator>(RBO))) || |
| 2658 | (CmpInst::isSigned(Pred) && |
| 2659 | Q.IIQ.hasNoSignedWrap(cast<OverflowingBinaryOperator>(RBO))); |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2660 | } |
| 2661 | |
| 2662 | // icmp (X+Y), X -> icmp Y, 0 for equalities or if there is no overflow. |
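| | // e.g. (illustrative): 'icmp eq i32 (add i32 %x, %y), %x' becomes
| | // 'icmp eq i32 %y, 0', which may simplify further.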
| 2663 | if ((A == RHS || B == RHS) && NoLHSWrapProblem) |
| 2664 | if (Value *V = SimplifyICmpInst(Pred, A == RHS ? B : A, |
| 2665 | Constant::getNullValue(RHS->getType()), Q, |
| 2666 | MaxRecurse - 1)) |
| 2667 | return V; |
| 2668 | |
| 2669 | // icmp X, (X+Y) -> icmp 0, Y for equalities or if there is no overflow. |
| 2670 | if ((C == LHS || D == LHS) && NoRHSWrapProblem) |
| 2671 | if (Value *V = |
| 2672 | SimplifyICmpInst(Pred, Constant::getNullValue(LHS->getType()), |
| 2673 | C == LHS ? D : C, Q, MaxRecurse - 1)) |
| 2674 | return V; |
| 2675 | |
| 2676 | // icmp (X+Y), (X+Z) -> icmp Y,Z for equalities or if there is no overflow. |
| 2677 | if (A && C && (A == C || A == D || B == C || B == D) && NoLHSWrapProblem && |
| 2678 | NoRHSWrapProblem) { |
| 2679 | // Determine Y and Z in the form icmp (X+Y), (X+Z). |
| 2680 | Value *Y, *Z; |
| 2681 | if (A == C) { |
| 2682 | // C + B == C + D -> B == D |
| 2683 | Y = B; |
| 2684 | Z = D; |
| 2685 | } else if (A == D) { |
| 2686 | // D + B == C + D -> B == C |
| 2687 | Y = B; |
| 2688 | Z = C; |
| 2689 | } else if (B == C) { |
| 2690 | // A + C == C + D -> A == D |
| 2691 | Y = A; |
| 2692 | Z = D; |
| 2693 | } else { |
| 2694 | assert(B == D); |
| 2695 | // A + D == C + D -> A == C |
| 2696 | Y = A; |
| 2697 | Z = C; |
| 2698 | } |
| 2699 | if (Value *V = SimplifyICmpInst(Pred, Y, Z, Q, MaxRecurse - 1)) |
| 2700 | return V; |
| 2701 | } |
| 2702 | } |
| 2703 | |
| 2704 | { |
| 2705 | Value *Y = nullptr; |
| 2706 | // icmp pred (or X, Y), X |
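| | // e.g. (illustrative): 'icmp uge i32 (or i32 %x, %y), %x' is always true,
| | // since or'ing in more bits can never decrease an unsigned value.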
| 2707 | if (LBO && match(LBO, m_c_Or(m_Value(Y), m_Specific(RHS)))) { |
| 2708 | if (Pred == ICmpInst::ICMP_ULT) |
| 2709 | return getFalse(ITy); |
| 2710 | if (Pred == ICmpInst::ICMP_UGE) |
| 2711 | return getTrue(ITy); |
| 2712 | |
| 2713 | if (Pred == ICmpInst::ICMP_SLT || Pred == ICmpInst::ICMP_SGE) { |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2714 | KnownBits RHSKnown = computeKnownBits(RHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2715 | KnownBits YKnown = computeKnownBits(Y, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2716 | if (RHSKnown.isNonNegative() && YKnown.isNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2717 | return Pred == ICmpInst::ICMP_SLT ? getTrue(ITy) : getFalse(ITy); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2718 | if (RHSKnown.isNegative() || YKnown.isNonNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2719 | return Pred == ICmpInst::ICMP_SLT ? getFalse(ITy) : getTrue(ITy); |
| 2720 | } |
| 2721 | } |
| 2722 | // icmp pred X, (or X, Y) |
| 2723 | if (RBO && match(RBO, m_c_Or(m_Value(Y), m_Specific(LHS)))) { |
| 2724 | if (Pred == ICmpInst::ICMP_ULE) |
| 2725 | return getTrue(ITy); |
| 2726 | if (Pred == ICmpInst::ICMP_UGT) |
| 2727 | return getFalse(ITy); |
| 2728 | |
| 2729 | if (Pred == ICmpInst::ICMP_SGT || Pred == ICmpInst::ICMP_SLE) { |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2730 | KnownBits LHSKnown = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2731 | KnownBits YKnown = computeKnownBits(Y, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2732 | if (LHSKnown.isNonNegative() && YKnown.isNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2733 | return Pred == ICmpInst::ICMP_SGT ? getTrue(ITy) : getFalse(ITy); |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2734 | if (LHSKnown.isNegative() || YKnown.isNonNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2735 | return Pred == ICmpInst::ICMP_SGT ? getFalse(ITy) : getTrue(ITy); |
| 2736 | } |
| 2737 | } |
| 2738 | } |
| 2739 | |
| 2740 | // icmp pred (and X, Y), X |
Craig Topper | 72ee694 | 2017-06-24 06:24:01 +0000 | [diff] [blame] | 2741 | if (LBO && match(LBO, m_c_And(m_Value(), m_Specific(RHS)))) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2742 | if (Pred == ICmpInst::ICMP_UGT) |
| 2743 | return getFalse(ITy); |
| 2744 | if (Pred == ICmpInst::ICMP_ULE) |
| 2745 | return getTrue(ITy); |
| 2746 | } |
| 2747 | // icmp pred X, (and X, Y) |
Craig Topper | 72ee694 | 2017-06-24 06:24:01 +0000 | [diff] [blame] | 2748 | if (RBO && match(RBO, m_c_And(m_Value(), m_Specific(LHS)))) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2749 | if (Pred == ICmpInst::ICMP_UGE) |
| 2750 | return getTrue(ITy); |
| 2751 | if (Pred == ICmpInst::ICMP_ULT) |
| 2752 | return getFalse(ITy); |
| 2753 | } |
| 2754 | |
| 2755 | // 0 - (zext X) pred C |
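| | // e.g. (illustrative): 'icmp slt i32 (sub i32 0, (zext i1 %b to i32)), 5'
| | // folds to true, because 0 - zext(...) is never positive.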
| 2756 | if (!CmpInst::isUnsigned(Pred) && match(LHS, m_Neg(m_ZExt(m_Value())))) { |
| 2757 | if (ConstantInt *RHSC = dyn_cast<ConstantInt>(RHS)) { |
| 2758 | if (RHSC->getValue().isStrictlyPositive()) { |
| 2759 | if (Pred == ICmpInst::ICMP_SLT) |
| 2760 | return ConstantInt::getTrue(RHSC->getContext()); |
| 2761 | if (Pred == ICmpInst::ICMP_SGE) |
| 2762 | return ConstantInt::getFalse(RHSC->getContext()); |
| 2763 | if (Pred == ICmpInst::ICMP_EQ) |
| 2764 | return ConstantInt::getFalse(RHSC->getContext()); |
| 2765 | if (Pred == ICmpInst::ICMP_NE) |
| 2766 | return ConstantInt::getTrue(RHSC->getContext()); |
| 2767 | } |
| 2768 | if (RHSC->getValue().isNonNegative()) { |
| 2769 | if (Pred == ICmpInst::ICMP_SLE) |
| 2770 | return ConstantInt::getTrue(RHSC->getContext()); |
| 2771 | if (Pred == ICmpInst::ICMP_SGT) |
| 2772 | return ConstantInt::getFalse(RHSC->getContext()); |
| 2773 | } |
| 2774 | } |
| 2775 | } |
| 2776 | |
| 2777 | // icmp pred (urem X, Y), Y |
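| | // e.g. (illustrative): 'icmp ult i32 (urem i32 %x, %y), %y' folds to true,
| | // since a urem result is smaller than the divisor whenever the division is
| | // well defined.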
| 2778 | if (LBO && match(LBO, m_URem(m_Value(), m_Specific(RHS)))) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2779 | switch (Pred) { |
| 2780 | default: |
| 2781 | break; |
| 2782 | case ICmpInst::ICMP_SGT: |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2783 | case ICmpInst::ICMP_SGE: { |
| 2784 | KnownBits Known = computeKnownBits(RHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2785 | if (!Known.isNonNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2786 | break; |
| 2787 | LLVM_FALLTHROUGH; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2788 | } |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2789 | case ICmpInst::ICMP_EQ: |
| 2790 | case ICmpInst::ICMP_UGT: |
| 2791 | case ICmpInst::ICMP_UGE: |
| 2792 | return getFalse(ITy); |
| 2793 | case ICmpInst::ICMP_SLT: |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2794 | case ICmpInst::ICMP_SLE: { |
| 2795 | KnownBits Known = computeKnownBits(RHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2796 | if (!Known.isNonNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2797 | break; |
| 2798 | LLVM_FALLTHROUGH; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2799 | } |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2800 | case ICmpInst::ICMP_NE: |
| 2801 | case ICmpInst::ICMP_ULT: |
| 2802 | case ICmpInst::ICMP_ULE: |
| 2803 | return getTrue(ITy); |
| 2804 | } |
| 2805 | } |
| 2806 | |
| 2807 | // icmp pred X, (urem Y, X) |
| 2808 | if (RBO && match(RBO, m_URem(m_Value(), m_Specific(LHS)))) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2809 | switch (Pred) { |
| 2810 | default: |
| 2811 | break; |
| 2812 | case ICmpInst::ICMP_SGT: |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2813 | case ICmpInst::ICMP_SGE: { |
| 2814 | KnownBits Known = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2815 | if (!Known.isNonNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2816 | break; |
| 2817 | LLVM_FALLTHROUGH; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2818 | } |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2819 | case ICmpInst::ICMP_NE: |
| 2820 | case ICmpInst::ICMP_UGT: |
| 2821 | case ICmpInst::ICMP_UGE: |
| 2822 | return getTrue(ITy); |
| 2823 | case ICmpInst::ICMP_SLT: |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2824 | case ICmpInst::ICMP_SLE: { |
| 2825 | KnownBits Known = computeKnownBits(LHS, Q.DL, 0, Q.AC, Q.CxtI, Q.DT); |
| 2826 | if (!Known.isNonNegative()) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2827 | break; |
| 2828 | LLVM_FALLTHROUGH; |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 2829 | } |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2830 | case ICmpInst::ICMP_EQ: |
| 2831 | case ICmpInst::ICMP_ULT: |
| 2832 | case ICmpInst::ICMP_ULE: |
| 2833 | return getFalse(ITy); |
| 2834 | } |
| 2835 | } |
| 2836 | |
| 2837 | // x >> y <=u x |
| 2838 | // x udiv y <=u x. |
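| | // e.g. (illustrative): 'icmp ule i32 (lshr i32 %x, %y), %x' folds to true;
| | // a logical shift right or udiv can never increase an unsigned value.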
| 2839 | if (LBO && (match(LBO, m_LShr(m_Specific(RHS), m_Value())) || |
| 2840 | match(LBO, m_UDiv(m_Specific(RHS), m_Value())))) { |
| 2841 | // icmp pred (X op Y), X |
| 2842 | if (Pred == ICmpInst::ICMP_UGT) |
| 2843 | return getFalse(ITy); |
| 2844 | if (Pred == ICmpInst::ICMP_ULE) |
| 2845 | return getTrue(ITy); |
| 2846 | } |
| 2847 | |
| 2848 | // x >=u x >> y |
| 2849 | // x >=u x udiv y. |
| 2850 | if (RBO && (match(RBO, m_LShr(m_Specific(LHS), m_Value())) || |
| 2851 | match(RBO, m_UDiv(m_Specific(LHS), m_Value())))) { |
| 2852 | // icmp pred X, (X op Y) |
| 2853 | if (Pred == ICmpInst::ICMP_ULT) |
| 2854 | return getFalse(ITy); |
| 2855 | if (Pred == ICmpInst::ICMP_UGE) |
| 2856 | return getTrue(ITy); |
| 2857 | } |
| 2858 | |
| 2859 | // handle: |
| 2860 | // CI2 << X == CI |
| 2861 | // CI2 << X != CI |
| 2862 | // |
| 2863 | // where CI2 is a power of 2 and CI isn't |
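| | // e.g. (illustrative): 'icmp eq i32 (shl nuw i32 4, %x), 7' folds to false;
| | // a nuw shift of the power-of-2 constant 4 can never produce 7.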
| 2864 | if (auto *CI = dyn_cast<ConstantInt>(RHS)) { |
| 2865 | const APInt *CI2Val, *CIVal = &CI->getValue(); |
| 2866 | if (LBO && match(LBO, m_Shl(m_APInt(CI2Val), m_Value())) && |
| 2867 | CI2Val->isPowerOf2()) { |
| 2868 | if (!CIVal->isPowerOf2()) { |
| 2869 | // CI2 << X can equal zero in some circumstances, so
| 2870 | // this simplification is unsafe if CI is zero. |
| 2871 | // |
| 2872 | // We know it is safe if: |
| 2873 | // - The shift is nsw, we can't shift out the one bit. |
| 2874 | // - The shift is nuw, we can't shift out the one bit. |
| 2875 | // - CI2 is one |
| 2876 | // - CI isn't zero |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2877 | if (Q.IIQ.hasNoSignedWrap(cast<OverflowingBinaryOperator>(LBO)) || |
| 2878 | Q.IIQ.hasNoUnsignedWrap(cast<OverflowingBinaryOperator>(LBO)) || |
Craig Topper | 73ba1c8 | 2017-06-07 07:40:37 +0000 | [diff] [blame] | 2879 | CI2Val->isOneValue() || !CI->isZero()) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2880 | if (Pred == ICmpInst::ICMP_EQ) |
| 2881 | return ConstantInt::getFalse(RHS->getContext()); |
| 2882 | if (Pred == ICmpInst::ICMP_NE) |
| 2883 | return ConstantInt::getTrue(RHS->getContext()); |
| 2884 | } |
| 2885 | } |
Craig Topper | 73ba1c8 | 2017-06-07 07:40:37 +0000 | [diff] [blame] | 2886 | if (CIVal->isSignMask() && CI2Val->isOneValue()) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2887 | if (Pred == ICmpInst::ICMP_UGT) |
| 2888 | return ConstantInt::getFalse(RHS->getContext()); |
| 2889 | if (Pred == ICmpInst::ICMP_ULE) |
| 2890 | return ConstantInt::getTrue(RHS->getContext()); |
| 2891 | } |
| 2892 | } |
| 2893 | } |
| 2894 | |
| 2895 | if (MaxRecurse && LBO && RBO && LBO->getOpcode() == RBO->getOpcode() && |
| 2896 | LBO->getOperand(1) == RBO->getOperand(1)) { |
| 2897 | switch (LBO->getOpcode()) { |
| 2898 | default: |
| 2899 | break; |
| 2900 | case Instruction::UDiv: |
| 2901 | case Instruction::LShr: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2902 | if (ICmpInst::isSigned(Pred) || !Q.IIQ.isExact(LBO) || |
| 2903 | !Q.IIQ.isExact(RBO)) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2904 | break; |
Sanjay Patel | a23b141 | 2017-05-15 19:16:49 +0000 | [diff] [blame] | 2905 | if (Value *V = SimplifyICmpInst(Pred, LBO->getOperand(0), |
| 2906 | RBO->getOperand(0), Q, MaxRecurse - 1)) |
| 2907 | return V; |
| 2908 | break; |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2909 | case Instruction::SDiv: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2910 | if (!ICmpInst::isEquality(Pred) || !Q.IIQ.isExact(LBO) || |
| 2911 | !Q.IIQ.isExact(RBO)) |
Sanjay Patel | a23b141 | 2017-05-15 19:16:49 +0000 | [diff] [blame] | 2912 | break; |
| 2913 | if (Value *V = SimplifyICmpInst(Pred, LBO->getOperand(0), |
| 2914 | RBO->getOperand(0), Q, MaxRecurse - 1)) |
| 2915 | return V; |
| 2916 | break; |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2917 | case Instruction::AShr: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2918 | if (!Q.IIQ.isExact(LBO) || !Q.IIQ.isExact(RBO)) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2919 | break; |
| 2920 | if (Value *V = SimplifyICmpInst(Pred, LBO->getOperand(0), |
| 2921 | RBO->getOperand(0), Q, MaxRecurse - 1)) |
| 2922 | return V; |
| 2923 | break; |
| 2924 | case Instruction::Shl: { |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 2925 | bool NUW = Q.IIQ.hasNoUnsignedWrap(LBO) && Q.IIQ.hasNoUnsignedWrap(RBO); |
| 2926 | bool NSW = Q.IIQ.hasNoSignedWrap(LBO) && Q.IIQ.hasNoSignedWrap(RBO); |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2927 | if (!NUW && !NSW) |
| 2928 | break; |
| 2929 | if (!NSW && ICmpInst::isSigned(Pred)) |
| 2930 | break; |
| 2931 | if (Value *V = SimplifyICmpInst(Pred, LBO->getOperand(0), |
| 2932 | RBO->getOperand(0), Q, MaxRecurse - 1)) |
| 2933 | return V; |
| 2934 | break; |
| 2935 | } |
| 2936 | } |
| 2937 | } |
| 2938 | return nullptr; |
| 2939 | } |
| 2940 | |
Sanjay Patel | 35289c6 | 2016-12-10 17:40:47 +0000 | [diff] [blame] | 2941 | /// Simplify integer comparisons where at least one operand of the compare |
| 2942 | /// matches an integer min/max idiom. |
| 2943 | static Value *simplifyICmpWithMinMax(CmpInst::Predicate Pred, Value *LHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 2944 | Value *RHS, const SimplifyQuery &Q, |
Sanjay Patel | 35289c6 | 2016-12-10 17:40:47 +0000 | [diff] [blame] | 2945 | unsigned MaxRecurse) { |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 2946 | Type *ITy = GetCompareTy(LHS); // The return type. |
| 2947 | Value *A, *B; |
| 2948 | CmpInst::Predicate P = CmpInst::BAD_ICMP_PREDICATE; |
| 2949 | CmpInst::Predicate EqP; // Chosen so that "A == max/min(A,B)" iff "A EqP B". |
| 2950 | |
| 2951 | // Signed variants on "max(a,b)>=a -> true". |
| 2952 | if (match(LHS, m_SMax(m_Value(A), m_Value(B))) && (A == RHS || B == RHS)) { |
| 2953 | if (A != RHS) |
| 2954 | std::swap(A, B); // smax(A, B) pred A. |
| 2955 | EqP = CmpInst::ICMP_SGE; // "A == smax(A, B)" iff "A sge B". |
| 2956 | // We analyze this as smax(A, B) pred A. |
| 2957 | P = Pred; |
| 2958 | } else if (match(RHS, m_SMax(m_Value(A), m_Value(B))) && |
| 2959 | (A == LHS || B == LHS)) { |
| 2960 | if (A != LHS) |
| 2961 | std::swap(A, B); // A pred smax(A, B). |
| 2962 | EqP = CmpInst::ICMP_SGE; // "A == smax(A, B)" iff "A sge B". |
| 2963 | // We analyze this as smax(A, B) swapped-pred A. |
| 2964 | P = CmpInst::getSwappedPredicate(Pred); |
| 2965 | } else if (match(LHS, m_SMin(m_Value(A), m_Value(B))) && |
| 2966 | (A == RHS || B == RHS)) { |
| 2967 | if (A != RHS) |
| 2968 | std::swap(A, B); // smin(A, B) pred A. |
| 2969 | EqP = CmpInst::ICMP_SLE; // "A == smin(A, B)" iff "A sle B". |
| 2970 | // We analyze this as smax(-A, -B) swapped-pred -A. |
| 2971 | // Note that we do not need to actually form -A or -B thanks to EqP. |
| 2972 | P = CmpInst::getSwappedPredicate(Pred); |
| 2973 | } else if (match(RHS, m_SMin(m_Value(A), m_Value(B))) && |
| 2974 | (A == LHS || B == LHS)) { |
| 2975 | if (A != LHS) |
| 2976 | std::swap(A, B); // A pred smin(A, B). |
| 2977 | EqP = CmpInst::ICMP_SLE; // "A == smin(A, B)" iff "A sle B". |
| 2978 | // We analyze this as smax(-A, -B) pred -A. |
| 2979 | // Note that we do not need to actually form -A or -B thanks to EqP. |
| 2980 | P = Pred; |
| 2981 | } |
| 2982 | if (P != CmpInst::BAD_ICMP_PREDICATE) { |
| 2983 | // Cases correspond to "max(A, B) p A". |
| 2984 | switch (P) { |
| 2985 | default: |
| 2986 | break; |
| 2987 | case CmpInst::ICMP_EQ: |
| 2988 | case CmpInst::ICMP_SLE: |
| 2989 | // Equivalent to "A EqP B". This may be the same as the condition tested |
| 2990 | // in the max/min; if so, we can just return that. |
| 2991 | if (Value *V = ExtractEquivalentCondition(LHS, EqP, A, B)) |
| 2992 | return V; |
| 2993 | if (Value *V = ExtractEquivalentCondition(RHS, EqP, A, B)) |
| 2994 | return V; |
| 2995 | // Otherwise, see if "A EqP B" simplifies. |
| 2996 | if (MaxRecurse) |
| 2997 | if (Value *V = SimplifyICmpInst(EqP, A, B, Q, MaxRecurse - 1)) |
| 2998 | return V; |
| 2999 | break; |
| 3000 | case CmpInst::ICMP_NE: |
| 3001 | case CmpInst::ICMP_SGT: { |
| 3002 | CmpInst::Predicate InvEqP = CmpInst::getInversePredicate(EqP); |
| 3003 | // Equivalent to "A InvEqP B". This may be the same as the condition |
| 3004 | // tested in the max/min; if so, we can just return that. |
| 3005 | if (Value *V = ExtractEquivalentCondition(LHS, InvEqP, A, B)) |
| 3006 | return V; |
| 3007 | if (Value *V = ExtractEquivalentCondition(RHS, InvEqP, A, B)) |
| 3008 | return V; |
| 3009 | // Otherwise, see if "A InvEqP B" simplifies. |
| 3010 | if (MaxRecurse) |
| 3011 | if (Value *V = SimplifyICmpInst(InvEqP, A, B, Q, MaxRecurse - 1)) |
| 3012 | return V; |
| 3013 | break; |
| 3014 | } |
| 3015 | case CmpInst::ICMP_SGE: |
| 3016 | // Always true. |
| 3017 | return getTrue(ITy); |
| 3018 | case CmpInst::ICMP_SLT: |
| 3019 | // Always false. |
| 3020 | return getFalse(ITy); |
| 3021 | } |
| 3022 | } |
| 3023 | |
| 3024 | // Unsigned variants on "max(a,b)>=a -> true". |
| 3025 | P = CmpInst::BAD_ICMP_PREDICATE; |
| 3026 | if (match(LHS, m_UMax(m_Value(A), m_Value(B))) && (A == RHS || B == RHS)) { |
| 3027 | if (A != RHS) |
| 3028 | std::swap(A, B); // umax(A, B) pred A. |
| 3029 | EqP = CmpInst::ICMP_UGE; // "A == umax(A, B)" iff "A uge B". |
| 3030 | // We analyze this as umax(A, B) pred A. |
| 3031 | P = Pred; |
| 3032 | } else if (match(RHS, m_UMax(m_Value(A), m_Value(B))) && |
| 3033 | (A == LHS || B == LHS)) { |
| 3034 | if (A != LHS) |
| 3035 | std::swap(A, B); // A pred umax(A, B). |
| 3036 | EqP = CmpInst::ICMP_UGE; // "A == umax(A, B)" iff "A uge B". |
| 3037 | // We analyze this as umax(A, B) swapped-pred A. |
| 3038 | P = CmpInst::getSwappedPredicate(Pred); |
| 3039 | } else if (match(LHS, m_UMin(m_Value(A), m_Value(B))) && |
| 3040 | (A == RHS || B == RHS)) { |
| 3041 | if (A != RHS) |
| 3042 | std::swap(A, B); // umin(A, B) pred A. |
| 3043 | EqP = CmpInst::ICMP_ULE; // "A == umin(A, B)" iff "A ule B". |
| 3044 | // We analyze this as umax(-A, -B) swapped-pred -A. |
| 3045 | // Note that we do not need to actually form -A or -B thanks to EqP. |
| 3046 | P = CmpInst::getSwappedPredicate(Pred); |
| 3047 | } else if (match(RHS, m_UMin(m_Value(A), m_Value(B))) && |
| 3048 | (A == LHS || B == LHS)) { |
| 3049 | if (A != LHS) |
| 3050 | std::swap(A, B); // A pred umin(A, B). |
| 3051 | EqP = CmpInst::ICMP_ULE; // "A == umin(A, B)" iff "A ule B". |
| 3052 | // We analyze this as umax(-A, -B) pred -A. |
| 3053 | // Note that we do not need to actually form -A or -B thanks to EqP. |
| 3054 | P = Pred; |
| 3055 | } |
| 3056 | if (P != CmpInst::BAD_ICMP_PREDICATE) { |
| 3057 | // Cases correspond to "max(A, B) p A". |
| 3058 | switch (P) { |
| 3059 | default: |
| 3060 | break; |
| 3061 | case CmpInst::ICMP_EQ: |
| 3062 | case CmpInst::ICMP_ULE: |
| 3063 | // Equivalent to "A EqP B". This may be the same as the condition tested |
| 3064 | // in the max/min; if so, we can just return that. |
| 3065 | if (Value *V = ExtractEquivalentCondition(LHS, EqP, A, B)) |
| 3066 | return V; |
| 3067 | if (Value *V = ExtractEquivalentCondition(RHS, EqP, A, B)) |
| 3068 | return V; |
| 3069 | // Otherwise, see if "A EqP B" simplifies. |
| 3070 | if (MaxRecurse) |
| 3071 | if (Value *V = SimplifyICmpInst(EqP, A, B, Q, MaxRecurse - 1)) |
| 3072 | return V; |
| 3073 | break; |
| 3074 | case CmpInst::ICMP_NE: |
| 3075 | case CmpInst::ICMP_UGT: { |
| 3076 | CmpInst::Predicate InvEqP = CmpInst::getInversePredicate(EqP); |
| 3077 | // Equivalent to "A InvEqP B". This may be the same as the condition |
| 3078 | // tested in the max/min; if so, we can just return that. |
| 3079 | if (Value *V = ExtractEquivalentCondition(LHS, InvEqP, A, B)) |
| 3080 | return V; |
| 3081 | if (Value *V = ExtractEquivalentCondition(RHS, InvEqP, A, B)) |
| 3082 | return V; |
| 3083 | // Otherwise, see if "A InvEqP B" simplifies. |
| 3084 | if (MaxRecurse) |
| 3085 | if (Value *V = SimplifyICmpInst(InvEqP, A, B, Q, MaxRecurse - 1)) |
| 3086 | return V; |
| 3087 | break; |
| 3088 | } |
| 3089 | case CmpInst::ICMP_UGE: |
| 3090 | // Always true. |
| 3091 | return getTrue(ITy); |
| 3092 | case CmpInst::ICMP_ULT: |
| 3093 | // Always false. |
| 3094 | return getFalse(ITy); |
| 3095 | } |
| 3096 | } |
| 3097 | |
| 3098 | // Variants on "max(x,y) >= min(x,z)". |
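| | // e.g. (illustrative): smax(%x, %y) >=s smin(%x, %z) is always true, because
| | // smax(%x, %y) >=s %x and %x >=s smin(%x, %z).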
| 3099 | Value *C, *D; |
| 3100 | if (match(LHS, m_SMax(m_Value(A), m_Value(B))) && |
| 3101 | match(RHS, m_SMin(m_Value(C), m_Value(D))) && |
| 3102 | (A == C || A == D || B == C || B == D)) { |
| 3103 | // max(x, ?) pred min(x, ?). |
| 3104 | if (Pred == CmpInst::ICMP_SGE) |
| 3105 | // Always true. |
| 3106 | return getTrue(ITy); |
| 3107 | if (Pred == CmpInst::ICMP_SLT) |
| 3108 | // Always false. |
| 3109 | return getFalse(ITy); |
| 3110 | } else if (match(LHS, m_SMin(m_Value(A), m_Value(B))) && |
| 3111 | match(RHS, m_SMax(m_Value(C), m_Value(D))) && |
| 3112 | (A == C || A == D || B == C || B == D)) { |
| 3113 | // min(x, ?) pred max(x, ?). |
| 3114 | if (Pred == CmpInst::ICMP_SLE) |
| 3115 | // Always true. |
| 3116 | return getTrue(ITy); |
| 3117 | if (Pred == CmpInst::ICMP_SGT) |
| 3118 | // Always false. |
| 3119 | return getFalse(ITy); |
| 3120 | } else if (match(LHS, m_UMax(m_Value(A), m_Value(B))) && |
| 3121 | match(RHS, m_UMin(m_Value(C), m_Value(D))) && |
| 3122 | (A == C || A == D || B == C || B == D)) { |
| 3123 | // max(x, ?) pred min(x, ?). |
| 3124 | if (Pred == CmpInst::ICMP_UGE) |
| 3125 | // Always true. |
| 3126 | return getTrue(ITy); |
| 3127 | if (Pred == CmpInst::ICMP_ULT) |
| 3128 | // Always false. |
| 3129 | return getFalse(ITy); |
| 3130 | } else if (match(LHS, m_UMin(m_Value(A), m_Value(B))) && |
| 3131 | match(RHS, m_UMax(m_Value(C), m_Value(D))) && |
| 3132 | (A == C || A == D || B == C || B == D)) { |
| 3133 | // min(x, ?) pred max(x, ?). |
| 3134 | if (Pred == CmpInst::ICMP_ULE) |
| 3135 | // Always true. |
| 3136 | return getTrue(ITy); |
| 3137 | if (Pred == CmpInst::ICMP_UGT) |
| 3138 | // Always false. |
| 3139 | return getFalse(ITy); |
| 3140 | } |
| 3141 | |
| 3142 | return nullptr; |
| 3143 | } |
| 3144 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 3145 | /// Given operands for an ICmpInst, see if we can fold the result. |
| 3146 | /// If not, this returns null. |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3147 | static Value *SimplifyICmpInst(unsigned Predicate, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3148 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
Chris Lattner | 084a1b5 | 2009-11-09 22:57:59 +0000 | [diff] [blame] | 3149 | CmpInst::Predicate Pred = (CmpInst::Predicate)Predicate; |
Chris Lattner | c1f1907 | 2009-11-09 23:28:39 +0000 | [diff] [blame] | 3150 | assert(CmpInst::isIntPredicate(Pred) && "Not an integer compare!"); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3151 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 3152 | if (Constant *CLHS = dyn_cast<Constant>(LHS)) { |
Chris Lattner | cdfb80d | 2009-11-09 23:06:58 +0000 | [diff] [blame] | 3153 | if (Constant *CRHS = dyn_cast<Constant>(RHS)) |
Rafael Espindola | 37dc9e1 | 2014-02-21 00:06:31 +0000 | [diff] [blame] | 3154 | return ConstantFoldCompareInstOperands(Pred, CLHS, CRHS, Q.DL, Q.TLI); |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 3155 | |
| 3156 | // If we have a constant, make sure it is on the RHS. |
| 3157 | std::swap(LHS, RHS); |
| 3158 | Pred = CmpInst::getSwappedPredicate(Pred); |
| 3159 | } |
Simon Pilgrim | 8ee477a | 2019-03-19 14:08:23 +0000 | [diff] [blame] | 3160 | assert(!isa<UndefValue>(LHS) && "Unexpected icmp undef,%X"); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3161 | |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3162 | Type *ITy = GetCompareTy(LHS); // The return type. |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3163 | |
Simon Pilgrim | 8ee477a | 2019-03-19 14:08:23 +0000 | [diff] [blame] | 3164 | // For EQ and NE, we can always pick a value for the undef to make the |
| 3165 | // predicate pass or fail, so we can return undef. |
| 3166 | // Matches behavior in llvm::ConstantFoldCompareInstruction. |
| 3167 | if (isa<UndefValue>(RHS) && ICmpInst::isEquality(Pred)) |
| 3168 | return UndefValue::get(ITy); |
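| | // e.g. 'icmp eq i32 %x, undef' can fold to undef: choosing the undef to equal
| | // %x makes it true, while any other choice makes it false.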
| 3169 | |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3170 | // icmp X, X -> true/false |
Sanjay Patel | 30be665 | 2018-04-22 17:07:44 +0000 | [diff] [blame] | 3171 | // icmp X, undef -> true/false because undef could be X. |
Duncan Sands | 772749a | 2011-01-01 20:08:02 +0000 | [diff] [blame] | 3172 | if (LHS == RHS || isa<UndefValue>(RHS)) |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3173 | return ConstantInt::get(ITy, CmpInst::isTrueWhenEqual(Pred)); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3174 | |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 3175 | if (Value *V = simplifyICmpOfBools(Pred, LHS, RHS, Q)) |
| 3176 | return V; |
Duncan Sands | 8d25a7c | 2011-01-13 08:56:29 +0000 | [diff] [blame] | 3177 | |
Sanjay Patel | dc65a27 | 2016-12-03 17:30:22 +0000 | [diff] [blame] | 3178 | if (Value *V = simplifyICmpWithZero(Pred, LHS, RHS, Q)) |
| 3179 | return V; |
Duncan Sands | d395108 | 2011-01-25 09:38:29 +0000 | [diff] [blame] | 3180 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3181 | if (Value *V = simplifyICmpWithConstant(Pred, LHS, RHS, Q.IIQ)) |
Sanjay Patel | 67bde28 | 2016-08-22 23:12:02 +0000 | [diff] [blame] | 3182 | return V; |
Duncan Sands | 8d25a7c | 2011-01-13 08:56:29 +0000 | [diff] [blame] | 3183 | |
Chen Li | 7452d95 | 2015-09-26 03:26:47 +0000 | [diff] [blame] | 3184 | // If both operands have range metadata, use the metadata |
| 3185 | // to simplify the comparison. |
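| | // Illustrative example: if %a carries !range [0, 10) and %b carries
| | // !range [20, 30), then 'icmp ult i32 %a, %b' folds to true, since every
| | // value in [0, 10) is ult every value in [20, 30).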
| 3186 | if (isa<Instruction>(RHS) && isa<Instruction>(LHS)) { |
Craig Topper | 0c19861 | 2017-04-10 19:37:10 +0000 | [diff] [blame] | 3187 | auto RHS_Instr = cast<Instruction>(RHS); |
| 3188 | auto LHS_Instr = cast<Instruction>(LHS); |
Chen Li | 7452d95 | 2015-09-26 03:26:47 +0000 | [diff] [blame] | 3189 | |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3190 | if (Q.IIQ.getMetadata(RHS_Instr, LLVMContext::MD_range) && |
| 3191 | Q.IIQ.getMetadata(LHS_Instr, LLVMContext::MD_range)) { |
Sanjoy Das | a7e1378 | 2015-10-24 05:37:35 +0000 | [diff] [blame] | 3192 | auto RHS_CR = getConstantRangeFromMetadata( |
| 3193 | *RHS_Instr->getMetadata(LLVMContext::MD_range)); |
| 3194 | auto LHS_CR = getConstantRangeFromMetadata( |
| 3195 | *LHS_Instr->getMetadata(LLVMContext::MD_range)); |
Chen Li | 7452d95 | 2015-09-26 03:26:47 +0000 | [diff] [blame] | 3196 | |
| 3197 | auto Satisfied_CR = ConstantRange::makeSatisfyingICmpRegion(Pred, RHS_CR); |
| 3198 | if (Satisfied_CR.contains(LHS_CR)) |
| 3199 | return ConstantInt::getTrue(RHS->getContext()); |
| 3200 | |
| 3201 | auto InversedSatisfied_CR = ConstantRange::makeSatisfyingICmpRegion( |
| 3202 | CmpInst::getInversePredicate(Pred), RHS_CR); |
| 3203 | if (InversedSatisfied_CR.contains(LHS_CR)) |
| 3204 | return ConstantInt::getFalse(RHS->getContext()); |
| 3205 | } |
| 3206 | } |
| 3207 | |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3208 | // Compare of cast, for example (zext X) != 0 -> X != 0 |
| 3209 | if (isa<CastInst>(LHS) && (isa<Constant>(RHS) || isa<CastInst>(RHS))) { |
| 3210 | Instruction *LI = cast<CastInst>(LHS); |
| 3211 | Value *SrcOp = LI->getOperand(0); |
Chris Lattner | 229907c | 2011-07-18 04:54:35 +0000 | [diff] [blame] | 3212 | Type *SrcTy = SrcOp->getType(); |
| 3213 | Type *DstTy = LI->getType(); |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3214 | |
| 3215 | // Turn icmp (ptrtoint x), (ptrtoint/constant) into a compare of the input |
| 3216 | // if the integer type is the same size as the pointer type. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3217 | if (MaxRecurse && isa<PtrToIntInst>(LI) && |
| 3218 | Q.DL.getTypeSizeInBits(SrcTy) == DstTy->getPrimitiveSizeInBits()) { |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3219 | if (Constant *RHSC = dyn_cast<Constant>(RHS)) { |
| 3220 | // Transfer the cast to the constant. |
| 3221 | if (Value *V = SimplifyICmpInst(Pred, SrcOp, |
| 3222 | ConstantExpr::getIntToPtr(RHSC, SrcTy), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3223 | Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3224 | return V; |
| 3225 | } else if (PtrToIntInst *RI = dyn_cast<PtrToIntInst>(RHS)) { |
| 3226 | if (RI->getOperand(0)->getType() == SrcTy) |
| 3227 | // Compare without the cast. |
| 3228 | if (Value *V = SimplifyICmpInst(Pred, SrcOp, RI->getOperand(0), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3229 | Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3230 | return V; |
| 3231 | } |
| 3232 | } |
| 3233 | |
| 3234 | if (isa<ZExtInst>(LHS)) { |
| 3235 | // Turn icmp (zext X), (zext Y) into a compare of X and Y if they have the |
| 3236 | // same type. |
| 3237 | if (ZExtInst *RI = dyn_cast<ZExtInst>(RHS)) { |
| 3238 | if (MaxRecurse && SrcTy == RI->getOperand(0)->getType()) |
| 3239 | // Compare X and Y. Note that signed predicates become unsigned. |
| 3240 | if (Value *V = SimplifyICmpInst(ICmpInst::getUnsignedPredicate(Pred), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3241 | SrcOp, RI->getOperand(0), Q, |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3242 | MaxRecurse-1)) |
| 3243 | return V; |
| 3244 | } |
| 3245 | // Turn icmp (zext X), Cst into a compare of X and Cst if Cst is extended |
| 3246 | // too. If not, then try to deduce the result of the comparison. |
| 3247 | else if (ConstantInt *CI = dyn_cast<ConstantInt>(RHS)) { |
| 3248 | // Compute the constant that would happen if we truncated to SrcTy then |
| 3249 | // reextended to DstTy. |
| 3250 | Constant *Trunc = ConstantExpr::getTrunc(CI, SrcTy); |
| 3251 | Constant *RExt = ConstantExpr::getCast(CastInst::ZExt, Trunc, DstTy); |
| 3252 | |
| 3253 | // If the re-extended constant didn't change then this is effectively |
| 3254 | // also a case of comparing two zero-extended values. |
| 3255 | if (RExt == CI && MaxRecurse) |
| 3256 | if (Value *V = SimplifyICmpInst(ICmpInst::getUnsignedPredicate(Pred), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3257 | SrcOp, Trunc, Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3258 | return V; |
| 3259 | |
| 3260 | // Otherwise the upper bits of LHS are zero while RHS has a non-zero bit |
| 3261 | // there. Use this to work out the result of the comparison. |
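| | // e.g. (illustrative): 'icmp ugt i16 (zext i8 %x to i16), 256' folds to
| | // false, since the zext result is at most 255.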
| 3262 | if (RExt != CI) { |
| 3263 | switch (Pred) { |
Craig Topper | a2886c2 | 2012-02-07 05:05:23 +0000 | [diff] [blame] | 3264 | default: llvm_unreachable("Unknown ICmp predicate!"); |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3265 | // LHS <u RHS. |
| 3266 | case ICmpInst::ICMP_EQ: |
| 3267 | case ICmpInst::ICMP_UGT: |
| 3268 | case ICmpInst::ICMP_UGE: |
| 3269 | return ConstantInt::getFalse(CI->getContext()); |
| 3270 | |
| 3271 | case ICmpInst::ICMP_NE: |
| 3272 | case ICmpInst::ICMP_ULT: |
| 3273 | case ICmpInst::ICMP_ULE: |
| 3274 | return ConstantInt::getTrue(CI->getContext()); |
| 3275 | |
| 3276 | // LHS is non-negative. If RHS is negative then LHS >s RHS. If RHS
| 3277 | // is non-negative then LHS <s RHS. |
| 3278 | case ICmpInst::ICMP_SGT: |
| 3279 | case ICmpInst::ICMP_SGE: |
| 3280 | return CI->getValue().isNegative() ? |
| 3281 | ConstantInt::getTrue(CI->getContext()) : |
| 3282 | ConstantInt::getFalse(CI->getContext()); |
| 3283 | |
| 3284 | case ICmpInst::ICMP_SLT: |
| 3285 | case ICmpInst::ICMP_SLE: |
| 3286 | return CI->getValue().isNegative() ? |
| 3287 | ConstantInt::getFalse(CI->getContext()) : |
| 3288 | ConstantInt::getTrue(CI->getContext()); |
| 3289 | } |
| 3290 | } |
| 3291 | } |
| 3292 | } |
| 3293 | |
| 3294 | if (isa<SExtInst>(LHS)) { |
| 3295 | // Turn icmp (sext X), (sext Y) into a compare of X and Y if they have the |
| 3296 | // same type. |
| 3297 | if (SExtInst *RI = dyn_cast<SExtInst>(RHS)) { |
| 3298 | if (MaxRecurse && SrcTy == RI->getOperand(0)->getType()) |
| 3299 | // Compare X and Y. Note that the predicate does not change. |
| 3300 | if (Value *V = SimplifyICmpInst(Pred, SrcOp, RI->getOperand(0), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3301 | Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3302 | return V; |
| 3303 | } |
| 3304 | // Turn icmp (sext X), Cst into a compare of X and Cst if Cst is extended |
| 3305 | // too. If not, then try to deduce the result of the comparison. |
| 3306 | else if (ConstantInt *CI = dyn_cast<ConstantInt>(RHS)) { |
| 3307 | // Compute the constant that would happen if we truncated to SrcTy then |
| 3308 | // reextended to DstTy. |
| 3309 | Constant *Trunc = ConstantExpr::getTrunc(CI, SrcTy); |
| 3310 | Constant *RExt = ConstantExpr::getCast(CastInst::SExt, Trunc, DstTy); |
| 3311 | |
| 3312 | // If the re-extended constant didn't change then this is effectively |
| 3313 | // also a case of comparing two sign-extended values. |
| 3314 | if (RExt == CI && MaxRecurse) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3315 | if (Value *V = SimplifyICmpInst(Pred, SrcOp, Trunc, Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3316 | return V; |
| 3317 | |
| 3318 | // Otherwise the upper bits of LHS are all equal, while RHS has varying |
| 3319 | // bits there. Use this to work out the result of the comparison. |
| 3320 | if (RExt != CI) { |
| 3321 | switch (Pred) { |
Craig Topper | a2886c2 | 2012-02-07 05:05:23 +0000 | [diff] [blame] | 3322 | default: llvm_unreachable("Unknown ICmp predicate!"); |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3323 | case ICmpInst::ICMP_EQ: |
| 3324 | return ConstantInt::getFalse(CI->getContext()); |
| 3325 | case ICmpInst::ICMP_NE: |
| 3326 | return ConstantInt::getTrue(CI->getContext()); |
| 3327 | |
| 3328 | // If RHS is non-negative then LHS <s RHS. If RHS is negative then |
| 3329 | // LHS >s RHS. |
| 3330 | case ICmpInst::ICMP_SGT: |
| 3331 | case ICmpInst::ICMP_SGE: |
| 3332 | return CI->getValue().isNegative() ? |
| 3333 | ConstantInt::getTrue(CI->getContext()) : |
| 3334 | ConstantInt::getFalse(CI->getContext()); |
| 3335 | case ICmpInst::ICMP_SLT: |
| 3336 | case ICmpInst::ICMP_SLE: |
| 3337 | return CI->getValue().isNegative() ? |
| 3338 | ConstantInt::getFalse(CI->getContext()) : |
| 3339 | ConstantInt::getTrue(CI->getContext()); |
| 3340 | |
| 3341 | // If LHS is non-negative then LHS <u RHS. If LHS is negative then |
| 3342 | // LHS >u RHS. |
| 3343 | case ICmpInst::ICMP_UGT: |
| 3344 | case ICmpInst::ICMP_UGE: |
Sylvestre Ledru | 91ce36c | 2012-09-27 10:14:43 +0000 | [diff] [blame] | 3345 | // Comparison is true iff the LHS <s 0. |
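| | // e.g. (illustrative): for 'icmp ugt i16 (sext i8 %x to i16), 1000', the
| | // sext is >u 1000 exactly when %x is negative (it then sits in the range
| | // 65408..65535), so the compare reduces to 'icmp slt i8 %x, 0'.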
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3346 | if (MaxRecurse) |
| 3347 | if (Value *V = SimplifyICmpInst(ICmpInst::ICMP_SLT, SrcOp, |
| 3348 | Constant::getNullValue(SrcTy), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3349 | Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3350 | return V; |
| 3351 | break; |
| 3352 | case ICmpInst::ICMP_ULT: |
| 3353 | case ICmpInst::ICMP_ULE: |
Sylvestre Ledru | 91ce36c | 2012-09-27 10:14:43 +0000 | [diff] [blame] | 3354 | // Comparison is true iff the LHS >=s 0. |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3355 | if (MaxRecurse) |
| 3356 | if (Value *V = SimplifyICmpInst(ICmpInst::ICMP_SGE, SrcOp, |
| 3357 | Constant::getNullValue(SrcTy), |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3358 | Q, MaxRecurse-1)) |
Duncan Sands | 8fb2c38 | 2011-01-20 13:21:55 +0000 | [diff] [blame] | 3359 | return V; |
| 3360 | break; |
| 3361 | } |
| 3362 | } |
| 3363 | } |
| 3364 | } |
| 3365 | } |
| 3366 | |
James Molloy | 1d88d6f | 2015-10-22 13:18:42 +0000 | [diff] [blame] | 3367 | // icmp eq|ne X, Y -> false|true if X != Y |
Craig Topper | c2790ec | 2017-06-06 07:13:04 +0000 | [diff] [blame] | 3368 | if (ICmpInst::isEquality(Pred) && |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3369 | isKnownNonEqual(LHS, RHS, Q.DL, Q.AC, Q.CxtI, Q.DT, Q.IIQ.UseInstrInfo)) { |
Craig Topper | 2dfb480 | 2017-06-06 07:13:13 +0000 | [diff] [blame] | 3370 | return Pred == ICmpInst::ICMP_NE ? getTrue(ITy) : getFalse(ITy); |
James Molloy | 1d88d6f | 2015-10-22 13:18:42 +0000 | [diff] [blame] | 3371 | } |
Junmo Park | 53470fc | 2016-04-05 21:14:31 +0000 | [diff] [blame] | 3372 | |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 3373 | if (Value *V = simplifyICmpWithBinOp(Pred, LHS, RHS, Q, MaxRecurse)) |
| 3374 | return V; |
Duncan Sands | d114ab3 | 2011-02-13 17:15:40 +0000 | [diff] [blame] | 3375 | |
Sanjay Patel | 35289c6 | 2016-12-10 17:40:47 +0000 | [diff] [blame] | 3376 | if (Value *V = simplifyICmpWithMinMax(Pred, LHS, RHS, Q, MaxRecurse)) |
Sanjay Patel | 9d5b5e3 | 2016-12-03 18:03:53 +0000 | [diff] [blame] | 3377 | return V; |
Duncan Sands | a228785 | 2011-05-04 16:05:05 +0000 | [diff] [blame] | 3378 | |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 3379 | // Simplify comparisons of related pointers using a powerful, recursive |
| 3380 | // GEP-walk when we have target data available. |
Dan Gohman | 18c77a1 | 2013-01-31 02:50:36 +0000 | [diff] [blame] | 3381 | if (LHS->getType()->isPointerTy()) |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3382 | if (auto *C = computePointerICmp(Q.DL, Q.TLI, Q.DT, Pred, Q.AC, Q.CxtI, |
| 3383 | Q.IIQ, LHS, RHS)) |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 3384 | return C; |
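| | // If both operands are ptrtoint casts that preserve the pointer width, |
| | // compare the underlying pointers instead. |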
David Majnemer | dc8767a | 2016-08-07 07:58:10 +0000 | [diff] [blame] | 3385 | if (auto *CLHS = dyn_cast<PtrToIntOperator>(LHS)) |
| 3386 | if (auto *CRHS = dyn_cast<PtrToIntOperator>(RHS)) |
| 3387 | if (Q.DL.getTypeSizeInBits(CLHS->getPointerOperandType()) == |
| 3388 | Q.DL.getTypeSizeInBits(CLHS->getType()) && |
| 3389 | Q.DL.getTypeSizeInBits(CRHS->getPointerOperandType()) == |
| 3390 | Q.DL.getTypeSizeInBits(CRHS->getType())) |
Nuno Lopes | 404f106 | 2017-09-09 18:23:11 +0000 | [diff] [blame] | 3391 | if (auto *C = computePointerICmp(Q.DL, Q.TLI, Q.DT, Pred, Q.AC, Q.CxtI, |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3392 | Q.IIQ, CLHS->getPointerOperand(), |
David Majnemer | dc8767a | 2016-08-07 07:58:10 +0000 | [diff] [blame] | 3393 | CRHS->getPointerOperand())) |
| 3394 | return C; |
Chandler Carruth | 8059c84 | 2012-03-25 21:28:14 +0000 | [diff] [blame] | 3395 | |
Nick Lewycky | 3db143e | 2012-02-26 02:09:49 +0000 | [diff] [blame] | 3396 | if (GetElementPtrInst *GLHS = dyn_cast<GetElementPtrInst>(LHS)) { |
| 3397 | if (GEPOperator *GRHS = dyn_cast<GEPOperator>(RHS)) { |
| 3398 | if (GLHS->getPointerOperand() == GRHS->getPointerOperand() && |
| 3399 | GLHS->hasAllConstantIndices() && GRHS->hasAllConstantIndices() && |
| 3400 | (ICmpInst::isEquality(Pred) || |
| 3401 | (GLHS->isInBounds() && GRHS->isInBounds() && |
| 3402 | Pred == ICmpInst::getSignedPredicate(Pred)))) { |
| 3403 | // The bases are equal and the indices are constant. Build a constant |
| 3404 | // expression GEP with the same indices and a null base pointer to see |
| 3405 | // what constant folding can make out of it. |
| 3406 | Constant *Null = Constant::getNullValue(GLHS->getPointerOperandType()); |
| 3407 | SmallVector<Value *, 4> IndicesLHS(GLHS->idx_begin(), GLHS->idx_end()); |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 3408 | Constant *NewLHS = ConstantExpr::getGetElementPtr( |
| 3409 | GLHS->getSourceElementType(), Null, IndicesLHS); |
Nick Lewycky | 3db143e | 2012-02-26 02:09:49 +0000 | [diff] [blame] | 3410 | |
| 3411 | SmallVector<Value *, 4> IndicesRHS(GRHS->idx_begin(), GRHS->idx_end()); |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 3412 | Constant *NewRHS = ConstantExpr::getGetElementPtr( |
| 3413 | GLHS->getSourceElementType(), Null, IndicesRHS); |
Nick Lewycky | 3db143e | 2012-02-26 02:09:49 +0000 | [diff] [blame] | 3414 | return ConstantExpr::getICmp(Pred, NewLHS, NewRHS); |
| 3415 | } |
| 3416 | } |
| 3417 | } |
| 3418 | |
Duncan Sands | f532d31 | 2010-11-07 16:12:23 +0000 | [diff] [blame] | 3419 | // If the comparison is with the result of a select instruction, check whether |
| 3420 | // comparing with either branch of the select always yields the same value. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 3421 | if (isa<SelectInst>(LHS) || isa<SelectInst>(RHS)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3422 | if (Value *V = ThreadCmpOverSelect(Pred, LHS, RHS, Q, MaxRecurse)) |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3423 | return V; |
| 3424 | |
| 3425 | // If the comparison is with the result of a phi instruction, check whether |
| 3426 | // doing the compare with each incoming phi value yields a common result. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 3427 | if (isa<PHINode>(LHS) || isa<PHINode>(RHS)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3428 | if (Value *V = ThreadCmpOverPHI(Pred, LHS, RHS, Q, MaxRecurse)) |
Duncan Sands | fc5ad3f0 | 2010-11-09 17:25:51 +0000 | [diff] [blame] | 3429 | return V; |
Duncan Sands | f532d31 | 2010-11-07 16:12:23 +0000 | [diff] [blame] | 3430 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3431 | return nullptr; |
Chris Lattner | 084a1b5 | 2009-11-09 22:57:59 +0000 | [diff] [blame] | 3432 | } |
| 3433 | |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3434 | Value *llvm::SimplifyICmpInst(unsigned Predicate, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3435 | const SimplifyQuery &Q) { |
| 3436 | return ::SimplifyICmpInst(Predicate, LHS, RHS, Q, RecursionLimit); |
| 3437 | } |
| 3438 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 3439 | /// Given operands for an FCmpInst, see if we can fold the result. |
| 3440 | /// If not, this returns null. |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3441 | static Value *SimplifyFCmpInst(unsigned Predicate, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3442 | FastMathFlags FMF, const SimplifyQuery &Q, |
Benjamin Kramer | f4ebfa3 | 2015-07-10 14:02:02 +0000 | [diff] [blame] | 3443 | unsigned MaxRecurse) { |
Chris Lattner | c1f1907 | 2009-11-09 23:28:39 +0000 | [diff] [blame] | 3444 | CmpInst::Predicate Pred = (CmpInst::Predicate)Predicate; |
| 3445 | assert(CmpInst::isFPPredicate(Pred) && "Not an FP compare!"); |
| 3446 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 3447 | if (Constant *CLHS = dyn_cast<Constant>(LHS)) { |
Chris Lattner | c1f1907 | 2009-11-09 23:28:39 +0000 | [diff] [blame] | 3448 | if (Constant *CRHS = dyn_cast<Constant>(RHS)) |
Rafael Espindola | 37dc9e1 | 2014-02-21 00:06:31 +0000 | [diff] [blame] | 3449 | return ConstantFoldCompareInstOperands(Pred, CLHS, CRHS, Q.DL, Q.TLI); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3450 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 3451 | // If we have a constant, make sure it is on the RHS. |
| 3452 | std::swap(LHS, RHS); |
| 3453 | Pred = CmpInst::getSwappedPredicate(Pred); |
| 3454 | } |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3455 | |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3456 | // Fold trivial predicates. |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3457 | Type *RetTy = GetCompareTy(LHS); |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3458 | if (Pred == FCmpInst::FCMP_FALSE) |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3459 | return getFalse(RetTy); |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3460 | if (Pred == FCmpInst::FCMP_TRUE) |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3461 | return getTrue(RetTy); |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3462 | |
Sanjay Patel | f3ae9cc | 2018-08-21 14:45:13 +0000 | [diff] [blame] | 3463 | // Fold (un)ordered comparison if we can determine there are no NaNs. |
| 3464 | if (Pred == FCmpInst::FCMP_UNO || Pred == FCmpInst::FCMP_ORD) |
| 3465 | if (FMF.noNaNs() || |
| 3466 | (isKnownNeverNaN(LHS, Q.TLI) && isKnownNeverNaN(RHS, Q.TLI))) |
| 3467 | return ConstantInt::get(RetTy, Pred == FCmpInst::FCMP_ORD); |
Benjamin Kramer | f4ebfa3 | 2015-07-10 14:02:02 +0000 | [diff] [blame] | 3468 | |
Sanjay Patel | 46b083e | 2018-03-02 18:36:08 +0000 | [diff] [blame] | 3469 | // NaN is unordered; NaN is not ordered. |
| 3470 | assert((FCmpInst::isOrdered(Pred) || FCmpInst::isUnordered(Pred)) && |
| 3471 | "Comparison must be either ordered or unordered"); |
| 3472 | if (match(RHS, m_NaN())) |
| 3473 | return ConstantInt::get(RetTy, CmpInst::isUnordered(Pred)); |
| 3474 | |
Mehdi Amini | eb242a5 | 2015-03-09 03:20:25 +0000 | [diff] [blame] | 3475 | // fcmp pred x, undef and fcmp pred undef, x |
| 3476 | // fold to true if unordered, false if ordered |
| 3477 | if (isa<UndefValue>(LHS) || isa<UndefValue>(RHS)) { |
| 3478 | // Choosing NaN for the undef will always make unordered comparison succeed |
| 3479 | // and ordered comparison fail. |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3480 | return ConstantInt::get(RetTy, CmpInst::isUnordered(Pred)); |
Mehdi Amini | eb242a5 | 2015-03-09 03:20:25 +0000 | [diff] [blame] | 3481 | } |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3482 | |
| 3483 | // fcmp x,x -> true/false. Not all compares are foldable. |
Duncan Sands | 772749a | 2011-01-01 20:08:02 +0000 | [diff] [blame] | 3484 | if (LHS == RHS) { |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3485 | if (CmpInst::isTrueWhenEqual(Pred)) |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3486 | return getTrue(RetTy); |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3487 | if (CmpInst::isFalseWhenEqual(Pred)) |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3488 | return getFalse(RetTy); |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3489 | } |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3490 | |
Sanjay Patel | 4ca9968 | 2017-11-27 16:37:09 +0000 | [diff] [blame] | 3491 | // Handle fcmp with constant RHS. |
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3492 | // TODO: Use match with a specific FP value, so these work with vectors with |
| 3493 | // undef lanes. |
Sanjay Patel | 4ca9968 | 2017-11-27 16:37:09 +0000 | [diff] [blame] | 3494 | const APFloat *C; |
| 3495 | if (match(RHS, m_APFloat(C))) { |
Mehdi Amini | 383d7ae | 2015-02-13 07:38:04 +0000 | [diff] [blame] | 3496 | // Check whether the constant is an infinity. |
Sanjay Patel | 4ca9968 | 2017-11-27 16:37:09 +0000 | [diff] [blame] | 3497 | if (C->isInfinity()) { |
| 3498 | if (C->isNegative()) { |
Elena Demikhovsky | 45f0448 | 2015-01-28 08:03:58 +0000 | [diff] [blame] | 3499 | switch (Pred) { |
Elena Demikhovsky | 45f0448 | 2015-01-28 08:03:58 +0000 | [diff] [blame] | 3500 | case FCmpInst::FCMP_OLT: |
Mehdi Amini | 383d7ae | 2015-02-13 07:38:04 +0000 | [diff] [blame] | 3501 | // No value is ordered and less than negative infinity. |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3502 | return getFalse(RetTy); |
Mehdi Amini | 383d7ae | 2015-02-13 07:38:04 +0000 | [diff] [blame] | 3503 | case FCmpInst::FCMP_UGE: |
| 3504 | // All values are unordered with or at least negative infinity. |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3505 | return getTrue(RetTy); |
Elena Demikhovsky | 45f0448 | 2015-01-28 08:03:58 +0000 | [diff] [blame] | 3506 | default: |
| 3507 | break; |
| 3508 | } |
Mehdi Amini | 383d7ae | 2015-02-13 07:38:04 +0000 | [diff] [blame] | 3509 | } else { |
| 3510 | switch (Pred) { |
| 3511 | case FCmpInst::FCMP_OGT: |
| 3512 | // No value is ordered and greater than infinity. |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3513 | return getFalse(RetTy); |
Mehdi Amini | 383d7ae | 2015-02-13 07:38:04 +0000 | [diff] [blame] | 3514 | case FCmpInst::FCMP_ULE: |
| 3515 | // All values are unordered with and at most infinity. |
Andrea Di Biagio | bff3fd6 | 2016-09-02 15:55:25 +0000 | [diff] [blame] | 3516 | return getTrue(RetTy); |
Mehdi Amini | 383d7ae | 2015-02-13 07:38:04 +0000 | [diff] [blame] | 3517 | default: |
| 3518 | break; |
| 3519 | } |
| 3520 | } |
Sanjay Patel | 49f9739 | 2019-02-20 00:20:38 +0000 | [diff] [blame] | 3521 | } |
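| | // Handle comparisons with a negative (and non-zero) constant RHS. |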
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3522 | if (C->isNegative() && !C->isNegZero()) { |
Florian Hahn | 30932a3 | 2017-12-01 12:34:16 +0000 | [diff] [blame] | 3523 | assert(!C->isNaN() && "Unexpected NaN constant!"); |
| 3524 | // TODO: We can catch more cases by using a range check rather than |
| 3525 | // relying on CannotBeOrderedLessThanZero. |
| 3526 | switch (Pred) { |
| 3527 | case FCmpInst::FCMP_UGE: |
| 3528 | case FCmpInst::FCMP_UGT: |
| 3529 | case FCmpInst::FCMP_UNE: |
| 3530 | // (X >= 0) implies (X > C) when (C < 0) |
| 3531 | if (CannotBeOrderedLessThanZero(LHS, Q.TLI)) |
| 3532 | return getTrue(RetTy); |
| 3533 | break; |
| 3534 | case FCmpInst::FCMP_OEQ: |
| 3535 | case FCmpInst::FCMP_OLE: |
| 3536 | case FCmpInst::FCMP_OLT: |
| 3537 | // (X >= 0) implies !(X < C) when (C < 0) |
| 3538 | if (CannotBeOrderedLessThanZero(LHS, Q.TLI)) |
| 3539 | return getFalse(RetTy); |
| 3540 | break; |
| 3541 | default: |
| 3542 | break; |
| 3543 | } |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3544 | } |
Sanjay Patel | 152f81f | 2019-05-16 14:03:10 +0000 | [diff] [blame] | 3545 | |
Sanjay Patel | 63fa690 | 2019-05-20 17:52:18 +0000 | [diff] [blame] | 3546 | // Check comparison of [minnum/maxnum with constant] with other constant. |
Sanjay Patel | 9ef99b4 | 2019-05-19 14:26:39 +0000 | [diff] [blame] | 3547 | const APFloat *C2; |
| 3548 | if ((match(LHS, m_Intrinsic<Intrinsic::minnum>(m_Value(), m_APFloat(C2))) && |
| 3549 | C2->compare(*C) == APFloat::cmpLessThan) || |
| 3550 | (match(LHS, m_Intrinsic<Intrinsic::maxnum>(m_Value(), m_APFloat(C2))) && |
| 3551 | C2->compare(*C) == APFloat::cmpGreaterThan)) { |
| 3552 | bool IsMaxNum = |
| 3553 | cast<IntrinsicInst>(LHS)->getIntrinsicID() == Intrinsic::maxnum; |
| 3554 | // The ordered relationship and minnum/maxnum guarantee that we do not |
| 3555 | // have NaN constants, so ordered/unordered preds are handled the same. |
Sanjay Patel | 152f81f | 2019-05-16 14:03:10 +0000 | [diff] [blame] | 3556 | switch (Pred) { |
| 3557 | case FCmpInst::FCMP_OEQ: case FCmpInst::FCMP_UEQ: |
Sanjay Patel | 9ef99b4 | 2019-05-19 14:26:39 +0000 | [diff] [blame] | 3558 | // minnum(X, LesserC) == C --> false |
| 3559 | // maxnum(X, GreaterC) == C --> false |
Sanjay Patel | 152f81f | 2019-05-16 14:03:10 +0000 | [diff] [blame] | 3560 | return getFalse(RetTy); |
| 3561 | case FCmpInst::FCMP_ONE: case FCmpInst::FCMP_UNE: |
Sanjay Patel | 9ef99b4 | 2019-05-19 14:26:39 +0000 | [diff] [blame] | 3562 | // minnum(X, LesserC) != C --> true |
| 3563 | // maxnum(X, GreaterC) != C --> true |
| 3564 | return getTrue(RetTy); |
| 3565 | case FCmpInst::FCMP_OGE: case FCmpInst::FCMP_UGE: |
| 3566 | case FCmpInst::FCMP_OGT: case FCmpInst::FCMP_UGT: |
| 3567 | // minnum(X, LesserC) >= C --> false |
| 3568 | // minnum(X, LesserC) > C --> false |
| 3569 | // maxnum(X, GreaterC) >= C --> true |
| 3570 | // maxnum(X, GreaterC) > C --> true |
| 3571 | return ConstantInt::get(RetTy, IsMaxNum); |
Sanjay Patel | 152f81f | 2019-05-16 14:03:10 +0000 | [diff] [blame] | 3572 | case FCmpInst::FCMP_OLE: case FCmpInst::FCMP_ULE: |
| 3573 | case FCmpInst::FCMP_OLT: case FCmpInst::FCMP_ULT: |
Sanjay Patel | 9ef99b4 | 2019-05-19 14:26:39 +0000 | [diff] [blame] | 3574 | // minnum(X, LesserC) <= C --> true |
| 3575 | // minnum(X, LesserC) < C --> true |
| 3576 | // maxnum(X, GreaterC) <= C --> false |
| 3577 | // maxnum(X, GreaterC) < C --> false |
| 3578 | return ConstantInt::get(RetTy, !IsMaxNum); |
Sanjay Patel | 152f81f | 2019-05-16 14:03:10 +0000 | [diff] [blame] | 3579 | default: |
| 3580 | // TRUE/FALSE/ORD/UNO should be handled before this. |
| 3581 | llvm_unreachable("Unexpected fcmp predicate"); |
| 3582 | } |
| 3583 | } |
Chris Lattner | ccfdceb | 2009-11-09 23:55:12 +0000 | [diff] [blame] | 3584 | } |
Sanjay Patel | 152f81f | 2019-05-16 14:03:10 +0000 | [diff] [blame] | 3585 | |
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3586 | if (match(RHS, m_AnyZeroFP())) { |
| 3587 | switch (Pred) { |
| 3588 | case FCmpInst::FCMP_OGE: |
Sanjay Patel | 866db10 | 2019-06-09 13:58:46 +0000 | [diff] [blame] | 3589 | case FCmpInst::FCMP_ULT: |
| 3590 | // Positive or zero X >= 0.0 --> true |
| 3591 | // Positive or zero X < 0.0 --> false |
Sanjay Patel | 4329c15 | 2019-06-08 15:12:33 +0000 | [diff] [blame] | 3592 | if ((FMF.noNaNs() || isKnownNeverNaN(LHS, Q.TLI)) && |
| 3593 | CannotBeOrderedLessThanZero(LHS, Q.TLI)) |
Sanjay Patel | 866db10 | 2019-06-09 13:58:46 +0000 | [diff] [blame] | 3594 | return Pred == FCmpInst::FCMP_OGE ? getTrue(RetTy) : getFalse(RetTy); |
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3595 | break; |
| 3596 | case FCmpInst::FCMP_UGE: |
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3597 | case FCmpInst::FCMP_OLT: |
Sanjay Patel | 866db10 | 2019-06-09 13:58:46 +0000 | [diff] [blame] | 3598 | // Positive or zero or nan X >= 0.0 --> true |
| 3599 | // Positive or zero or nan X < 0.0 --> false |
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3600 | if (CannotBeOrderedLessThanZero(LHS, Q.TLI)) |
Sanjay Patel | 866db10 | 2019-06-09 13:58:46 +0000 | [diff] [blame] | 3601 | return Pred == FCmpInst::FCMP_UGE ? getTrue(RetTy) : getFalse(RetTy); |
Sanjay Patel | 68171e3 | 2019-02-20 14:34:00 +0000 | [diff] [blame] | 3602 | break; |
| 3603 | default: |
| 3604 | break; |
| 3605 | } |
| 3606 | } |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3607 | |
Duncan Sands | a620bd1 | 2010-11-07 16:46:25 +0000 | [diff] [blame] | 3608 | // If the comparison is with the result of a select instruction, check whether |
| 3609 | // comparing with either branch of the select always yields the same value. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 3610 | if (isa<SelectInst>(LHS) || isa<SelectInst>(RHS)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3611 | if (Value *V = ThreadCmpOverSelect(Pred, LHS, RHS, Q, MaxRecurse)) |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3612 | return V; |
| 3613 | |
| 3614 | // If the comparison is with the result of a phi instruction, check whether |
| 3615 | // doing the compare with each incoming phi value yields a common result. |
Duncan Sands | f64e690 | 2010-12-21 09:09:15 +0000 | [diff] [blame] | 3616 | if (isa<PHINode>(LHS) || isa<PHINode>(RHS)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3617 | if (Value *V = ThreadCmpOverPHI(Pred, LHS, RHS, Q, MaxRecurse)) |
Duncan Sands | fc5ad3f0 | 2010-11-09 17:25:51 +0000 | [diff] [blame] | 3618 | return V; |
Duncan Sands | a620bd1 | 2010-11-07 16:46:25 +0000 | [diff] [blame] | 3619 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3620 | return nullptr; |
Chris Lattner | c1f1907 | 2009-11-09 23:28:39 +0000 | [diff] [blame] | 3621 | } |
| 3622 | |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3623 | Value *llvm::SimplifyFCmpInst(unsigned Predicate, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3624 | FastMathFlags FMF, const SimplifyQuery &Q) { |
| 3625 | return ::SimplifyFCmpInst(Predicate, LHS, RHS, FMF, Q, RecursionLimit); |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 3626 | } |
| 3627 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 3628 | /// See if V simplifies when its operand Op is replaced with RepOp. |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3629 | static const Value *SimplifyWithOpReplaced(Value *V, Value *Op, Value *RepOp, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3630 | const SimplifyQuery &Q, |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3631 | unsigned MaxRecurse) { |
| 3632 | // Trivial replacement. |
| 3633 | if (V == Op) |
| 3634 | return RepOp; |
| 3635 | |
Tim Northover | 997f5f1 | 2017-05-22 21:28:08 +0000 | [diff] [blame] | 3636 | // We cannot replace a constant, and shouldn't even try. |
| 3637 | if (isa<Constant>(Op)) |
| 3638 | return nullptr; |
| 3639 | |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3640 | auto *I = dyn_cast<Instruction>(V); |
| 3641 | if (!I) |
| 3642 | return nullptr; |
| 3643 | |
| 3644 | // If this is a binary operator, try to simplify it with the replaced op. |
| 3645 | if (auto *B = dyn_cast<BinaryOperator>(I)) { |
| 3646 | // Consider: |
| 3647 | // %cmp = icmp eq i32 %x, 2147483647 |
| 3648 | // %add = add nsw i32 %x, 1 |
| 3649 | // %sel = select i1 %cmp, i32 -2147483648, i32 %add |
| 3650 | // |
| 3651 | // We can't replace %sel with %add unless we strip away the flags. |
Sanjay Patel | 9ce5f41 | 2019-08-02 17:39:32 +0000 | [diff] [blame] | 3652 | // TODO: This is an unusual limitation because better analysis results in |
| 3653 | // worse simplification. InstCombine can do this fold more generally |
| 3654 | // by dropping the flags. Remove this fold to save compile-time? |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3655 | if (isa<OverflowingBinaryOperator>(B)) |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3656 | if (Q.IIQ.hasNoSignedWrap(B) || Q.IIQ.hasNoUnsignedWrap(B)) |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3657 | return nullptr; |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 3658 | if (isa<PossiblyExactOperator>(B) && Q.IIQ.isExact(B)) |
| 3659 | return nullptr; |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3660 | |
| 3661 | if (MaxRecurse) { |
| 3662 | if (B->getOperand(0) == Op) |
| 3663 | return SimplifyBinOp(B->getOpcode(), RepOp, B->getOperand(1), Q, |
| 3664 | MaxRecurse - 1); |
| 3665 | if (B->getOperand(1) == Op) |
| 3666 | return SimplifyBinOp(B->getOpcode(), B->getOperand(0), RepOp, Q, |
| 3667 | MaxRecurse - 1); |
| 3668 | } |
| 3669 | } |
| 3670 | |
| 3671 | // Same for CmpInsts. |
| 3672 | if (CmpInst *C = dyn_cast<CmpInst>(I)) { |
| 3673 | if (MaxRecurse) { |
| 3674 | if (C->getOperand(0) == Op) |
| 3675 | return SimplifyCmpInst(C->getPredicate(), RepOp, C->getOperand(1), Q, |
| 3676 | MaxRecurse - 1); |
| 3677 | if (C->getOperand(1) == Op) |
| 3678 | return SimplifyCmpInst(C->getPredicate(), C->getOperand(0), RepOp, Q, |
| 3679 | MaxRecurse - 1); |
| 3680 | } |
| 3681 | } |
| 3682 | |
George Burgess IV | 8e807bf | 2018-04-24 00:25:01 +0000 | [diff] [blame] | 3683 | // Same for GEPs. |
| 3684 | if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) { |
| 3685 | if (MaxRecurse) { |
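| | // Rebuild the GEP with Op replaced by RepOp in its operand list, then try |
| | // to simplify the result. |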
| 3686 | SmallVector<Value *, 8> NewOps(GEP->getNumOperands()); |
| 3687 | transform(GEP->operands(), NewOps.begin(), |
| 3688 | [&](Value *V) { return V == Op ? RepOp : V; }); |
| 3689 | return SimplifyGEPInst(GEP->getSourceElementType(), NewOps, Q, |
| 3690 | MaxRecurse - 1); |
| 3691 | } |
| 3692 | } |
| 3693 | |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3694 | // TODO: We could hand off more cases to instsimplify here. |
| 3695 | |
| 3696 | // If all operands are constant after substituting Op for RepOp then we can |
| 3697 | // constant fold the instruction. |
| 3698 | if (Constant *CRepOp = dyn_cast<Constant>(RepOp)) { |
| 3699 | // Build a list of all constant operands. |
| 3700 | SmallVector<Constant *, 8> ConstOps; |
| 3701 | for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) { |
| 3702 | if (I->getOperand(i) == Op) |
| 3703 | ConstOps.push_back(CRepOp); |
| 3704 | else if (Constant *COp = dyn_cast<Constant>(I->getOperand(i))) |
| 3705 | ConstOps.push_back(COp); |
| 3706 | else |
| 3707 | break; |
| 3708 | } |
| 3709 | |
| 3710 | // All operands were constants, fold it. |
| 3711 | if (ConstOps.size() == I->getNumOperands()) { |
| 3712 | if (CmpInst *C = dyn_cast<CmpInst>(I)) |
| 3713 | return ConstantFoldCompareInstOperands(C->getPredicate(), ConstOps[0], |
| 3714 | ConstOps[1], Q.DL, Q.TLI); |
| 3715 | |
| 3716 | if (LoadInst *LI = dyn_cast<LoadInst>(I)) |
| 3717 | if (!LI->isVolatile()) |
Eduard Burtescu | 1423921 | 2016-01-22 01:17:26 +0000 | [diff] [blame] | 3718 | return ConstantFoldLoadFromConstPtr(ConstOps[0], LI->getType(), Q.DL); |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3719 | |
Manuel Jacob | e902459 | 2016-01-21 06:33:22 +0000 | [diff] [blame] | 3720 | return ConstantFoldInstOperands(I, ConstOps, Q.DL, Q.TLI); |
David Majnemer | 3f0fb98 | 2015-06-06 22:40:21 +0000 | [diff] [blame] | 3721 | } |
| 3722 | } |
| 3723 | |
| 3724 | return nullptr; |
| 3725 | } |
| 3726 | |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3727 | /// Try to simplify a select instruction when its condition operand is an |
| 3728 | /// integer comparison where one operand of the compare is a constant. |
| 3729 | static Value *simplifySelectBitTest(Value *TrueVal, Value *FalseVal, Value *X, |
| 3730 | const APInt *Y, bool TrueWhenUnset) { |
| 3731 | const APInt *C; |
| 3732 | |
| 3733 | // (X & Y) == 0 ? X & ~Y : X --> X |
| 3734 | // (X & Y) != 0 ? X & ~Y : X --> X & ~Y |
| 3735 | if (FalseVal == X && match(TrueVal, m_And(m_Specific(X), m_APInt(C))) && |
| 3736 | *Y == ~*C) |
| 3737 | return TrueWhenUnset ? FalseVal : TrueVal; |
| 3738 | |
| 3739 | // (X & Y) == 0 ? X : X & ~Y --> X & ~Y |
| 3740 | // (X & Y) != 0 ? X : X & ~Y --> X |
| 3741 | if (TrueVal == X && match(FalseVal, m_And(m_Specific(X), m_APInt(C))) && |
| 3742 | *Y == ~*C) |
| 3743 | return TrueWhenUnset ? FalseVal : TrueVal; |
| 3744 | |
| 3745 | if (Y->isPowerOf2()) { |
| 3746 | // (X & Y) == 0 ? X | Y : X --> X | Y |
| 3747 | // (X & Y) != 0 ? X | Y : X --> X |
| 3748 | if (FalseVal == X && match(TrueVal, m_Or(m_Specific(X), m_APInt(C))) && |
| 3749 | *Y == *C) |
| 3750 | return TrueWhenUnset ? TrueVal : FalseVal; |
| 3751 | |
| 3752 | // (X & Y) == 0 ? X : X | Y --> X |
| 3753 | // (X & Y) != 0 ? X : X | Y --> X | Y |
| 3754 | if (TrueVal == X && match(FalseVal, m_Or(m_Specific(X), m_APInt(C))) && |
| 3755 | *Y == *C) |
| 3756 | return TrueWhenUnset ? TrueVal : FalseVal; |
| 3757 | } |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 3758 | |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3759 | return nullptr; |
| 3760 | } |
| 3761 | |
Sanjay Patel | a3bfb4e | 2016-07-21 21:26:45 +0000 | [diff] [blame] | 3762 | /// An alternative way to test if a bit is set or not uses sgt/slt instead of |
| 3763 | /// eq/ne. |
Craig Topper | 0aa3a19 | 2017-08-14 21:39:51 +0000 | [diff] [blame] | 3764 | static Value *simplifySelectWithFakeICmpEq(Value *CmpLHS, Value *CmpRHS, |
| 3765 | ICmpInst::Predicate Pred, |
| 3766 | Value *TrueVal, Value *FalseVal) { |
| 3767 | Value *X; |
| 3768 | APInt Mask; |
| 3769 | if (!decomposeBitTestICmp(CmpLHS, CmpRHS, Pred, X, Mask)) |
| 3770 | return nullptr; |
| 3771 | |
Craig Topper | 0aa3a19 | 2017-08-14 21:39:51 +0000 | [diff] [blame] | 3772 | return simplifySelectBitTest(TrueVal, FalseVal, X, &Mask, |
| 3773 | Pred == ICmpInst::ICMP_EQ); |
Sanjay Patel | a3bfb4e | 2016-07-21 21:26:45 +0000 | [diff] [blame] | 3774 | } |
| 3775 | |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3776 | /// Try to simplify a select instruction when its condition operand is an |
| 3777 | /// integer comparison. |
| 3778 | static Value *simplifySelectWithICmpCond(Value *CondVal, Value *TrueVal, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3779 | Value *FalseVal, const SimplifyQuery &Q, |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3780 | unsigned MaxRecurse) { |
| 3781 | ICmpInst::Predicate Pred; |
| 3782 | Value *CmpLHS, *CmpRHS; |
| 3783 | if (!match(CondVal, m_ICmp(Pred, m_Value(CmpLHS), m_Value(CmpRHS)))) |
| 3784 | return nullptr; |
| 3785 | |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3786 | if (ICmpInst::isEquality(Pred) && match(CmpRHS, m_Zero())) { |
| 3787 | Value *X; |
| 3788 | const APInt *Y; |
| 3789 | if (match(CmpLHS, m_And(m_Value(X), m_APInt(Y)))) |
| 3790 | if (Value *V = simplifySelectBitTest(TrueVal, FalseVal, X, Y, |
| 3791 | Pred == ICmpInst::ICMP_EQ)) |
| 3792 | return V; |
Sanjay Patel | e98ec77 | 2018-11-15 14:53:37 +0000 | [diff] [blame] | 3793 | |
Sanjay Patel | 9dada83 | 2019-02-26 18:26:56 +0000 | [diff] [blame] | 3794 | // Test for a bogus zero-shift-guard-op around funnel-shift or rotate. |
Sanjay Patel | e98ec77 | 2018-11-15 14:53:37 +0000 | [diff] [blame] | 3795 | Value *ShAmt; |
| 3796 | auto isFsh = m_CombineOr(m_Intrinsic<Intrinsic::fshl>(m_Value(X), m_Value(), |
| 3797 | m_Value(ShAmt)), |
| 3798 | m_Intrinsic<Intrinsic::fshr>(m_Value(), m_Value(X), |
| 3799 | m_Value(ShAmt))); |
Sanjay Patel | e98ec77 | 2018-11-15 14:53:37 +0000 | [diff] [blame] | 3800 | // (ShAmt == 0) ? fshl(X, *, ShAmt) : X --> X |
| 3801 | // (ShAmt == 0) ? fshr(*, X, ShAmt) : X --> X |
Sanjay Patel | 9dada83 | 2019-02-26 18:26:56 +0000 | [diff] [blame] | 3802 | if (match(TrueVal, isFsh) && FalseVal == X && CmpLHS == ShAmt && |
| 3803 | Pred == ICmpInst::ICMP_EQ) |
| 3804 | return X; |
Sanjay Patel | e98ec77 | 2018-11-15 14:53:37 +0000 | [diff] [blame] | 3805 | // (ShAmt != 0) ? X : fshl(X, *, ShAmt) --> X |
| 3806 | // (ShAmt != 0) ? X : fshr(*, X, ShAmt) --> X |
Sanjay Patel | 9dada83 | 2019-02-26 18:26:56 +0000 | [diff] [blame] | 3807 | if (match(FalseVal, isFsh) && TrueVal == X && CmpLHS == ShAmt && |
| 3808 | Pred == ICmpInst::ICMP_NE) |
| 3809 | return X; |
| 3810 | |
| 3811 | // Test for a zero-shift-guard-op around rotates. These are used to |
| 3812 | // avoid UB from oversized shifts in raw IR rotate patterns, but the |
| 3813 | // intrinsics do not have that problem. |
| 3814 | // We do not allow this transform for the general funnel shift case because |
| 3815 | // that would not preserve the poison safety of the original code. |
| 3816 | auto isRotate = m_CombineOr(m_Intrinsic<Intrinsic::fshl>(m_Value(X), |
| 3817 | m_Deferred(X), |
| 3818 | m_Value(ShAmt)), |
| 3819 | m_Intrinsic<Intrinsic::fshr>(m_Value(X), |
| 3820 | m_Deferred(X), |
| 3821 | m_Value(ShAmt))); |
| 3822 | // (ShAmt != 0) ? fshl(X, X, ShAmt) : X --> fshl(X, X, ShAmt) |
| 3823 | // (ShAmt != 0) ? fshr(X, X, ShAmt) : X --> fshr(X, X, ShAmt) |
| 3824 | if (match(TrueVal, isRotate) && FalseVal == X && CmpLHS == ShAmt && |
| 3825 | Pred == ICmpInst::ICMP_NE) |
| 3826 | return TrueVal; |
| 3827 | // (ShAmt == 0) ? X : fshl(X, X, ShAmt) --> fshl(X, X, ShAmt) |
| 3828 | // (ShAmt == 0) ? X : fshr(X, X, ShAmt) --> fshr(X, X, ShAmt) |
| 3829 | if (match(FalseVal, isRotate) && TrueVal == X && CmpLHS == ShAmt && |
| 3830 | Pred == ICmpInst::ICMP_EQ) |
| 3831 | return FalseVal; |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3832 | } |
| 3833 | |
Craig Topper | 0aa3a19 | 2017-08-14 21:39:51 +0000 | [diff] [blame] | 3834 | // Check for other compares that behave like bit test. |
| 3835 | if (Value *V = simplifySelectWithFakeICmpEq(CmpLHS, CmpRHS, Pred, |
| 3836 | TrueVal, FalseVal)) |
| 3837 | return V; |
| 3838 | |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3839 | // If we have an equality comparison, then we know the value in one of the |
| 3840 | // arms of the select. See if substituting this value into the arm and |
| 3841 | // simplifying the result yields the same value as the other arm. |
| 3842 | if (Pred == ICmpInst::ICMP_EQ) { |
| 3843 | if (SimplifyWithOpReplaced(FalseVal, CmpLHS, CmpRHS, Q, MaxRecurse) == |
| 3844 | TrueVal || |
| 3845 | SimplifyWithOpReplaced(FalseVal, CmpRHS, CmpLHS, Q, MaxRecurse) == |
| 3846 | TrueVal) |
| 3847 | return FalseVal; |
| 3848 | if (SimplifyWithOpReplaced(TrueVal, CmpLHS, CmpRHS, Q, MaxRecurse) == |
| 3849 | FalseVal || |
| 3850 | SimplifyWithOpReplaced(TrueVal, CmpRHS, CmpLHS, Q, MaxRecurse) == |
| 3851 | FalseVal) |
| 3852 | return FalseVal; |
| 3853 | } else if (Pred == ICmpInst::ICMP_NE) { |
| 3854 | if (SimplifyWithOpReplaced(TrueVal, CmpLHS, CmpRHS, Q, MaxRecurse) == |
| 3855 | FalseVal || |
| 3856 | SimplifyWithOpReplaced(TrueVal, CmpRHS, CmpLHS, Q, MaxRecurse) == |
| 3857 | FalseVal) |
| 3858 | return TrueVal; |
| 3859 | if (SimplifyWithOpReplaced(FalseVal, CmpLHS, CmpRHS, Q, MaxRecurse) == |
| 3860 | TrueVal || |
| 3861 | SimplifyWithOpReplaced(FalseVal, CmpRHS, CmpLHS, Q, MaxRecurse) == |
| 3862 | TrueVal) |
| 3863 | return TrueVal; |
| 3864 | } |
| 3865 | |
| 3866 | return nullptr; |
| 3867 | } |
| 3868 | |
Sanjay Patel | 1440107 | 2018-11-05 21:51:39 +0000 | [diff] [blame] | 3869 | /// Try to simplify a select instruction when its condition operand is a |
| 3870 | /// floating-point comparison. |
| 3871 | static Value *simplifySelectWithFCmp(Value *Cond, Value *T, Value *F) { |
| 3872 | FCmpInst::Predicate Pred; |
| 3873 | if (!match(Cond, m_FCmp(Pred, m_Specific(T), m_Specific(F))) && |
| 3874 | !match(Cond, m_FCmp(Pred, m_Specific(F), m_Specific(T)))) |
| 3875 | return nullptr; |
| 3876 | |
| 3877 | // TODO: The transform may not be valid with -0.0. An incomplete way of |
| 3878 | // testing for that possibility is to check if at least one operand is a |
| 3879 | // non-zero constant. |
| 3880 | const APFloat *C; |
| 3881 | if ((match(T, m_APFloat(C)) && C->isNonZero()) || |
| 3882 | (match(F, m_APFloat(C)) && C->isNonZero())) { |
| 3883 | // (T == F) ? T : F --> F |
| 3884 | // (F == T) ? T : F --> F |
| 3885 | if (Pred == FCmpInst::FCMP_OEQ) |
| 3886 | return F; |
| 3887 | |
| 3888 | // (T != F) ? T : F --> T |
| 3889 | // (F != T) ? T : F --> T |
| 3890 | if (Pred == FCmpInst::FCMP_UNE) |
| 3891 | return T; |
| 3892 | } |
| 3893 | |
| 3894 | return nullptr; |
| 3895 | } |
| 3896 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 3897 | /// Given operands for a SelectInst, see if we can fold the result. |
| 3898 | /// If not, this returns null. |
Sanjay Patel | ac39520 | 2018-02-17 14:50:13 +0000 | [diff] [blame] | 3899 | static Value *SimplifySelectInst(Value *Cond, Value *TrueVal, Value *FalseVal, |
| 3900 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 3901 | if (auto *CondC = dyn_cast<Constant>(Cond)) { |
| 3902 | if (auto *TrueC = dyn_cast<Constant>(TrueVal)) |
| 3903 | if (auto *FalseC = dyn_cast<Constant>(FalseVal)) |
| 3904 | return ConstantFoldSelectInstruction(CondC, TrueC, FalseC); |
| 3905 | |
| 3906 | // select undef, X, Y -> X or Y |
| 3907 | if (isa<UndefValue>(CondC)) |
| 3908 | return isa<Constant>(FalseVal) ? FalseVal : TrueVal; |
| 3909 | |
| 3910 | // TODO: Vector constants with undef elements don't simplify. |
| 3911 | |
| 3912 | // select true, X, Y -> X |
| 3913 | if (CondC->isAllOnesValue()) |
Benjamin Kramer | 5e1794e | 2014-01-24 17:09:53 +0000 | [diff] [blame] | 3914 | return TrueVal; |
Sanjay Patel | ac39520 | 2018-02-17 14:50:13 +0000 | [diff] [blame] | 3915 | // select false, X, Y -> Y |
| 3916 | if (CondC->isNullValue()) |
Benjamin Kramer | 5e1794e | 2014-01-24 17:09:53 +0000 | [diff] [blame] | 3917 | return FalseVal; |
| 3918 | } |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3919 | |
Sanjay Patel | ac39520 | 2018-02-17 14:50:13 +0000 | [diff] [blame] | 3920 | // select ?, X, X -> X |
Duncan Sands | 772749a | 2011-01-01 20:08:02 +0000 | [diff] [blame] | 3921 | if (TrueVal == FalseVal) |
Chris Lattner | c707fa9 | 2010-04-20 05:32:14 +0000 | [diff] [blame] | 3922 | return TrueVal; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3923 | |
Sanjay Patel | ac39520 | 2018-02-17 14:50:13 +0000 | [diff] [blame] | 3924 | if (isa<UndefValue>(TrueVal)) // select ?, undef, X -> X |
Dan Gohman | 54664ed | 2011-07-01 01:03:43 +0000 | [diff] [blame] | 3925 | return FalseVal; |
Sanjay Patel | ac39520 | 2018-02-17 14:50:13 +0000 | [diff] [blame] | 3926 | if (isa<UndefValue>(FalseVal)) // select ?, X, undef -> X |
Dan Gohman | 54664ed | 2011-07-01 01:03:43 +0000 | [diff] [blame] | 3927 | return TrueVal; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 3928 | |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3929 | if (Value *V = |
Sanjay Patel | ac39520 | 2018-02-17 14:50:13 +0000 | [diff] [blame] | 3930 | simplifySelectWithICmpCond(Cond, TrueVal, FalseVal, Q, MaxRecurse)) |
Sanjay Patel | 5f5eb58 | 2016-07-18 20:56:53 +0000 | [diff] [blame] | 3931 | return V; |
David Majnemer | c6a5e1d | 2014-11-27 06:32:46 +0000 | [diff] [blame] | 3932 | |
Sanjay Patel | 1440107 | 2018-11-05 21:51:39 +0000 | [diff] [blame] | 3933 | if (Value *V = simplifySelectWithFCmp(Cond, TrueVal, FalseVal)) |
| 3934 | return V; |
| 3935 | |
David Bolvansky | f947608 | 2018-07-28 06:55:51 +0000 | [diff] [blame] | 3936 | if (Value *V = foldSelectWithBinaryOp(Cond, TrueVal, FalseVal)) |
| 3937 | return V; |
| 3938 | |
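| | // If a dominating condition implies that Cond is true or false, return the |
| | // corresponding select arm. |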
Sanjay Patel | 7d82d37 | 2018-12-02 13:26:03 +0000 | [diff] [blame] | 3939 | Optional<bool> Imp = isImpliedByDomCondition(Cond, Q.CxtI, Q.DL); |
| 3940 | if (Imp) |
| 3941 | return *Imp ? TrueVal : FalseVal; |
Sanjay Patel | d802270 | 2018-11-29 18:44:39 +0000 | [diff] [blame] | 3942 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 3943 | return nullptr; |
Chris Lattner | c707fa9 | 2010-04-20 05:32:14 +0000 | [diff] [blame] | 3944 | } |
| 3945 | |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3946 | Value *llvm::SimplifySelectInst(Value *Cond, Value *TrueVal, Value *FalseVal, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3947 | const SimplifyQuery &Q) { |
| 3948 | return ::SimplifySelectInst(Cond, TrueVal, FalseVal, Q, RecursionLimit); |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 3949 | } |
| 3950 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 3951 | /// Given operands for an GetElementPtrInst, see if we can fold the result. |
| 3952 | /// If not, this returns null. |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 3953 | static Value *SimplifyGEPInst(Type *SrcTy, ArrayRef<Value *> Ops, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 3954 | const SimplifyQuery &Q, unsigned) { |
Duncan Sands | 8a0f486 | 2010-11-22 13:42:49 +0000 | [diff] [blame] | 3955 | // The address space of the GEP pointer operand. |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 3956 | unsigned AS = |
| 3957 | cast<PointerType>(Ops[0]->getType()->getScalarType())->getAddressSpace(); |
Duncan Sands | 8a0f486 | 2010-11-22 13:42:49 +0000 | [diff] [blame] | 3958 | |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 3959 | // getelementptr P -> P. |
Jay Foad | b992a63 | 2011-07-19 15:07:52 +0000 | [diff] [blame] | 3960 | if (Ops.size() == 1) |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 3961 | return Ops[0]; |
| 3962 | |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3963 | // Compute the (pointer) type returned by the GEP instruction. |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 3964 | Type *LastType = GetElementPtrInst::getIndexedType(SrcTy, Ops.slice(1)); |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3965 | Type *GEPTy = PointerType::get(LastType, AS); |
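| | // If either the base pointer or the index is a vector, the GEP produces a |
| | // vector of pointers with the same number of elements. |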
| 3966 | if (VectorType *VT = dyn_cast<VectorType>(Ops[0]->getType())) |
| 3967 | GEPTy = VectorType::get(GEPTy, VT->getNumElements()); |
Davide Italiano | a9f047a | 2017-04-19 14:23:42 +0000 | [diff] [blame] | 3968 | else if (VectorType *VT = dyn_cast<VectorType>(Ops[1]->getType())) |
| 3969 | GEPTy = VectorType::get(GEPTy, VT->getNumElements()); |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3970 | |
| 3971 | if (isa<UndefValue>(Ops[0])) |
Duncan Sands | 8a0f486 | 2010-11-22 13:42:49 +0000 | [diff] [blame] | 3972 | return UndefValue::get(GEPTy); |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 3973 | |
Jay Foad | b992a63 | 2011-07-19 15:07:52 +0000 | [diff] [blame] | 3974 | if (Ops.size() == 2) { |
Duncan Sands | cf4bceb | 2010-11-21 13:53:09 +0000 | [diff] [blame] | 3975 | // getelementptr P, 0 -> P. |
Matthew Simpson | c1c4ad6 | 2018-03-15 16:00:29 +0000 | [diff] [blame] | 3976 | if (match(Ops[1], m_Zero()) && Ops[0]->getType() == GEPTy) |
Benjamin Kramer | 5e1794e | 2014-01-24 17:09:53 +0000 | [diff] [blame] | 3977 | return Ops[0]; |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3978 | |
David Blaikie | 4a2e73b | 2015-04-02 18:55:32 +0000 | [diff] [blame] | 3979 | Type *Ty = SrcTy; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3980 | if (Ty->isSized()) { |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3981 | Value *P; |
| 3982 | uint64_t C; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 3983 | uint64_t TyAllocSize = Q.DL.getTypeAllocSize(Ty); |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3984 | // getelementptr P, N -> P if P points to a type of zero size. |
Matthew Simpson | c1c4ad6 | 2018-03-15 16:00:29 +0000 | [diff] [blame] | 3985 | if (TyAllocSize == 0 && Ops[0]->getType() == GEPTy) |
Duncan Sands | cf4bceb | 2010-11-21 13:53:09 +0000 | [diff] [blame] | 3986 | return Ops[0]; |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3987 | |
| 3988 | // The following transforms are only safe if the ptrtoint cast |
| 3989 | // doesn't truncate the pointers. |
| 3990 | if (Ops[1]->getType()->getScalarSizeInBits() == |
Elena Demikhovsky | 945b7e5 | 2018-02-14 06:58:08 +0000 | [diff] [blame] | 3991 | Q.DL.getIndexSizeInBits(AS)) { |
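| | // Helper: if P is the zero constant, treat it as a null pointer of the GEP |
| | // type; if P is a ptrtoint of a pointer with that type, return the pointer. |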
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3992 | auto PtrToIntOrZero = [GEPTy](Value *P) -> Value * { |
| 3993 | if (match(P, m_Zero())) |
| 3994 | return Constant::getNullValue(GEPTy); |
| 3995 | Value *Temp; |
| 3996 | if (match(P, m_PtrToInt(m_Value(Temp)))) |
David Majnemer | 11ca297 | 2014-08-27 20:08:34 +0000 | [diff] [blame] | 3997 | if (Temp->getType() == GEPTy) |
| 3998 | return Temp; |
Nico Weber | 48c8240 | 2014-08-27 20:06:19 +0000 | [diff] [blame] | 3999 | return nullptr; |
| 4000 | }; |
| 4001 | |
| 4002 | // getelementptr V, (sub P, V) -> P if P points to a type of size 1. |
| 4003 | if (TyAllocSize == 1 && |
| 4004 | match(Ops[1], m_Sub(m_Value(P), m_PtrToInt(m_Specific(Ops[0]))))) |
| 4005 | if (Value *R = PtrToIntOrZero(P)) |
| 4006 | return R; |
| 4007 | |
| 4008 | // getelementptr V, (ashr (sub P, V), C) -> P |
| 4009 | // if P points to a type of size 1 << C. |
| 4010 | if (match(Ops[1], |
| 4011 | m_AShr(m_Sub(m_Value(P), m_PtrToInt(m_Specific(Ops[0]))), |
| 4012 | m_ConstantInt(C))) && |
| 4013 | TyAllocSize == 1ULL << C) |
| 4014 | if (Value *R = PtrToIntOrZero(P)) |
| 4015 | return R; |
| 4016 | |
| 4017 | // getelementptr V, (sdiv (sub P, V), C) -> P |
| 4018 | // if P points to a type of size C. |
| 4019 | if (match(Ops[1], |
| 4020 | m_SDiv(m_Sub(m_Value(P), m_PtrToInt(m_Specific(Ops[0]))), |
| 4021 | m_SpecificInt(TyAllocSize)))) |
| 4022 | if (Value *R = PtrToIntOrZero(P)) |
| 4023 | return R; |
| 4024 | } |
Duncan Sands | cf4bceb | 2010-11-21 13:53:09 +0000 | [diff] [blame] | 4025 | } |
| 4026 | } |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 4027 | |
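| | // If all indices except the last are zero and the last element type has |
| | // size 1, the final index is a raw byte offset from the base pointer. |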
David Majnemer | d150137 | 2016-08-07 07:58:12 +0000 | [diff] [blame] | 4028 | if (Q.DL.getTypeAllocSize(LastType) == 1 && |
| 4029 | all_of(Ops.slice(1).drop_back(1), |
| 4030 | [](Value *Idx) { return match(Idx, m_Zero()); })) { |
Elena Demikhovsky | 945b7e5 | 2018-02-14 06:58:08 +0000 | [diff] [blame] | 4031 | unsigned IdxWidth = |
| 4032 | Q.DL.getIndexSizeInBits(Ops[0]->getType()->getPointerAddressSpace()); |
| 4033 | if (Q.DL.getTypeSizeInBits(Ops.back()->getType()) == IdxWidth) { |
| 4034 | APInt BasePtrOffset(IdxWidth, 0); |
David Majnemer | d150137 | 2016-08-07 07:58:12 +0000 | [diff] [blame] | 4035 | Value *StrippedBasePtr = |
| 4036 | Ops[0]->stripAndAccumulateInBoundsConstantOffsets(Q.DL, |
| 4037 | BasePtrOffset); |
| 4038 | |
David Majnemer | 5c5df62 | 2016-08-16 06:13:46 +0000 | [diff] [blame] | 4039 | // gep (gep V, C), (sub 0, V) -> C |
David Majnemer | d150137 | 2016-08-07 07:58:12 +0000 | [diff] [blame] | 4040 | if (match(Ops.back(), |
| 4041 | m_Sub(m_Zero(), m_PtrToInt(m_Specific(StrippedBasePtr))))) { |
| 4042 | auto *CI = ConstantInt::get(GEPTy->getContext(), BasePtrOffset); |
| 4043 | return ConstantExpr::getIntToPtr(CI, GEPTy); |
| 4044 | } |
David Majnemer | 5c5df62 | 2016-08-16 06:13:46 +0000 | [diff] [blame] | 4045 | // gep (gep V, C), (xor V, -1) -> C-1 |
| 4046 | if (match(Ops.back(), |
| 4047 | m_Xor(m_PtrToInt(m_Specific(StrippedBasePtr)), m_AllOnes()))) { |
| 4048 | auto *CI = ConstantInt::get(GEPTy->getContext(), BasePtrOffset - 1); |
| 4049 | return ConstantExpr::getIntToPtr(CI, GEPTy); |
| 4050 | } |
David Majnemer | d150137 | 2016-08-07 07:58:12 +0000 | [diff] [blame] | 4051 | } |
| 4052 | } |
| 4053 | |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 4054 | // Check to see if this is constant foldable. |
Craig Topper | da8037f | 2017-06-04 22:41:56 +0000 | [diff] [blame] | 4055 | if (!all_of(Ops, [](Value *V) { return isa<Constant>(V); })) |
| 4056 | return nullptr; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 4057 | |
Joey Gouly | 61eaa63 | 2017-06-06 10:17:14 +0000 | [diff] [blame] | 4058 | auto *CE = ConstantExpr::getGetElementPtr(SrcTy, cast<Constant>(Ops[0]), |
| 4059 | Ops.slice(1)); |
| 4060 | if (auto *CEFolded = ConstantFoldConstant(CE, Q.DL)) |
| 4061 | return CEFolded; |
| 4062 | return CE; |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 4063 | } |
| 4064 | |
Manuel Jacob | 20c6d5b | 2016-01-17 22:46:43 +0000 | [diff] [blame] | 4065 | Value *llvm::SimplifyGEPInst(Type *SrcTy, ArrayRef<Value *> Ops, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4066 | const SimplifyQuery &Q) { |
| 4067 | return ::SimplifyGEPInst(SrcTy, Ops, Q, RecursionLimit); |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 4068 | } |
| 4069 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4070 | /// Given operands for an InsertValueInst, see if we can fold the result. |
| 4071 | /// If not, this returns null. |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 4072 | static Value *SimplifyInsertValueInst(Value *Agg, Value *Val, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4073 | ArrayRef<unsigned> Idxs, const SimplifyQuery &Q, |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 4074 | unsigned) { |
Duncan Sands | fd26a95 | 2011-09-05 06:52:48 +0000 | [diff] [blame] | 4075 | if (Constant *CAgg = dyn_cast<Constant>(Agg)) |
| 4076 | if (Constant *CVal = dyn_cast<Constant>(Val)) |
| 4077 | return ConstantFoldInsertValueInstruction(CAgg, CVal, Idxs); |
| 4078 | |
| 4079 | // insertvalue x, undef, n -> x |
| 4080 | if (match(Val, m_Undef())) |
| 4081 | return Agg; |
| 4082 | |
| 4083 | // insertvalue x, (extractvalue y, n), n |
| 4084 | if (ExtractValueInst *EV = dyn_cast<ExtractValueInst>(Val)) |
Benjamin Kramer | 4b79c21 | 2011-09-05 18:16:19 +0000 | [diff] [blame] | 4085 | if (EV->getAggregateOperand()->getType() == Agg->getType() && |
| 4086 | EV->getIndices() == Idxs) { |
Duncan Sands | fd26a95 | 2011-09-05 06:52:48 +0000 | [diff] [blame] | 4087 | // insertvalue undef, (extractvalue y, n), n -> y |
| 4088 | if (match(Agg, m_Undef())) |
| 4089 | return EV->getAggregateOperand(); |
| 4090 | |
| 4091 | // insertvalue y, (extractvalue y, n), n -> y |
| 4092 | if (Agg == EV->getAggregateOperand()) |
| 4093 | return Agg; |
| 4094 | } |
| 4095 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4096 | return nullptr; |
Duncan Sands | fd26a95 | 2011-09-05 06:52:48 +0000 | [diff] [blame] | 4097 | } |
| 4098 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4099 | Value *llvm::SimplifyInsertValueInst(Value *Agg, Value *Val, |
| 4100 | ArrayRef<unsigned> Idxs, |
| 4101 | const SimplifyQuery &Q) { |
| 4102 | return ::SimplifyInsertValueInst(Agg, Val, Idxs, Q, RecursionLimit); |
| 4103 | } |
| 4104 | |
Igor Laevsky | e0edb66 | 2017-12-13 11:21:18 +0000 | [diff] [blame] | 4105 | Value *llvm::SimplifyInsertElementInst(Value *Vec, Value *Val, Value *Idx, |
| 4106 | const SimplifyQuery &Q) { |
| 4107 | // Try to constant fold. |
| 4108 | auto *VecC = dyn_cast<Constant>(Vec); |
| 4109 | auto *ValC = dyn_cast<Constant>(Val); |
| 4110 | auto *IdxC = dyn_cast<Constant>(Idx); |
| 4111 | if (VecC && ValC && IdxC) |
| 4112 | return ConstantFoldInsertElementInstruction(VecC, ValC, IdxC); |
| 4113 | |
| 4114 | // Fold into undef if index is out of bounds. |
| 4115 | if (auto *CI = dyn_cast<ConstantInt>(Idx)) { |
| 4116 | uint64_t NumElements = cast<VectorType>(Vec->getType())->getNumElements(); |
Igor Laevsky | e0edb66 | 2017-12-13 11:21:18 +0000 | [diff] [blame] | 4117 | if (CI->uge(NumElements)) |
| 4118 | return UndefValue::get(Vec->getType()); |
| 4119 | } |
| 4120 | |
Philip Reames | e499bc3 | 2017-12-30 05:54:22 +0000 | [diff] [blame] | 4121 | // If the index is undef, it might be out of bounds (see the case above). |
| 4122 | if (isa<UndefValue>(Idx)) |
| 4123 | return UndefValue::get(Vec->getType()); |
Igor Laevsky | e0edb66 | 2017-12-13 11:21:18 +0000 | [diff] [blame] | 4124 | |
Sanjay Patel | e60cb7d | 2019-05-23 21:49:47 +0000 | [diff] [blame] | 4125 | // Inserting an undef scalar? Assume it is the same value as the existing |
| 4126 | // vector element. |
| 4127 | if (isa<UndefValue>(Val)) |
| 4128 | return Vec; |
| 4129 | |
Sanjay Patel | 8869a98 | 2019-05-24 00:13:58 +0000 | [diff] [blame] | 4130 | // If we are extracting a value from a vector and then inserting it back into |
| 4131 | // the same place, the result is the input vector: |
| 4132 | // insertelt Vec, (extractelt Vec, Idx), Idx --> Vec |
| 4133 | if (match(Val, m_ExtractElement(m_Specific(Vec), m_Specific(Idx)))) |
| 4134 | return Vec; |
| 4135 | |
Igor Laevsky | e0edb66 | 2017-12-13 11:21:18 +0000 | [diff] [blame] | 4136 | return nullptr; |
| 4137 | } |
| 4138 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4139 | /// Given operands for an ExtractValueInst, see if we can fold the result. |
| 4140 | /// If not, this returns null. |
David Majnemer | 25a796e | 2015-07-13 01:15:46 +0000 | [diff] [blame] | 4141 | static Value *SimplifyExtractValueInst(Value *Agg, ArrayRef<unsigned> Idxs, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4142 | const SimplifyQuery &, unsigned) { |
David Majnemer | 25a796e | 2015-07-13 01:15:46 +0000 | [diff] [blame] | 4143 | if (auto *CAgg = dyn_cast<Constant>(Agg)) |
| 4144 | return ConstantFoldExtractValueInstruction(CAgg, Idxs); |
| 4145 | |
| 4146 | // extractvalue x, (insertvalue y, elt, n), n -> elt |
| 4147 | unsigned NumIdxs = Idxs.size(); |
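| | // Walk the chain of insertvalue instructions feeding the aggregate. If one |
| | // writes exactly the indices we are extracting, return the inserted value; |
| | // stop at the first insertvalue whose indices overlap ours. |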
| 4148 | for (auto *IVI = dyn_cast<InsertValueInst>(Agg); IVI != nullptr; |
| 4149 | IVI = dyn_cast<InsertValueInst>(IVI->getAggregateOperand())) { |
| 4150 | ArrayRef<unsigned> InsertValueIdxs = IVI->getIndices(); |
| 4151 | unsigned NumInsertValueIdxs = InsertValueIdxs.size(); |
| 4152 | unsigned NumCommonIdxs = std::min(NumInsertValueIdxs, NumIdxs); |
| 4153 | if (InsertValueIdxs.slice(0, NumCommonIdxs) == |
| 4154 | Idxs.slice(0, NumCommonIdxs)) { |
| 4155 | if (NumIdxs == NumInsertValueIdxs) |
| 4156 | return IVI->getInsertedValueOperand(); |
| 4157 | break; |
| 4158 | } |
| 4159 | } |
| 4160 | |
| 4161 | return nullptr; |
| 4162 | } |
| 4163 | |
| 4164 | Value *llvm::SimplifyExtractValueInst(Value *Agg, ArrayRef<unsigned> Idxs, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4165 | const SimplifyQuery &Q) { |
| 4166 | return ::SimplifyExtractValueInst(Agg, Idxs, Q, RecursionLimit); |
| 4167 | } |
| 4168 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4169 | /// Given operands for an ExtractElementInst, see if we can fold the result. |
| 4170 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4171 | static Value *SimplifyExtractElementInst(Value *Vec, Value *Idx, const SimplifyQuery &, |
David Majnemer | 599ca44 | 2015-07-13 01:15:53 +0000 | [diff] [blame] | 4172 | unsigned) { |
| 4173 | if (auto *CVec = dyn_cast<Constant>(Vec)) { |
| 4174 | if (auto *CIdx = dyn_cast<Constant>(Idx)) |
| 4175 | return ConstantFoldExtractElementInstruction(CVec, CIdx); |
| 4176 | |
| 4177 | // The index is not relevant if our vector is a splat. |
| 4178 | if (auto *Splat = CVec->getSplatValue()) |
| 4179 | return Splat; |
| 4180 | |
| 4181 | if (isa<UndefValue>(Vec)) |
| 4182 | return UndefValue::get(Vec->getType()->getVectorElementType()); |
| 4183 | } |
| 4184 | |
| 4185 | // If extracting a specified index from the vector, see if we can recursively |
| 4186 | // find a previously computed scalar that was inserted into the vector. |
Philip Reames | e499bc3 | 2017-12-30 05:54:22 +0000 | [diff] [blame] | 4187 | if (auto *IdxC = dyn_cast<ConstantInt>(Idx)) { |
| 4188 | if (IdxC->getValue().uge(Vec->getType()->getVectorNumElements())) |
| 4189 | // definitely out of bounds, thus undefined result |
| 4190 | return UndefValue::get(Vec->getType()->getVectorElementType()); |
| 4191 | if (Value *Elt = findScalarElement(Vec, IdxC->getZExtValue())) |
| 4192 | return Elt; |
| 4193 | } |
David Majnemer | 599ca44 | 2015-07-13 01:15:53 +0000 | [diff] [blame] | 4194 | |
Zvi Rackover | 2e6e88f | 2017-12-06 17:51:46 +0000 | [diff] [blame] | 4195 | // An undef extract index can be arbitrarily chosen to be an out-of-range |
| 4196 | // index value, which would result in the instruction being undef. |
| 4197 | if (isa<UndefValue>(Idx)) |
| 4198 | return UndefValue::get(Vec->getType()->getVectorElementType()); |
| 4199 | |
David Majnemer | 599ca44 | 2015-07-13 01:15:53 +0000 | [diff] [blame] | 4200 | return nullptr; |
| 4201 | } |
| 4202 | |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4203 | Value *llvm::SimplifyExtractElementInst(Value *Vec, Value *Idx, |
| 4204 | const SimplifyQuery &Q) { |
| 4205 | return ::SimplifyExtractElementInst(Vec, Idx, Q, RecursionLimit); |
| 4206 | } |
| 4207 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4208 | /// See if we can fold the given phi. If not, returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4209 | static Value *SimplifyPHINode(PHINode *PN, const SimplifyQuery &Q) { |
Duncan Sands | 7412f6e | 2010-11-17 04:30:22 +0000 | [diff] [blame] | 4210 | // If all of the PHI's incoming values are the same then replace the PHI node |
| 4211 | // with the common value. |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4212 | Value *CommonValue = nullptr; |
Duncan Sands | 7412f6e | 2010-11-17 04:30:22 +0000 | [diff] [blame] | 4213 | bool HasUndefInput = false; |
Pete Cooper | 833f34d | 2015-05-12 20:05:31 +0000 | [diff] [blame] | 4214 | for (Value *Incoming : PN->incoming_values()) { |
Duncan Sands | 7412f6e | 2010-11-17 04:30:22 +0000 | [diff] [blame] | 4215 | // If the incoming value is the phi node itself, it can safely be skipped. |
| 4216 | if (Incoming == PN) continue; |
| 4217 | if (isa<UndefValue>(Incoming)) { |
| 4218 | // Remember that we saw an undef value, but otherwise ignore them. |
| 4219 | HasUndefInput = true; |
| 4220 | continue; |
| 4221 | } |
| 4222 | if (CommonValue && Incoming != CommonValue) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 4223 | return nullptr; // Not the same, bail out. |
Duncan Sands | 7412f6e | 2010-11-17 04:30:22 +0000 | [diff] [blame] | 4224 | CommonValue = Incoming; |
| 4225 | } |
| 4226 | |
| 4227 | // If CommonValue is null then all of the incoming values were either undef or |
| 4228 | // equal to the phi node itself. |
| 4229 | if (!CommonValue) |
| 4230 | return UndefValue::get(PN->getType()); |
| 4231 | |
| 4232 | // If we have a PHI node like phi(X, undef, X), where X is defined by some |
| 4233 | // instruction, we cannot return X as the result of the PHI node unless it |
| 4234 | // dominates the PHI block. |
| 4235 | if (HasUndefInput) |
Sanjay Patel | 5da361a | 2018-04-10 18:38:19 +0000 | [diff] [blame] | 4236 | return valueDominatesPHI(CommonValue, PN, Q.DT) ? CommonValue : nullptr; |
Duncan Sands | 7412f6e | 2010-11-17 04:30:22 +0000 | [diff] [blame] | 4237 | |
| 4238 | return CommonValue; |
| 4239 | } |
| 4240 | |
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 4241 | static Value *SimplifyCastInst(unsigned CastOpc, Value *Op, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4242 | Type *Ty, const SimplifyQuery &Q, unsigned MaxRecurse) { |
David Majnemer | 126de5d | 2016-07-25 03:39:21 +0000 | [diff] [blame] | 4243 | if (auto *C = dyn_cast<Constant>(Op)) |
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 4244 | return ConstantFoldCastOperand(CastOpc, C, Ty, Q.DL); |
Duncan Sands | 395ac42d | 2012-03-13 14:07:05 +0000 | [diff] [blame] | 4245 | |
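| | // If this cast is fed by another cast and the two together are equivalent to a
| | // bitcast back to the original type, the source value passes through unchanged.
| | // Illustrative sketch: a bitcast of a bitcast that returns to the starting type
| | // simplifies to the original operand.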
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 4246 | if (auto *CI = dyn_cast<CastInst>(Op)) { |
| 4247 | auto *Src = CI->getOperand(0); |
| 4248 | Type *SrcTy = Src->getType(); |
| 4249 | Type *MidTy = CI->getType(); |
| 4250 | Type *DstTy = Ty; |
| 4251 | if (Src->getType() == Ty) { |
| 4252 | auto FirstOp = static_cast<Instruction::CastOps>(CI->getOpcode()); |
| 4253 | auto SecondOp = static_cast<Instruction::CastOps>(CastOpc); |
| 4254 | Type *SrcIntPtrTy = |
| 4255 | SrcTy->isPtrOrPtrVectorTy() ? Q.DL.getIntPtrType(SrcTy) : nullptr; |
| 4256 | Type *MidIntPtrTy = |
| 4257 | MidTy->isPtrOrPtrVectorTy() ? Q.DL.getIntPtrType(MidTy) : nullptr; |
| 4258 | Type *DstIntPtrTy = |
| 4259 | DstTy->isPtrOrPtrVectorTy() ? Q.DL.getIntPtrType(DstTy) : nullptr; |
| 4260 | if (CastInst::isEliminableCastPair(FirstOp, SecondOp, SrcTy, MidTy, DstTy, |
| 4261 | SrcIntPtrTy, MidIntPtrTy, |
| 4262 | DstIntPtrTy) == Instruction::BitCast) |
| 4263 | return Src; |
| 4264 | } |
| 4265 | } |
David Majnemer | a90a621 | 2016-07-26 05:52:29 +0000 | [diff] [blame] | 4266 | |
| 4267 | // bitcast x -> x |
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 4268 | if (CastOpc == Instruction::BitCast) |
| 4269 | if (Op->getType() == Ty) |
| 4270 | return Op; |
David Majnemer | a90a621 | 2016-07-26 05:52:29 +0000 | [diff] [blame] | 4271 | |
| 4272 | return nullptr; |
| 4273 | } |
| 4274 | |
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 4275 | Value *llvm::SimplifyCastInst(unsigned CastOpc, Value *Op, Type *Ty, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4276 | const SimplifyQuery &Q) { |
| 4277 | return ::SimplifyCastInst(CastOpc, Op, Ty, Q, RecursionLimit); |
| 4278 | } |
| 4279 | |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4280 | /// For the given destination element of a shuffle, peek through shuffles to |
| 4281 | /// match a root vector source operand that contains that element in the same |
| 4282 | /// vector lane (ie, the same mask index), so we can eliminate the shuffle(s). |
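| | // A sketch of the kind of chain this recognizes (illustrative):
| | //   %r0 = shufflevector <4 x i32> %v, <4 x i32> undef, <4 x i32> <i32 3, i32 2, i32 1, i32 0>
| | //   %r1 = shufflevector <4 x i32> %r0, <4 x i32> undef, <4 x i32> <i32 3, i32 2, i32 1, i32 0>
| | // Every lane of %r1 traces back to the same lane of %v, so %r1 simplifies to %v.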
| 4283 | static Value *foldIdentityShuffles(int DestElt, Value *Op0, Value *Op1, |
Zvi Rackover | 558f86b | 2017-05-08 15:46:58 +0000 | [diff] [blame] | 4284 | int MaskVal, Value *RootVec, |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4285 | unsigned MaxRecurse) { |
| 4286 | if (!MaxRecurse--) |
| 4287 | return nullptr; |
| 4288 | |
| 4289 | // Bail out if this mask value is undefined. A shuffle with undef mask
| 4290 | // elements may be simplified further based on demanded bits or other folds.
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4291 | if (MaskVal == -1) |
| 4292 | return nullptr; |
| 4293 | |
| 4294 | // The mask value chooses which source operand we need to look at next. |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4295 | int InVecNumElts = Op0->getType()->getVectorNumElements(); |
Zvi Rackover | 558f86b | 2017-05-08 15:46:58 +0000 | [diff] [blame] | 4296 | int RootElt = MaskVal; |
| 4297 | Value *SourceOp = Op0; |
| 4298 | if (MaskVal >= InVecNumElts) { |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4299 | RootElt = MaskVal - InVecNumElts; |
| 4300 | SourceOp = Op1; |
| 4301 | } |
| 4302 | |
| 4303 | // If the source operand is a shuffle itself, look through it to find the |
| 4304 | // matching root vector. |
| 4305 | if (auto *SourceShuf = dyn_cast<ShuffleVectorInst>(SourceOp)) { |
| 4306 | return foldIdentityShuffles( |
| 4307 | DestElt, SourceShuf->getOperand(0), SourceShuf->getOperand(1), |
Zvi Rackover | 558f86b | 2017-05-08 15:46:58 +0000 | [diff] [blame] | 4308 | SourceShuf->getMaskValue(RootElt), RootVec, MaxRecurse); |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4309 | } |
| 4310 | |
| 4311 | // TODO: Look through bitcasts? What if the bitcast changes the vector element |
| 4312 | // size? |
| 4313 | |
| 4314 | // The source operand is not a shuffle. Initialize the root vector value for |
| 4315 | // this shuffle if that has not been done yet. |
| 4316 | if (!RootVec) |
| 4317 | RootVec = SourceOp; |
| 4318 | |
| 4319 | // Give up as soon as a source operand does not match the existing root value. |
| 4320 | if (RootVec != SourceOp) |
| 4321 | return nullptr; |
| 4322 | |
| 4323 | // The element must be coming from the same lane in the source vector |
| 4324 | // (although it may have crossed lanes in intermediate shuffles). |
| 4325 | if (RootElt != DestElt) |
| 4326 | return nullptr; |
| 4327 | |
| 4328 | return RootVec; |
| 4329 | } |
| 4330 | |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4331 | static Value *SimplifyShuffleVectorInst(Value *Op0, Value *Op1, Constant *Mask, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4332 | Type *RetTy, const SimplifyQuery &Q, |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4333 | unsigned MaxRecurse) { |
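| | // A shuffle with an entirely undef mask produces an undef value of the result
| | // type, since every destination lane is unspecified.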
Zvi Rackover | 4086e13 | 2017-04-30 06:06:26 +0000 | [diff] [blame] | 4334 | if (isa<UndefValue>(Mask)) |
| 4335 | return UndefValue::get(RetTy); |
| 4336 | |
Zvi Rackover | 30efd24d | 2017-04-11 21:37:02 +0000 | [diff] [blame] | 4337 | Type *InVecTy = Op0->getType(); |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4338 | unsigned MaskNumElts = Mask->getType()->getVectorNumElements(); |
Zvi Rackover | 30efd24d | 2017-04-11 21:37:02 +0000 | [diff] [blame] | 4339 | unsigned InVecNumElts = InVecTy->getVectorNumElements(); |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4340 | |
Zvi Rackover | 0411e46 | 2017-04-30 06:10:54 +0000 | [diff] [blame] | 4341 | SmallVector<int, 32> Indices; |
| 4342 | ShuffleVectorInst::getShuffleMask(Mask, Indices); |
| 4343 | assert(MaskNumElts == Indices.size() && |
| 4344 | "Size of Indices not same as number of mask elements?"); |
| 4345 | |
Zvi Rackover | 973ff7c | 2017-05-07 18:16:37 +0000 | [diff] [blame] | 4346 | // Canonicalization: If the mask does not select any elements from an input
| 4347 | // vector, replace that input vector with undef.
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4348 | bool MaskSelects0 = false, MaskSelects1 = false; |
| 4349 | for (unsigned i = 0; i != MaskNumElts; ++i) { |
Zvi Rackover | 0411e46 | 2017-04-30 06:10:54 +0000 | [diff] [blame] | 4350 | if (Indices[i] == -1) |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4351 | continue; |
Zvi Rackover | 0411e46 | 2017-04-30 06:10:54 +0000 | [diff] [blame] | 4352 | if ((unsigned)Indices[i] < InVecNumElts) |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4353 | MaskSelects0 = true; |
| 4354 | else |
| 4355 | MaskSelects1 = true; |
| 4356 | } |
Zvi Rackover | 973ff7c | 2017-05-07 18:16:37 +0000 | [diff] [blame] | 4357 | if (!MaskSelects0) |
| 4358 | Op0 = UndefValue::get(InVecTy); |
| 4359 | if (!MaskSelects1) |
| 4360 | Op1 = UndefValue::get(InVecTy); |
| 4361 | |
| 4362 | auto *Op0Const = dyn_cast<Constant>(Op0); |
| 4363 | auto *Op1Const = dyn_cast<Constant>(Op1); |
| 4364 | |
| 4365 | // If all operands are constant, constant fold the shuffle. |
| 4366 | if (Op0Const && Op1Const) |
| 4367 | return ConstantFoldShuffleVectorInstruction(Op0Const, Op1Const, Mask); |
| 4368 | |
| 4369 | // Canonicalization: if only one input vector is constant, make it the
| 4370 | // second operand (and commute the mask accordingly).
| 4371 | if (Op0Const && !Op1Const) { |
| 4372 | std::swap(Op0, Op1); |
Zvi Rackover | dfbd3d7 | 2017-05-08 12:40:18 +0000 | [diff] [blame] | 4373 | ShuffleVectorInst::commuteShuffleMask(Indices, InVecNumElts); |
Zvi Rackover | 973ff7c | 2017-05-07 18:16:37 +0000 | [diff] [blame] | 4374 | } |
Zvi Rackover | 30efd24d | 2017-04-11 21:37:02 +0000 | [diff] [blame] | 4375 | |
| 4376 | // A shuffle of a splat is always the splat itself. This is legal only if the
| 4377 | // shuffle's value type is the same as the input vectors' type.
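| | // For example (illustrative):
| | //   %splat = shufflevector <4 x float> %x, <4 x float> undef, <4 x i32> zeroinitializer
| | //   %shuf  = shufflevector <4 x float> %splat, <4 x float> undef, <4 x i32> <i32 1, i32 3, i32 0, i32 2>
| | // Every lane of %shuf is lane 0 of %x, so %shuf simplifies to %splat.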
| 4378 | if (auto *OpShuf = dyn_cast<ShuffleVectorInst>(Op0)) |
Zvi Rackover | 973ff7c | 2017-05-07 18:16:37 +0000 | [diff] [blame] | 4379 | if (isa<UndefValue>(Op1) && RetTy == InVecTy && |
Zvi Rackover | 30efd24d | 2017-04-11 21:37:02 +0000 | [diff] [blame] | 4380 | OpShuf->getMask()->getSplatValue()) |
| 4381 | return Op0; |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4382 | |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4383 | // Don't fold a shuffle with undef mask elements. This may get folded in a |
| 4384 | // better way using demanded bits or other analysis. |
| 4385 | // TODO: Should we allow this? |
Zvi Rackover | 0411e46 | 2017-04-30 06:10:54 +0000 | [diff] [blame] | 4386 | if (find(Indices, -1) != Indices.end()) |
| 4387 | return nullptr; |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4388 | |
| 4389 | // Check if every element of this shuffle can be mapped back to the |
| 4390 | // corresponding element of a single root vector. If so, we don't need this |
| 4391 | // shuffle. This handles simple identity shuffles as well as chains of |
| 4392 | // shuffles that may widen/narrow and/or move elements across lanes and back. |
| 4393 | Value *RootVec = nullptr; |
| 4394 | for (unsigned i = 0; i != MaskNumElts; ++i) { |
| 4395 | // Note that recursion is limited for each vector element, so if any element |
| 4396 | // exceeds the limit, this will fail to simplify. |
Zvi Rackover | 558f86b | 2017-05-08 15:46:58 +0000 | [diff] [blame] | 4397 | RootVec = |
| 4398 | foldIdentityShuffles(i, Op0, Op1, Indices[i], RootVec, MaxRecurse); |
Sanjay Patel | a3c297d | 2017-04-19 16:48:22 +0000 | [diff] [blame] | 4399 | |
| 4400 | // We can't replace a widening/narrowing shuffle with one of its operands. |
| 4401 | if (!RootVec || RootVec->getType() != RetTy) |
| 4402 | return nullptr; |
| 4403 | } |
| 4404 | return RootVec; |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4405 | } |
| 4406 | |
| 4407 | /// Given operands for a ShuffleVectorInst, fold the result or return null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4408 | Value *llvm::SimplifyShuffleVectorInst(Value *Op0, Value *Op1, Constant *Mask, |
| 4409 | Type *RetTy, const SimplifyQuery &Q) { |
| 4410 | return ::SimplifyShuffleVectorInst(Op0, Op1, Mask, RetTy, Q, RecursionLimit); |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 4411 | } |
| 4412 | |
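| | /// Constant-fold a unary operator applied to a constant operand, if possible.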
Cameron McInally | c316769 | 2019-05-06 16:05:10 +0000 | [diff] [blame] | 4413 | static Constant *foldConstant(Instruction::UnaryOps Opcode, |
| 4414 | Value *&Op, const SimplifyQuery &Q) { |
| 4415 | if (auto *C = dyn_cast<Constant>(Op)) |
| 4416 | return ConstantFoldUnaryOpOperand(Opcode, C, Q.DL); |
| 4417 | return nullptr; |
| 4418 | } |
| 4419 | |
| 4420 | /// Given the operand for an FNeg, see if we can fold the result. If not, this |
| 4421 | /// returns null. |
| 4422 | static Value *simplifyFNegInst(Value *Op, FastMathFlags FMF, |
| 4423 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 4424 | if (Constant *C = foldConstant(Instruction::FNeg, Op, Q)) |
| 4425 | return C; |
| 4426 | |
| 4427 | Value *X; |
| 4428 | // fneg (fneg X) ==> X |
| 4429 | if (match(Op, m_FNeg(m_Value(X)))) |
| 4430 | return X; |
| 4431 | |
| 4432 | return nullptr; |
| 4433 | } |
| 4434 | |
| 4435 | Value *llvm::SimplifyFNegInst(Value *Op, FastMathFlags FMF, |
| 4436 | const SimplifyQuery &Q) { |
| 4437 | return ::simplifyFNegInst(Op, FMF, Q, RecursionLimit); |
| 4438 | } |
| 4439 | |
Sanjay Patel | e235942 | 2018-03-21 19:31:53 +0000 | [diff] [blame] | 4440 | static Constant *propagateNaN(Constant *In) { |
| 4441 | // If the input is a vector with undef elements, just return a default NaN. |
| 4442 | if (!In->isNaN()) |
| 4443 | return ConstantFP::getNaN(In->getType()); |
| 4444 | |
| 4445 | // Propagate the existing NaN constant when possible. |
| 4446 | // TODO: Should we quiet a signaling NaN? |
| 4447 | return In; |
| 4448 | } |
| 4449 | |
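| | /// Handle the folds that are shared by all the FP binops below: a binop with an
| | /// undef operand folds to NaN (undef may be chosen to be NaN), and an existing
| | /// NaN operand is propagated to the result.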
| 4450 | static Constant *simplifyFPBinop(Value *Op0, Value *Op1) { |
| 4451 | if (isa<UndefValue>(Op0) || isa<UndefValue>(Op1)) |
| 4452 | return ConstantFP::getNaN(Op0->getType()); |
| 4453 | |
| 4454 | if (match(Op0, m_NaN())) |
| 4455 | return propagateNaN(cast<Constant>(Op0)); |
| 4456 | if (match(Op1, m_NaN())) |
| 4457 | return propagateNaN(cast<Constant>(Op1)); |
| 4458 | |
| 4459 | return nullptr; |
| 4460 | } |
| 4461 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4462 | /// Given operands for an FAdd, see if we can fold the result. If not, this |
| 4463 | /// returns null. |
| 4464 | static Value *SimplifyFAddInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4465 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 4466 | if (Constant *C = foldOrCommuteConstant(Instruction::FAdd, Op0, Op1, Q)) |
| 4467 | return C; |
| 4468 | |
Sanjay Patel | e235942 | 2018-03-21 19:31:53 +0000 | [diff] [blame] | 4469 | if (Constant *C = simplifyFPBinop(Op0, Op1)) |
| 4470 | return C; |
Sanjay Patel | 4222716 | 2018-03-10 16:51:28 +0000 | [diff] [blame] | 4471 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4472 | // fadd X, -0 ==> X |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4473 | if (match(Op1, m_NegZeroFP())) |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4474 | return Op0; |
| 4475 | |
| 4476 | // fadd X, 0 ==> X, when we know X is not -0 |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4477 | if (match(Op1, m_PosZeroFP()) && |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4478 | (FMF.noSignedZeros() || CannotBeNegativeZero(Op0, Q.TLI))) |
| 4479 | return Op0; |
| 4480 | |
Cameron McInally | 0c82d9b | 2019-05-15 14:31:33 +0000 | [diff] [blame] | 4481 | // With nnan: -X + X --> 0.0 (and commuted variant) |
Sanjay Patel | 11f7f99 | 2018-03-14 21:23:27 +0000 | [diff] [blame] | 4482 | // We don't have to explicitly exclude infinities (ninf): INF + -INF == NaN. |
| 4483 | // Negative zeros are allowed because we always end up with positive zero: |
| 4484 | // X = -0.0: (-0.0 - (-0.0)) + (-0.0) == ( 0.0) + (-0.0) == 0.0 |
| 4485 | // X = -0.0: ( 0.0 - (-0.0)) + (-0.0) == ( 0.0) + (-0.0) == 0.0 |
| 4486 | // X = 0.0: (-0.0 - ( 0.0)) + ( 0.0) == (-0.0) + ( 0.0) == 0.0 |
| 4487 | // X = 0.0: ( 0.0 - ( 0.0)) + ( 0.0) == ( 0.0) + ( 0.0) == 0.0 |
Cameron McInally | 0c82d9b | 2019-05-15 14:31:33 +0000 | [diff] [blame] | 4488 | if (FMF.noNaNs()) { |
| 4489 | if (match(Op0, m_FSub(m_AnyZeroFP(), m_Specific(Op1))) || |
| 4490 | match(Op1, m_FSub(m_AnyZeroFP(), m_Specific(Op0)))) |
| 4491 | return ConstantFP::getNullValue(Op0->getType()); |
| 4492 | |
| 4493 | if (match(Op0, m_FNeg(m_Specific(Op1))) || |
| 4494 | match(Op1, m_FNeg(m_Specific(Op0)))) |
| 4495 | return ConstantFP::getNullValue(Op0->getType()); |
| 4496 | } |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4497 | |
Sanjay Patel | 9b07347 | 2018-08-07 20:32:55 +0000 | [diff] [blame] | 4498 | // (X - Y) + Y --> X |
| 4499 | // Y + (X - Y) --> X |
| 4500 | Value *X; |
| 4501 | if (FMF.noSignedZeros() && FMF.allowReassoc() && |
| 4502 | (match(Op0, m_FSub(m_Value(X), m_Specific(Op1))) || |
| 4503 | match(Op1, m_FSub(m_Value(X), m_Specific(Op0))))) |
| 4504 | return X; |
| 4505 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4506 | return nullptr; |
| 4507 | } |
| 4508 | |
| 4509 | /// Given operands for an FSub, see if we can fold the result. If not, this |
| 4510 | /// returns null. |
| 4511 | static Value *SimplifyFSubInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4512 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 4513 | if (Constant *C = foldOrCommuteConstant(Instruction::FSub, Op0, Op1, Q)) |
| 4514 | return C; |
| 4515 | |
Sanjay Patel | e235942 | 2018-03-21 19:31:53 +0000 | [diff] [blame] | 4516 | if (Constant *C = simplifyFPBinop(Op0, Op1)) |
| 4517 | return C; |
Sanjay Patel | 4222716 | 2018-03-10 16:51:28 +0000 | [diff] [blame] | 4518 | |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4519 | // fsub X, +0 ==> X |
| 4520 | if (match(Op1, m_PosZeroFP())) |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4521 | return Op0; |
| 4522 | |
| 4523 | // fsub X, -0 ==> X, when we know X is not -0 |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4524 | if (match(Op1, m_NegZeroFP()) && |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4525 | (FMF.noSignedZeros() || CannotBeNegativeZero(Op0, Q.TLI))) |
| 4526 | return Op0; |
| 4527 | |
| 4528 | // fsub -0.0, (fsub -0.0, X) ==> X |
Cameron McInally | 2d2a46d | 2019-05-20 13:13:35 +0000 | [diff] [blame] | 4529 | // fsub -0.0, (fneg X) ==> X |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4530 | Value *X; |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4531 | if (match(Op0, m_NegZeroFP()) && |
Cameron McInally | 2d2a46d | 2019-05-20 13:13:35 +0000 | [diff] [blame] | 4532 | match(Op1, m_FNeg(m_Value(X)))) |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4533 | return X; |
| 4534 | |
| 4535 | // fsub 0.0, (fsub 0.0, X) ==> X if signed zeros are ignored. |
Cameron McInally | 067e946 | 2019-05-17 16:47:00 +0000 | [diff] [blame] | 4536 | // fsub 0.0, (fneg X) ==> X if signed zeros are ignored. |
Sanjay Patel | a4f42f2 | 2018-03-15 14:29:27 +0000 | [diff] [blame] | 4537 | if (FMF.noSignedZeros() && match(Op0, m_AnyZeroFP()) && |
Cameron McInally | 067e946 | 2019-05-17 16:47:00 +0000 | [diff] [blame] | 4538 | (match(Op1, m_FSub(m_AnyZeroFP(), m_Value(X))) || |
| 4539 | match(Op1, m_FNeg(m_Value(X))))) |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4540 | return X; |
| 4541 | |
| 4542 | // fsub nnan x, x ==> 0.0 |
| 4543 | if (FMF.noNaNs() && Op0 == Op1) |
| 4544 | return Constant::getNullValue(Op0->getType()); |
| 4545 | |
Sanjay Patel | f7a8fb2 | 2018-08-07 20:14:27 +0000 | [diff] [blame] | 4546 | // Y - (Y - X) --> X |
Sanjay Patel | 4364d60 | 2018-08-07 20:23:49 +0000 | [diff] [blame] | 4547 | // (X + Y) - Y --> X |
Sanjay Patel | f7a8fb2 | 2018-08-07 20:14:27 +0000 | [diff] [blame] | 4548 | if (FMF.noSignedZeros() && FMF.allowReassoc() && |
Sanjay Patel | 4364d60 | 2018-08-07 20:23:49 +0000 | [diff] [blame] | 4549 | (match(Op1, m_FSub(m_Specific(Op0), m_Value(X))) || |
| 4550 | match(Op0, m_c_FAdd(m_Specific(Op1), m_Value(X))))) |
Sanjay Patel | f7a8fb2 | 2018-08-07 20:14:27 +0000 | [diff] [blame] | 4551 | return X; |
| 4552 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4553 | return nullptr; |
| 4554 | } |
| 4555 | |
| 4556 | /// Given the operands for an FMul, see if we can fold the result |
| 4557 | static Value *SimplifyFMulInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4558 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 4559 | if (Constant *C = foldOrCommuteConstant(Instruction::FMul, Op0, Op1, Q)) |
| 4560 | return C; |
| 4561 | |
Sanjay Patel | e235942 | 2018-03-21 19:31:53 +0000 | [diff] [blame] | 4562 | if (Constant *C = simplifyFPBinop(Op0, Op1)) |
| 4563 | return C; |
Sanjay Patel | 4222716 | 2018-03-10 16:51:28 +0000 | [diff] [blame] | 4564 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4565 | // fmul X, 1.0 ==> X |
| 4566 | if (match(Op1, m_FPOne())) |
| 4567 | return Op0; |
| 4568 | |
| 4569 | // fmul nnan nsz X, 0 ==> 0 |
Sanjay Patel | a4f42f2 | 2018-03-15 14:29:27 +0000 | [diff] [blame] | 4570 | if (FMF.noNaNs() && FMF.noSignedZeros() && match(Op1, m_AnyZeroFP())) |
| 4571 | return ConstantFP::getNullValue(Op0->getType()); |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4572 | |
Sanjay Patel | 95ec4a4 | 2018-03-18 14:12:25 +0000 | [diff] [blame] | 4573 | // sqrt(X) * sqrt(X) --> X, if we can: |
| 4574 | // 1. Remove the intermediate rounding (reassociate). |
| 4575 | // 2. Ignore non-zero negative numbers because sqrt would produce NAN. |
| 4576 | // 3. Ignore -0.0 because sqrt(-0.0) == -0.0, but -0.0 * -0.0 == 0.0. |
Sanjay Patel | db53d18 | 2018-02-23 22:20:13 +0000 | [diff] [blame] | 4577 | Value *X; |
Sanjay Patel | 95ec4a4 | 2018-03-18 14:12:25 +0000 | [diff] [blame] | 4578 | if (Op0 == Op1 && match(Op0, m_Intrinsic<Intrinsic::sqrt>(m_Value(X))) && |
| 4579 | FMF.allowReassoc() && FMF.noNaNs() && FMF.noSignedZeros()) |
Sanjay Patel | db53d18 | 2018-02-23 22:20:13 +0000 | [diff] [blame] | 4580 | return X; |
| 4581 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4582 | return nullptr; |
| 4583 | } |
| 4584 | |
| 4585 | Value *llvm::SimplifyFAddInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4586 | const SimplifyQuery &Q) { |
| 4587 | return ::SimplifyFAddInst(Op0, Op1, FMF, Q, RecursionLimit); |
| 4588 | } |
| 4589 | |
| 4590 | |
| 4591 | Value *llvm::SimplifyFSubInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4592 | const SimplifyQuery &Q) { |
| 4593 | return ::SimplifyFSubInst(Op0, Op1, FMF, Q, RecursionLimit); |
| 4594 | } |
| 4595 | |
| 4596 | Value *llvm::SimplifyFMulInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4597 | const SimplifyQuery &Q) { |
| 4598 | return ::SimplifyFMulInst(Op0, Op1, FMF, Q, RecursionLimit); |
| 4599 | } |
| 4600 | |
| 4601 | static Value *SimplifyFDivInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4602 | const SimplifyQuery &Q, unsigned) { |
| 4603 | if (Constant *C = foldOrCommuteConstant(Instruction::FDiv, Op0, Op1, Q)) |
| 4604 | return C; |
| 4605 | |
Sanjay Patel | e235942 | 2018-03-21 19:31:53 +0000 | [diff] [blame] | 4606 | if (Constant *C = simplifyFPBinop(Op0, Op1)) |
| 4607 | return C; |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4608 | |
| 4609 | // X / 1.0 -> X |
| 4610 | if (match(Op1, m_FPOne())) |
| 4611 | return Op0; |
| 4612 | |
| 4613 | // 0 / X -> 0 |
| 4614 | // Requires that NaNs are ignored (X could be zero, and 0/0 is NaN) and signed
| 4615 | // zeros are ignored (X could be positive or negative, so the output sign is unknown).
Sanjay Patel | a4f42f2 | 2018-03-15 14:29:27 +0000 | [diff] [blame] | 4616 | if (FMF.noNaNs() && FMF.noSignedZeros() && match(Op0, m_AnyZeroFP())) |
| 4617 | return ConstantFP::getNullValue(Op0->getType()); |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4618 | |
| 4619 | if (FMF.noNaNs()) { |
| 4620 | // X / X -> 1.0 is legal when NaNs are ignored. |
Sanjay Patel | 83f0566 | 2018-01-30 00:18:37 +0000 | [diff] [blame] | 4621 | // We can ignore infinities because INF/INF is NaN. |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4622 | if (Op0 == Op1) |
| 4623 | return ConstantFP::get(Op0->getType(), 1.0); |
| 4624 | |
Sanjay Patel | 83f0566 | 2018-01-30 00:18:37 +0000 | [diff] [blame] | 4625 | // (X * Y) / Y --> X if we can reassociate to the above form. |
| 4626 | Value *X; |
| 4627 | if (FMF.allowReassoc() && match(Op0, m_c_FMul(m_Value(X), m_Specific(Op1)))) |
| 4628 | return X; |
| 4629 | |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4630 | // -X / X -> -1.0 and |
| 4631 | // X / -X -> -1.0 are legal when NaNs are ignored. |
| 4632 | // We can ignore signed zeros because +-0.0/+-0.0 is NaN and ignored. |
Cameron McInally | bea5967 | 2018-10-09 21:48:00 +0000 | [diff] [blame] | 4633 | if (match(Op0, m_FNegNSZ(m_Specific(Op1))) || |
| 4634 | match(Op1, m_FNegNSZ(m_Specific(Op0)))) |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4635 | return ConstantFP::get(Op0->getType(), -1.0); |
| 4636 | } |
| 4637 | |
| 4638 | return nullptr; |
| 4639 | } |
| 4640 | |
| 4641 | Value *llvm::SimplifyFDivInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4642 | const SimplifyQuery &Q) { |
| 4643 | return ::SimplifyFDivInst(Op0, Op1, FMF, Q, RecursionLimit); |
| 4644 | } |
| 4645 | |
| 4646 | static Value *SimplifyFRemInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4647 | const SimplifyQuery &Q, unsigned) { |
| 4648 | if (Constant *C = foldOrCommuteConstant(Instruction::FRem, Op0, Op1, Q)) |
| 4649 | return C; |
| 4650 | |
Sanjay Patel | e235942 | 2018-03-21 19:31:53 +0000 | [diff] [blame] | 4651 | if (Constant *C = simplifyFPBinop(Op0, Op1)) |
| 4652 | return C; |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4653 | |
Sanjay Patel | 8f063d0 | 2018-03-15 14:04:31 +0000 | [diff] [blame] | 4654 | // Unlike fdiv, the result of frem always matches the sign of the dividend. |
| 4655 | // The constant match may include undef elements in a vector, so return a full |
| 4656 | // zero constant as the result. |
| 4657 | if (FMF.noNaNs()) { |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4658 | // +0 % X -> 0 |
| 4659 | if (match(Op0, m_PosZeroFP())) |
Sanjay Patel | 8f063d0 | 2018-03-15 14:04:31 +0000 | [diff] [blame] | 4660 | return ConstantFP::getNullValue(Op0->getType()); |
| 4661 | // -0 % X -> -0 |
Sanjay Patel | 93e64dd | 2018-03-25 21:16:33 +0000 | [diff] [blame] | 4662 | if (match(Op0, m_NegZeroFP())) |
Sanjay Patel | 8f063d0 | 2018-03-15 14:04:31 +0000 | [diff] [blame] | 4663 | return ConstantFP::getNegativeZero(Op0->getType()); |
| 4664 | } |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4665 | |
| 4666 | return nullptr; |
| 4667 | } |
| 4668 | |
| 4669 | Value *llvm::SimplifyFRemInst(Value *Op0, Value *Op1, FastMathFlags FMF, |
| 4670 | const SimplifyQuery &Q) { |
| 4671 | return ::SimplifyFRemInst(Op0, Op1, FMF, Q, RecursionLimit); |
| 4672 | } |
| 4673 | |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 4674 | //=== Helper functions for higher up the class hierarchy. |
Chris Lattner | c1f1907 | 2009-11-09 23:28:39 +0000 | [diff] [blame] | 4675 | |
Cameron McInally | c316769 | 2019-05-06 16:05:10 +0000 | [diff] [blame] | 4676 | /// Given the operand for a UnaryOperator, see if we can fold the result. |
| 4677 | /// If not, this returns null. |
| 4678 | static Value *simplifyUnOp(unsigned Opcode, Value *Op, const SimplifyQuery &Q, |
| 4679 | unsigned MaxRecurse) { |
| 4680 | switch (Opcode) { |
| 4681 | case Instruction::FNeg: |
| 4682 | return simplifyFNegInst(Op, FastMathFlags(), Q, MaxRecurse); |
| 4683 | default: |
| 4684 | llvm_unreachable("Unexpected opcode"); |
| 4685 | } |
| 4686 | } |
| 4687 | |
| 4688 | /// Given the operand for a UnaryOperator, see if we can fold the result. |
| 4689 | /// If not, this returns null. |
Jay Foad | 565c543 | 2019-07-24 12:50:10 +0000 | [diff] [blame] | 4690 | /// Try to use FastMathFlags when folding the result. |
Cameron McInally | c316769 | 2019-05-06 16:05:10 +0000 | [diff] [blame] | 4691 | static Value *simplifyFPUnOp(unsigned Opcode, Value *Op, |
| 4692 | const FastMathFlags &FMF, |
| 4693 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
| 4694 | switch (Opcode) { |
| 4695 | case Instruction::FNeg: |
| 4696 | return simplifyFNegInst(Op, FMF, Q, MaxRecurse); |
| 4697 | default: |
| 4698 | return simplifyUnOp(Opcode, Op, Q, MaxRecurse); |
| 4699 | } |
| 4700 | } |
| 4701 | |
Craig Topper | b457e43 | 2019-05-31 08:10:23 +0000 | [diff] [blame] | 4702 | Value *llvm::SimplifyUnOp(unsigned Opcode, Value *Op, const SimplifyQuery &Q) { |
| 4703 | return ::simplifyUnOp(Opcode, Op, Q, RecursionLimit); |
| 4704 | } |
| 4705 | |
Jay Foad | 565c543 | 2019-07-24 12:50:10 +0000 | [diff] [blame] | 4706 | Value *llvm::SimplifyUnOp(unsigned Opcode, Value *Op, FastMathFlags FMF, |
| 4707 | const SimplifyQuery &Q) { |
Cameron McInally | c316769 | 2019-05-06 16:05:10 +0000 | [diff] [blame] | 4708 | return ::simplifyFPUnOp(Opcode, Op, FMF, Q, RecursionLimit); |
| 4709 | } |
| 4710 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4711 | /// Given operands for a BinaryOperator, see if we can fold the result. |
| 4712 | /// If not, this returns null. |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 4713 | static Value *SimplifyBinOp(unsigned Opcode, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4714 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 4715 | switch (Opcode) { |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 4716 | case Instruction::Add: |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4717 | return SimplifyAddInst(LHS, RHS, false, false, Q, MaxRecurse); |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 4718 | case Instruction::Sub: |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4719 | return SimplifySubInst(LHS, RHS, false, false, Q, MaxRecurse); |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4720 | case Instruction::Mul: |
| 4721 | return SimplifyMulInst(LHS, RHS, Q, MaxRecurse); |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4722 | case Instruction::SDiv: |
| 4723 | return SimplifySDivInst(LHS, RHS, Q, MaxRecurse); |
| 4724 | case Instruction::UDiv: |
| 4725 | return SimplifyUDivInst(LHS, RHS, Q, MaxRecurse); |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4726 | case Instruction::SRem: |
| 4727 | return SimplifySRemInst(LHS, RHS, Q, MaxRecurse); |
| 4728 | case Instruction::URem: |
| 4729 | return SimplifyURemInst(LHS, RHS, Q, MaxRecurse); |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 4730 | case Instruction::Shl: |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4731 | return SimplifyShlInst(LHS, RHS, false, false, Q, MaxRecurse); |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 4732 | case Instruction::LShr: |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4733 | return SimplifyLShrInst(LHS, RHS, false, Q, MaxRecurse); |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 4734 | case Instruction::AShr: |
Sanjay Patel | 1fd16f0 | 2017-04-01 18:40:30 +0000 | [diff] [blame] | 4735 | return SimplifyAShrInst(LHS, RHS, false, Q, MaxRecurse); |
| 4736 | case Instruction::And: |
| 4737 | return SimplifyAndInst(LHS, RHS, Q, MaxRecurse); |
| 4738 | case Instruction::Or: |
| 4739 | return SimplifyOrInst(LHS, RHS, Q, MaxRecurse); |
| 4740 | case Instruction::Xor: |
| 4741 | return SimplifyXorInst(LHS, RHS, Q, MaxRecurse); |
Sanjay Patel | fa877fd | 2017-09-11 13:34:27 +0000 | [diff] [blame] | 4742 | case Instruction::FAdd: |
| 4743 | return SimplifyFAddInst(LHS, RHS, FastMathFlags(), Q, MaxRecurse); |
| 4744 | case Instruction::FSub: |
| 4745 | return SimplifyFSubInst(LHS, RHS, FastMathFlags(), Q, MaxRecurse); |
| 4746 | case Instruction::FMul: |
| 4747 | return SimplifyFMulInst(LHS, RHS, FastMathFlags(), Q, MaxRecurse); |
| 4748 | case Instruction::FDiv: |
| 4749 | return SimplifyFDivInst(LHS, RHS, FastMathFlags(), Q, MaxRecurse); |
| 4750 | case Instruction::FRem: |
| 4751 | return SimplifyFRemInst(LHS, RHS, FastMathFlags(), Q, MaxRecurse); |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 4752 | default: |
Craig Topper | 8ef20ea | 2017-04-06 18:59:08 +0000 | [diff] [blame] | 4753 | llvm_unreachable("Unexpected opcode"); |
Chris Lattner | a71e9d6 | 2009-11-10 00:55:12 +0000 | [diff] [blame] | 4754 | } |
| 4755 | } |
Chris Lattner | c1f1907 | 2009-11-09 23:28:39 +0000 | [diff] [blame] | 4756 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4757 | /// Given operands for a BinaryOperator, see if we can fold the result. |
| 4758 | /// If not, this returns null. |
Jay Foad | 565c543 | 2019-07-24 12:50:10 +0000 | [diff] [blame] | 4759 | /// Try to use FastMathFlags when folding the result. |
| 4760 | static Value *SimplifyBinOp(unsigned Opcode, Value *LHS, Value *RHS, |
| 4761 | const FastMathFlags &FMF, const SimplifyQuery &Q, |
| 4762 | unsigned MaxRecurse) { |
Michael Zolotukhin | 4e8598e | 2015-02-06 20:02:51 +0000 | [diff] [blame] | 4763 | switch (Opcode) { |
| 4764 | case Instruction::FAdd: |
| 4765 | return SimplifyFAddInst(LHS, RHS, FMF, Q, MaxRecurse); |
| 4766 | case Instruction::FSub: |
| 4767 | return SimplifyFSubInst(LHS, RHS, FMF, Q, MaxRecurse); |
| 4768 | case Instruction::FMul: |
| 4769 | return SimplifyFMulInst(LHS, RHS, FMF, Q, MaxRecurse); |
Zia Ansari | 394cef8 | 2016-12-08 23:27:40 +0000 | [diff] [blame] | 4770 | case Instruction::FDiv: |
| 4771 | return SimplifyFDivInst(LHS, RHS, FMF, Q, MaxRecurse); |
Michael Zolotukhin | 4e8598e | 2015-02-06 20:02:51 +0000 | [diff] [blame] | 4772 | default: |
| 4773 | return SimplifyBinOp(Opcode, LHS, RHS, Q, MaxRecurse); |
| 4774 | } |
| 4775 | } |
| 4776 | |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 4777 | Value *llvm::SimplifyBinOp(unsigned Opcode, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4778 | const SimplifyQuery &Q) { |
| 4779 | return ::SimplifyBinOp(Opcode, LHS, RHS, Q, RecursionLimit); |
| 4780 | } |
| 4781 | |
Jay Foad | 565c543 | 2019-07-24 12:50:10 +0000 | [diff] [blame] | 4782 | Value *llvm::SimplifyBinOp(unsigned Opcode, Value *LHS, Value *RHS, |
| 4783 | FastMathFlags FMF, const SimplifyQuery &Q) { |
| 4784 | return ::SimplifyBinOp(Opcode, LHS, RHS, FMF, Q, RecursionLimit); |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4785 | } |
| 4786 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 4787 | /// Given operands for a CmpInst, see if we can fold the result. |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 4788 | static Value *SimplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4789 | const SimplifyQuery &Q, unsigned MaxRecurse) { |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 4790 | if (CmpInst::isIntPredicate((CmpInst::Predicate)Predicate)) |
Duncan Sands | b8cee00 | 2012-03-13 11:42:19 +0000 | [diff] [blame] | 4791 | return SimplifyICmpInst(Predicate, LHS, RHS, Q, MaxRecurse); |
Benjamin Kramer | f4ebfa3 | 2015-07-10 14:02:02 +0000 | [diff] [blame] | 4792 | return SimplifyFCmpInst(Predicate, LHS, RHS, FastMathFlags(), Q, MaxRecurse); |
Duncan Sands | f3b1bf1 | 2010-11-10 18:23:01 +0000 | [diff] [blame] | 4793 | } |
| 4794 | |
| 4795 | Value *llvm::SimplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 4796 | const SimplifyQuery &Q) { |
| 4797 | return ::SimplifyCmpInst(Predicate, LHS, RHS, Q, RecursionLimit); |
| 4798 | } |
| 4799 | |
Michael Ilseman | 5485729 | 2013-02-07 19:26:05 +0000 | [diff] [blame] | 4800 | static bool IsIdempotent(Intrinsic::ID ID) { |
| 4801 | switch (ID) { |
| 4802 | default: return false; |
| 4803 | |
| 4804 | // Unary idempotent: f(f(x)) = f(x) |
| 4805 | case Intrinsic::fabs: |
| 4806 | case Intrinsic::floor: |
| 4807 | case Intrinsic::ceil: |
| 4808 | case Intrinsic::trunc: |
| 4809 | case Intrinsic::rint: |
| 4810 | case Intrinsic::nearbyint: |
Hal Finkel | 171817e | 2013-08-07 22:49:12 +0000 | [diff] [blame] | 4811 | case Intrinsic::round: |
Matt Arsenault | 3ced3d9 | 2017-09-07 01:21:43 +0000 | [diff] [blame] | 4812 | case Intrinsic::canonicalize: |
Michael Ilseman | 5485729 | 2013-02-07 19:26:05 +0000 | [diff] [blame] | 4813 | return true; |
| 4814 | } |
| 4815 | } |
| 4816 | |
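| | /// Try to constant-fold a call to llvm.load.relative with constant operands.
| | /// Roughly (a summary of the checks below): the 32-bit value at Ptr + Offset is
| | /// constant-folded and, if it has the relative form "Target - Ptr", the call
| | /// simplifies to Target (bitcast to i8*).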
Peter Collingbourne | 7dd8dbf | 2016-04-22 21:18:02 +0000 | [diff] [blame] | 4817 | static Value *SimplifyRelativeLoad(Constant *Ptr, Constant *Offset, |
| 4818 | const DataLayout &DL) { |
| 4819 | GlobalValue *PtrSym; |
| 4820 | APInt PtrOffset; |
| 4821 | if (!IsConstantOffsetFromGlobal(Ptr, PtrSym, PtrOffset, DL)) |
| 4822 | return nullptr; |
| 4823 | |
| 4824 | Type *Int8PtrTy = Type::getInt8PtrTy(Ptr->getContext()); |
| 4825 | Type *Int32Ty = Type::getInt32Ty(Ptr->getContext()); |
| 4826 | Type *Int32PtrTy = Int32Ty->getPointerTo(); |
| 4827 | Type *Int64Ty = Type::getInt64Ty(Ptr->getContext()); |
| 4828 | |
| 4829 | auto *OffsetConstInt = dyn_cast<ConstantInt>(Offset); |
| 4830 | if (!OffsetConstInt || OffsetConstInt->getType()->getBitWidth() > 64) |
| 4831 | return nullptr; |
| 4832 | |
| 4833 | uint64_t OffsetInt = OffsetConstInt->getSExtValue(); |
| 4834 | if (OffsetInt % 4 != 0) |
| 4835 | return nullptr; |
| 4836 | |
| 4837 | Constant *C = ConstantExpr::getGetElementPtr( |
| 4838 | Int32Ty, ConstantExpr::getBitCast(Ptr, Int32PtrTy), |
| 4839 | ConstantInt::get(Int64Ty, OffsetInt / 4)); |
| 4840 | Constant *Loaded = ConstantFoldLoadFromConstPtr(C, Int32Ty, DL); |
| 4841 | if (!Loaded) |
| 4842 | return nullptr; |
| 4843 | |
| 4844 | auto *LoadedCE = dyn_cast<ConstantExpr>(Loaded); |
| 4845 | if (!LoadedCE) |
| 4846 | return nullptr; |
| 4847 | |
| 4848 | if (LoadedCE->getOpcode() == Instruction::Trunc) { |
| 4849 | LoadedCE = dyn_cast<ConstantExpr>(LoadedCE->getOperand(0)); |
| 4850 | if (!LoadedCE) |
| 4851 | return nullptr; |
| 4852 | } |
| 4853 | |
| 4854 | if (LoadedCE->getOpcode() != Instruction::Sub) |
| 4855 | return nullptr; |
| 4856 | |
| 4857 | auto *LoadedLHS = dyn_cast<ConstantExpr>(LoadedCE->getOperand(0)); |
| 4858 | if (!LoadedLHS || LoadedLHS->getOpcode() != Instruction::PtrToInt) |
| 4859 | return nullptr; |
| 4860 | auto *LoadedLHSPtr = LoadedLHS->getOperand(0); |
| 4861 | |
| 4862 | Constant *LoadedRHS = LoadedCE->getOperand(1); |
| 4863 | GlobalValue *LoadedRHSSym; |
| 4864 | APInt LoadedRHSOffset; |
| 4865 | if (!IsConstantOffsetFromGlobal(LoadedRHS, LoadedRHSSym, LoadedRHSOffset, |
| 4866 | DL) || |
| 4867 | PtrSym != LoadedRHSSym || PtrOffset != LoadedRHSOffset) |
| 4868 | return nullptr; |
| 4869 | |
| 4870 | return ConstantExpr::getBitCast(LoadedLHSPtr, Int8PtrTy); |
| 4871 | } |
| 4872 | |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4873 | static Value *simplifyUnaryIntrinsic(Function *F, Value *Op0, |
| 4874 | const SimplifyQuery &Q) { |
| 4875 | // Applying an idempotent intrinsic to its own result is a no-op: f(f(x)) == f(x).
David Majnemer | 1503258 | 2015-05-22 03:56:46 +0000 | [diff] [blame] | 4876 | Intrinsic::ID IID = F->getIntrinsicID(); |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4877 | if (IsIdempotent(IID)) |
| 4878 | if (auto *II = dyn_cast<IntrinsicInst>(Op0)) |
| 4879 | if (II->getIntrinsicID() == IID) |
| 4880 | return II; |
Michael Ilseman | 5485729 | 2013-02-07 19:26:05 +0000 | [diff] [blame] | 4881 | |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4882 | Value *X; |
| 4883 | switch (IID) { |
| 4884 | case Intrinsic::fabs: |
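| | // fabs(X) -> X when the sign bit of X is known to be zero.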
| 4885 | if (SignBitMustBeZero(Op0, Q.TLI)) return Op0; |
| 4886 | break; |
| 4887 | case Intrinsic::bswap: |
| 4888 | // bswap(bswap(x)) -> x |
| 4889 | if (match(Op0, m_BSwap(m_Value(X)))) return X; |
| 4890 | break; |
| 4891 | case Intrinsic::bitreverse: |
| 4892 | // bitreverse(bitreverse(x)) -> x |
| 4893 | if (match(Op0, m_BitReverse(m_Value(X)))) return X; |
| 4894 | break; |
| 4895 | case Intrinsic::exp: |
| 4896 | // exp(log(x)) -> x |
| 4897 | if (Q.CxtI->hasAllowReassoc() && |
| 4898 | match(Op0, m_Intrinsic<Intrinsic::log>(m_Value(X)))) return X; |
| 4899 | break; |
| 4900 | case Intrinsic::exp2: |
| 4901 | // exp2(log2(x)) -> x |
| 4902 | if (Q.CxtI->hasAllowReassoc() && |
| 4903 | match(Op0, m_Intrinsic<Intrinsic::log2>(m_Value(X)))) return X; |
| 4904 | break; |
| 4905 | case Intrinsic::log: |
| 4906 | // log(exp(x)) -> x |
| 4907 | if (Q.CxtI->hasAllowReassoc() && |
| 4908 | match(Op0, m_Intrinsic<Intrinsic::exp>(m_Value(X)))) return X; |
| 4909 | break; |
| 4910 | case Intrinsic::log2: |
| 4911 | // log2(exp2(x)) -> x |
| 4912 | if (Q.CxtI->hasAllowReassoc() && |
Dmitry Venikov | aaa709f | 2019-02-03 03:48:30 +0000 | [diff] [blame] | 4913 | (match(Op0, m_Intrinsic<Intrinsic::exp2>(m_Value(X))) || |
| 4914 | match(Op0, m_Intrinsic<Intrinsic::pow>(m_SpecificFP(2.0), |
| 4915 | m_Value(X))))) return X; |
| 4916 | break; |
| 4917 | case Intrinsic::log10: |
| 4918 | // log10(pow(10.0, x)) -> x |
| 4919 | if (Q.CxtI->hasAllowReassoc() && |
| 4920 | match(Op0, m_Intrinsic<Intrinsic::pow>(m_SpecificFP(10.0), |
| 4921 | m_Value(X)))) return X; |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4922 | break; |
Matt Arsenault | 03e74928 | 2019-04-03 00:25:06 +0000 | [diff] [blame] | 4923 | case Intrinsic::floor: |
| 4924 | case Intrinsic::trunc: |
| 4925 | case Intrinsic::ceil: |
| 4926 | case Intrinsic::round: |
| 4927 | case Intrinsic::nearbyint: |
| 4928 | case Intrinsic::rint: { |
| 4929 | // floor (sitofp x) -> sitofp x |
| 4930 | // floor (uitofp x) -> uitofp x |
| 4931 | // |
| 4932 | // Converting from int always results in a finite integral number or |
| 4933 | // infinity. For either of those inputs, these rounding functions always |
| 4934 | // return the same value, so the rounding can be eliminated. |
| 4935 | if (match(Op0, m_SIToFP(m_Value())) || match(Op0, m_UIToFP(m_Value()))) |
| 4936 | return Op0; |
| 4937 | break; |
| 4938 | } |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4939 | default: |
| 4940 | break; |
Matt Arsenault | 1e0edbf | 2017-01-11 00:33:24 +0000 | [diff] [blame] | 4941 | } |
Michael Ilseman | 5485729 | 2013-02-07 19:26:05 +0000 | [diff] [blame] | 4942 | |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4943 | return nullptr; |
| 4944 | } |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 4945 | |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4946 | static Value *simplifyBinaryIntrinsic(Function *F, Value *Op0, Value *Op1, |
| 4947 | const SimplifyQuery &Q) { |
| 4948 | Intrinsic::ID IID = F->getIntrinsicID(); |
| 4949 | Type *ReturnType = F->getReturnType(); |
| 4950 | switch (IID) { |
| 4951 | case Intrinsic::usub_with_overflow: |
| 4952 | case Intrinsic::ssub_with_overflow: |
| 4953 | // X - X -> { 0, false } |
| 4954 | if (Op0 == Op1) |
| 4955 | return Constant::getNullValue(ReturnType); |
Roman Lebedev | 5a663bd | 2019-06-16 20:39:45 +0000 | [diff] [blame] | 4956 | LLVM_FALLTHROUGH; |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4957 | case Intrinsic::uadd_with_overflow: |
| 4958 | case Intrinsic::sadd_with_overflow: |
Roman Lebedev | 5a663bd | 2019-06-16 20:39:45 +0000 | [diff] [blame] | 4959 | // X - undef -> { undef, false } |
| 4960 | // undef - X -> { undef, false } |
| 4961 | // X + undef -> { undef, false } |
| 4962 | // undef + X -> { undef, false }
| 4963 | if (isa<UndefValue>(Op0) || isa<UndefValue>(Op1)) { |
| 4964 | return ConstantStruct::get( |
| 4965 | cast<StructType>(ReturnType), |
| 4966 | {UndefValue::get(ReturnType->getStructElementType(0)), |
| 4967 | Constant::getNullValue(ReturnType->getStructElementType(1))}); |
| 4968 | } |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 4969 | break; |
| 4970 | case Intrinsic::umul_with_overflow: |
| 4971 | case Intrinsic::smul_with_overflow: |
| 4972 | // 0 * X -> { 0, false } |
| 4973 | // X * 0 -> { 0, false } |
| 4974 | if (match(Op0, m_Zero()) || match(Op1, m_Zero())) |
| 4975 | return Constant::getNullValue(ReturnType); |
| 4976 | // undef * X -> { 0, false } |
| 4977 | // X * undef -> { 0, false } |
| 4978 | if (match(Op0, m_Undef()) || match(Op1, m_Undef())) |
| 4979 | return Constant::getNullValue(ReturnType); |
| 4980 | break; |
Sanjay Patel | eea21da | 2018-11-20 17:20:26 +0000 | [diff] [blame] | 4981 | case Intrinsic::uadd_sat: |
| 4982 | // sat(MAX + X) -> MAX |
| 4983 | // sat(X + MAX) -> MAX |
| 4984 | if (match(Op0, m_AllOnes()) || match(Op1, m_AllOnes())) |
| 4985 | return Constant::getAllOnesValue(ReturnType); |
| 4986 | LLVM_FALLTHROUGH; |
| 4987 | case Intrinsic::sadd_sat: |
| 4988 | // sat(X + undef) -> -1 |
| 4989 | // sat(undef + X) -> -1 |
| 4990 | // For unsigned: Assume undef is MAX, thus we saturate to MAX (-1). |
| 4991 | // For signed: Assume undef is ~X, in which case X + ~X = -1. |
| 4992 | if (match(Op0, m_Undef()) || match(Op1, m_Undef())) |
| 4993 | return Constant::getAllOnesValue(ReturnType); |
| 4994 | |
| 4995 | // X + 0 -> X |
| 4996 | if (match(Op1, m_Zero())) |
| 4997 | return Op0; |
| 4998 | // 0 + X -> X |
| 4999 | if (match(Op0, m_Zero())) |
| 5000 | return Op1; |
| 5001 | break; |
| 5002 | case Intrinsic::usub_sat: |
| 5003 | // sat(0 - X) -> 0, sat(X - MAX) -> 0 |
| 5004 | if (match(Op0, m_Zero()) || match(Op1, m_AllOnes())) |
| 5005 | return Constant::getNullValue(ReturnType); |
| 5006 | LLVM_FALLTHROUGH; |
| 5007 | case Intrinsic::ssub_sat: |
| 5008 | // X - X -> 0, X - undef -> 0, undef - X -> 0 |
| 5009 | if (Op0 == Op1 || match(Op0, m_Undef()) || match(Op1, m_Undef())) |
| 5010 | return Constant::getNullValue(ReturnType); |
| 5011 | // X - 0 -> X |
| 5012 | if (match(Op1, m_Zero())) |
| 5013 | return Op0; |
| 5014 | break; |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5015 | case Intrinsic::load_relative: |
| 5016 | if (auto *C0 = dyn_cast<Constant>(Op0)) |
| 5017 | if (auto *C1 = dyn_cast<Constant>(Op1)) |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 5018 | return SimplifyRelativeLoad(C0, C1, Q.DL); |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5019 | break; |
| 5020 | case Intrinsic::powi: |
| 5021 | if (auto *Power = dyn_cast<ConstantInt>(Op1)) { |
| 5022 | // powi(x, 0) -> 1.0 |
| 5023 | if (Power->isZero()) |
| 5024 | return ConstantFP::get(Op0->getType(), 1.0); |
| 5025 | // powi(x, 1) -> x |
| 5026 | if (Power->isOne()) |
| 5027 | return Op0; |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 5028 | } |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5029 | break; |
| 5030 | case Intrinsic::maxnum: |
Thomas Lively | c339250 | 2018-10-19 19:01:26 +0000 | [diff] [blame] | 5031 | case Intrinsic::minnum: |
| 5032 | case Intrinsic::maximum: |
| 5033 | case Intrinsic::minimum: { |
Sanjay Patel | 28c7e41 | 2018-08-01 23:05:55 +0000 | [diff] [blame] | 5034 | // If the arguments are the same, this is a no-op. |
| 5035 | if (Op0 == Op1) return Op0; |
| 5036 | |
Thomas Lively | c339250 | 2018-10-19 19:01:26 +0000 | [diff] [blame] | 5037 | // If one argument is undef, return the other argument. |
| 5038 | if (match(Op0, m_Undef())) |
| 5039 | return Op1; |
| 5040 | if (match(Op1, m_Undef())) |
| 5041 | return Op0; |
| 5042 | |
| 5043 | // If one argument is NaN, return other or NaN appropriately. |
| 5044 | bool PropagateNaN = IID == Intrinsic::minimum || IID == Intrinsic::maximum; |
| 5045 | if (match(Op0, m_NaN())) |
| 5046 | return PropagateNaN ? Op0 : Op1; |
| 5047 | if (match(Op1, m_NaN())) |
| 5048 | return PropagateNaN ? Op1 : Op0; |
Sanjay Patel | 3f6e9a7 | 2018-08-02 14:33:40 +0000 | [diff] [blame] | 5049 | |
Sanjay Patel | 948ff87 | 2018-08-07 14:36:27 +0000 | [diff] [blame] | 5050 | // Min/max of the same operation with common operand: |
| 5051 | // m(m(X, Y)), X --> m(X, Y) (4 commuted variants) |
| 5052 | if (auto *M0 = dyn_cast<IntrinsicInst>(Op0)) |
| 5053 | if (M0->getIntrinsicID() == IID && |
| 5054 | (M0->getOperand(0) == Op1 || M0->getOperand(1) == Op1)) |
| 5055 | return Op0; |
| 5056 | if (auto *M1 = dyn_cast<IntrinsicInst>(Op1)) |
| 5057 | if (M1->getIntrinsicID() == IID && |
| 5058 | (M1->getOperand(0) == Op0 || M1->getOperand(1) == Op0)) |
| 5059 | return Op1; |
| 5060 | |
Thomas Lively | c339250 | 2018-10-19 19:01:26 +0000 | [diff] [blame] | 5061 | // min(X, -Inf) --> -Inf (and commuted variant) |
| 5062 | // max(X, +Inf) --> +Inf (and commuted variant) |
| 5063 | bool UseNegInf = IID == Intrinsic::minnum || IID == Intrinsic::minimum; |
Sanjay Patel | c6944f7 | 2018-08-09 22:20:44 +0000 | [diff] [blame] | 5064 | const APFloat *C; |
| 5065 | if ((match(Op0, m_APFloat(C)) && C->isInfinity() && |
| 5066 | C->isNegative() == UseNegInf) || |
| 5067 | (match(Op1, m_APFloat(C)) && C->isInfinity() && |
| 5068 | C->isNegative() == UseNegInf)) |
| 5069 | return ConstantFP::getInfinity(ReturnType, UseNegInf); |
| 5070 | |
| 5071 | // TODO: minnum(nnan x, inf) -> x |
| 5072 | // TODO: minnum(nnan ninf x, flt_max) -> x |
| 5073 | // TODO: maxnum(nnan x, -inf) -> x |
| 5074 | // TODO: maxnum(nnan ninf x, -flt_max) -> x |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5075 | break; |
Sanjay Patel | c6944f7 | 2018-08-09 22:20:44 +0000 | [diff] [blame] | 5076 | } |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5077 | default: |
| 5078 | break; |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 5079 | } |
| 5080 | |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5081 | return nullptr; |
| 5082 | } |
| 5083 | |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5084 | static Value *simplifyIntrinsic(CallBase *Call, const SimplifyQuery &Q) { |
| 5085 | |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5086 | // Intrinsics with no operands have some kind of side effect. Don't simplify. |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5087 | unsigned NumOperands = Call->getNumArgOperands(); |
| 5088 | if (!NumOperands) |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5089 | return nullptr; |
| 5090 | |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5091 | Function *F = cast<Function>(Call->getCalledFunction()); |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5092 | Intrinsic::ID IID = F->getIntrinsicID(); |
| 5093 | if (NumOperands == 1) |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5094 | return simplifyUnaryIntrinsic(F, Call->getArgOperand(0), Q); |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5095 | |
| 5096 | if (NumOperands == 2) |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5097 | return simplifyBinaryIntrinsic(F, Call->getArgOperand(0), |
| 5098 | Call->getArgOperand(1), Q); |
Sanjay Patel | f52eeb1 | 2018-07-29 14:42:08 +0000 | [diff] [blame] | 5099 | |
| 5100 | // Handle intrinsics with 3 or more arguments. |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 5101 | switch (IID) { |
Philip Reames | d8d9b7b | 2019-04-22 19:30:01 +0000 | [diff] [blame] | 5102 | case Intrinsic::masked_load: |
| 5103 | case Intrinsic::masked_gather: { |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5104 | Value *MaskArg = Call->getArgOperand(2); |
| 5105 | Value *PassthruArg = Call->getArgOperand(3); |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 5106 | // If the mask is all zeros or undef, the "passthru" argument is the result. |
| 5107 | if (maskIsAllZeroOrUndef(MaskArg)) |
| 5108 | return PassthruArg; |
| 5109 | return nullptr; |
| 5110 | } |
Sanjay Patel | 54421ce | 2018-07-29 16:36:38 +0000 | [diff] [blame] | 5111 | case Intrinsic::fshl: |
| 5112 | case Intrinsic::fshr: { |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5113 | Value *Op0 = Call->getArgOperand(0), *Op1 = Call->getArgOperand(1), |
| 5114 | *ShAmtArg = Call->getArgOperand(2); |
Sanjay Patel | 14ab917 | 2018-11-20 17:34:59 +0000 | [diff] [blame] | 5115 | |
| 5116 | // If both operands are undef, the result is undef. |
| 5117 | if (match(Op0, m_Undef()) && match(Op1, m_Undef())) |
| 5118 | return UndefValue::get(F->getReturnType()); |
| 5119 | |
| 5120 | // If shift amount is undef, assume it is zero. |
| 5121 | if (match(ShAmtArg, m_Undef())) |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5122 | return Call->getArgOperand(IID == Intrinsic::fshl ? 0 : 1); |
Sanjay Patel | 14ab917 | 2018-11-20 17:34:59 +0000 | [diff] [blame] | 5123 | |
Sanjay Patel | 54421ce | 2018-07-29 16:36:38 +0000 | [diff] [blame] | 5124 | const APInt *ShAmtC; |
| 5125 | if (match(ShAmtArg, m_APInt(ShAmtC))) { |
| 5126 | // If there's effectively no shift, return the 1st arg or 2nd arg. |
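| | // (fshl returns the first operand for a zero shift, fshr returns the second;
| | // the shift amount is interpreted modulo the bit width.)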
Sanjay Patel | 54421ce | 2018-07-29 16:36:38 +0000 | [diff] [blame] | 5127 | APInt BitWidth = APInt(ShAmtC->getBitWidth(), ShAmtC->getBitWidth()); |
| 5128 | if (ShAmtC->urem(BitWidth).isNullValue()) |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5129 | return Call->getArgOperand(IID == Intrinsic::fshl ? 0 : 1); |
Sanjay Patel | 54421ce | 2018-07-29 16:36:38 +0000 | [diff] [blame] | 5130 | } |
| 5131 | return nullptr; |
| 5132 | } |
Matt Arsenault | 8260666 | 2017-01-11 00:57:54 +0000 | [diff] [blame] | 5133 | default: |
| 5134 | return nullptr; |
| 5135 | } |
Michael Ilseman | 5485729 | 2013-02-07 19:26:05 +0000 | [diff] [blame] | 5136 | } |
| 5137 | |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5138 | Value *llvm::SimplifyCall(CallBase *Call, const SimplifyQuery &Q) { |
| 5139 | Value *Callee = Call->getCalledValue(); |
Chandler Carruth | 9dc3558 | 2012-12-28 11:30:55 +0000 | [diff] [blame] | 5140 | |
Dan Gohman | 85977e6 | 2011-11-04 18:32:42 +0000 | [diff] [blame] | 5141 | // call undef -> undef |
David Majnemer | bb53d23 | 2016-06-25 07:37:30 +0000 | [diff] [blame] | 5142 | // call null -> undef |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5143 | if (isa<UndefValue>(Callee) || isa<ConstantPointerNull>(Callee)) |
| 5144 | return UndefValue::get(Call->getType()); |
Dan Gohman | 85977e6 | 2011-11-04 18:32:42 +0000 | [diff] [blame] | 5145 | |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5146 | Function *F = dyn_cast<Function>(Callee); |
Chandler Carruth | f618215 | 2012-12-28 14:23:29 +0000 | [diff] [blame] | 5147 | if (!F) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5148 | return nullptr; |
Chandler Carruth | f618215 | 2012-12-28 14:23:29 +0000 | [diff] [blame] | 5149 | |
David Majnemer | 1503258 | 2015-05-22 03:56:46 +0000 | [diff] [blame] | 5150 | if (F->isIntrinsic()) |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5151 | if (Value *Ret = simplifyIntrinsic(Call, Q)) |
Michael Ilseman | 5485729 | 2013-02-07 19:26:05 +0000 | [diff] [blame] | 5152 | return Ret; |
| 5153 | |
Chandler Carruth | dac20a8 | 2019-02-11 07:54:10 +0000 | [diff] [blame] | 5154 | if (!canConstantFoldCallTo(Call, F)) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5155 | return nullptr; |
Chandler Carruth | f618215 | 2012-12-28 14:23:29 +0000 | [diff] [blame] | 5156 | |
| 5157 | SmallVector<Constant *, 4> ConstantArgs; |
Tim Northover | 030bb3d | 2019-07-11 13:11:44 +0000 | [diff] [blame] | 5158 | unsigned NumArgs = Call->getNumArgOperands(); |
| 5159 | ConstantArgs.reserve(NumArgs); |
| 5160 | for (auto &Arg : Call->args()) { |
| 5161 | Constant *C = dyn_cast<Constant>(&Arg); |
Chandler Carruth | f618215 | 2012-12-28 14:23:29 +0000 | [diff] [blame] | 5162 | if (!C) |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 5163 | return nullptr; |
Chandler Carruth | f618215 | 2012-12-28 14:23:29 +0000 | [diff] [blame] | 5164 | ConstantArgs.push_back(C); |
| 5165 | } |
| 5166 | |
Chandler Carruth | dac20a8 | 2019-02-11 07:54:10 +0000 | [diff] [blame] | 5167 | return ConstantFoldCall(Call, F, ConstantArgs, Q.TLI); |
Dan Gohman | 85977e6 | 2011-11-04 18:32:42 +0000 | [diff] [blame] | 5168 | } |
| 5169 | |
Sanjay Patel | 472cc78 | 2016-01-11 22:14:42 +0000 | [diff] [blame] | 5170 | /// See if we can compute a simplified version of this instruction. |
| 5171 | /// If not, this returns null. |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5172 | |
Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 5173 | Value *llvm::SimplifyInstruction(Instruction *I, const SimplifyQuery &SQ, |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5174 | OptimizationRemarkEmitter *ORE) { |
Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 5175 | const SimplifyQuery Q = SQ.CxtI ? SQ : SQ.getWithInstruction(I); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5176 | Value *Result; |
| 5177 | |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5178 | switch (I->getOpcode()) { |
| 5179 | default: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5180 | Result = ConstantFoldInstruction(I, Q.DL, Q.TLI); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5181 | break; |
Cameron McInally | c316769 | 2019-05-06 16:05:10 +0000 | [diff] [blame] | 5182 | case Instruction::FNeg: |
| 5183 | Result = SimplifyFNegInst(I->getOperand(0), I->getFastMathFlags(), Q); |
| 5184 | break; |
Michael Ilseman | bb6f691 | 2012-12-12 00:27:46 +0000 | [diff] [blame] | 5185 | case Instruction::FAdd: |
| 5186 | Result = SimplifyFAddInst(I->getOperand(0), I->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5187 | I->getFastMathFlags(), Q); |
Michael Ilseman | bb6f691 | 2012-12-12 00:27:46 +0000 | [diff] [blame] | 5188 | break; |
Chris Lattner | 3d9823b | 2009-11-27 17:42:22 +0000 | [diff] [blame] | 5189 | case Instruction::Add: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 5190 | Result = |
| 5191 | SimplifyAddInst(I->getOperand(0), I->getOperand(1), |
| 5192 | Q.IIQ.hasNoSignedWrap(cast<BinaryOperator>(I)), |
| 5193 | Q.IIQ.hasNoUnsignedWrap(cast<BinaryOperator>(I)), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5194 | break; |
Michael Ilseman | bb6f691 | 2012-12-12 00:27:46 +0000 | [diff] [blame] | 5195 | case Instruction::FSub: |
| 5196 | Result = SimplifyFSubInst(I->getOperand(0), I->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5197 | I->getFastMathFlags(), Q); |
Michael Ilseman | bb6f691 | 2012-12-12 00:27:46 +0000 | [diff] [blame] | 5198 | break; |
Duncan Sands | 0a2c4168 | 2010-12-15 14:07:39 +0000 | [diff] [blame] | 5199 | case Instruction::Sub: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 5200 | Result = |
| 5201 | SimplifySubInst(I->getOperand(0), I->getOperand(1), |
| 5202 | Q.IIQ.hasNoSignedWrap(cast<BinaryOperator>(I)), |
| 5203 | Q.IIQ.hasNoUnsignedWrap(cast<BinaryOperator>(I)), Q); |
Duncan Sands | 0a2c4168 | 2010-12-15 14:07:39 +0000 | [diff] [blame] | 5204 | break; |
Michael Ilseman | be9137a | 2012-11-27 00:46:26 +0000 | [diff] [blame] | 5205 | case Instruction::FMul: |
| 5206 | Result = SimplifyFMulInst(I->getOperand(0), I->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5207 | I->getFastMathFlags(), Q); |
Michael Ilseman | be9137a | 2012-11-27 00:46:26 +0000 | [diff] [blame] | 5208 | break; |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 5209 | case Instruction::Mul: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5210 | Result = SimplifyMulInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | d0eb6d3 | 2010-12-21 14:00:22 +0000 | [diff] [blame] | 5211 | break; |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 5212 | case Instruction::SDiv: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5213 | Result = SimplifySDivInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 5214 | break; |
| 5215 | case Instruction::UDiv: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5216 | Result = SimplifyUDivInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | 771e82a | 2011-01-28 16:51:11 +0000 | [diff] [blame] | 5217 | break; |
Frits van Bommel | c254966 | 2011-01-29 15:26:31 +0000 | [diff] [blame] | 5218 | case Instruction::FDiv: |
Mehdi Amini | cd3ca6f | 2015-02-23 18:30:25 +0000 | [diff] [blame] | 5219 | Result = SimplifyFDivInst(I->getOperand(0), I->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5220 | I->getFastMathFlags(), Q); |
Frits van Bommel | c254966 | 2011-01-29 15:26:31 +0000 | [diff] [blame] | 5221 | break; |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 5222 | case Instruction::SRem: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5223 | Result = SimplifySRemInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 5224 | break; |
| 5225 | case Instruction::URem: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5226 | Result = SimplifyURemInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 5227 | break; |
| 5228 | case Instruction::FRem: |
Mehdi Amini | cd3ca6f | 2015-02-23 18:30:25 +0000 | [diff] [blame] | 5229 | Result = SimplifyFRemInst(I->getOperand(0), I->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5230 | I->getFastMathFlags(), Q); |
Duncan Sands | a3e3699 | 2011-05-02 16:27:02 +0000 | [diff] [blame] | 5231 | break; |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 5232 | case Instruction::Shl: |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 5233 | Result = |
| 5234 | SimplifyShlInst(I->getOperand(0), I->getOperand(1), |
| 5235 | Q.IIQ.hasNoSignedWrap(cast<BinaryOperator>(I)), |
| 5236 | Q.IIQ.hasNoUnsignedWrap(cast<BinaryOperator>(I)), Q); |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 5237 | break; |
| 5238 | case Instruction::LShr: |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 5239 | Result = SimplifyLShrInst(I->getOperand(0), I->getOperand(1), |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 5240 | Q.IIQ.isExact(cast<BinaryOperator>(I)), Q); |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 5241 | break; |
| 5242 | case Instruction::AShr: |
Chris Lattner | 9e4aa02 | 2011-02-09 17:15:04 +0000 | [diff] [blame] | 5243 | Result = SimplifyAShrInst(I->getOperand(0), I->getOperand(1), |
Florian Hahn | 19f9e32 | 2018-08-17 14:39:04 +0000 | [diff] [blame] | 5244 | Q.IIQ.isExact(cast<BinaryOperator>(I)), Q); |
Duncan Sands | 7f60dc1 | 2011-01-14 00:37:45 +0000 | [diff] [blame] | 5245 | break; |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5246 | case Instruction::And: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5247 | Result = SimplifyAndInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5248 | break; |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5249 | case Instruction::Or: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5250 | Result = SimplifyOrInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5251 | break; |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 5252 | case Instruction::Xor: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5253 | Result = SimplifyXorInst(I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | c89ac07 | 2010-11-17 18:52:15 +0000 | [diff] [blame] | 5254 | break; |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5255 | case Instruction::ICmp: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5256 | Result = SimplifyICmpInst(cast<ICmpInst>(I)->getPredicate(), |
| 5257 | I->getOperand(0), I->getOperand(1), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5258 | break; |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5259 | case Instruction::FCmp: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5260 | Result = |
| 5261 | SimplifyFCmpInst(cast<FCmpInst>(I)->getPredicate(), I->getOperand(0), |
| 5262 | I->getOperand(1), I->getFastMathFlags(), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5263 | break; |
Chris Lattner | c707fa9 | 2010-04-20 05:32:14 +0000 | [diff] [blame] | 5264 | case Instruction::Select: |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5265 | Result = SimplifySelectInst(I->getOperand(0), I->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5266 | I->getOperand(2), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5267 | break; |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 5268 | case Instruction::GetElementPtr: { |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5269 | SmallVector<Value *, 8> Ops(I->op_begin(), I->op_end()); |
Manuel Jacob | 20c6d5b | 2016-01-17 22:46:43 +0000 | [diff] [blame] | 5270 | Result = SimplifyGEPInst(cast<GetElementPtrInst>(I)->getSourceElementType(), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5271 | Ops, Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5272 | break; |
Chris Lattner | 8574aba | 2009-11-27 00:29:05 +0000 | [diff] [blame] | 5273 | } |
Duncan Sands | fd26a95 | 2011-09-05 06:52:48 +0000 | [diff] [blame] | 5274 | case Instruction::InsertValue: { |
| 5275 | InsertValueInst *IV = cast<InsertValueInst>(I); |
| 5276 | Result = SimplifyInsertValueInst(IV->getAggregateOperand(), |
| 5277 | IV->getInsertedValueOperand(), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5278 | IV->getIndices(), Q); |
Duncan Sands | fd26a95 | 2011-09-05 06:52:48 +0000 | [diff] [blame] | 5279 | break; |
| 5280 | } |
Igor Laevsky | e0edb66 | 2017-12-13 11:21:18 +0000 | [diff] [blame] | 5281 | case Instruction::InsertElement: { |
| 5282 | auto *IE = cast<InsertElementInst>(I); |
| 5283 | Result = SimplifyInsertElementInst(IE->getOperand(0), IE->getOperand(1), |
| 5284 | IE->getOperand(2), Q); |
| 5285 | break; |
| 5286 | } |
David Majnemer | 25a796e | 2015-07-13 01:15:46 +0000 | [diff] [blame] | 5287 | case Instruction::ExtractValue: { |
| 5288 | auto *EVI = cast<ExtractValueInst>(I); |
| 5289 | Result = SimplifyExtractValueInst(EVI->getAggregateOperand(), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5290 | EVI->getIndices(), Q); |
David Majnemer | 25a796e | 2015-07-13 01:15:46 +0000 | [diff] [blame] | 5291 | break; |
| 5292 | } |
David Majnemer | 599ca44 | 2015-07-13 01:15:53 +0000 | [diff] [blame] | 5293 | case Instruction::ExtractElement: { |
| 5294 | auto *EEI = cast<ExtractElementInst>(I); |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5295 | Result = SimplifyExtractElementInst(EEI->getVectorOperand(), |
| 5296 | EEI->getIndexOperand(), Q); |
David Majnemer | 599ca44 | 2015-07-13 01:15:53 +0000 | [diff] [blame] | 5297 | break; |
| 5298 | } |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 5299 | case Instruction::ShuffleVector: { |
| 5300 | auto *SVI = cast<ShuffleVectorInst>(I); |
| 5301 | Result = SimplifyShuffleVectorInst(SVI->getOperand(0), SVI->getOperand(1), |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5302 | SVI->getMask(), SVI->getType(), Q); |
Zvi Rackover | 8f46065 | 2017-04-03 22:05:30 +0000 | [diff] [blame] | 5303 | break; |
| 5304 | } |
Duncan Sands | 4581ddc | 2010-11-14 13:30:18 +0000 | [diff] [blame] | 5305 | case Instruction::PHI: |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5306 | Result = SimplifyPHINode(cast<PHINode>(I), Q); |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5307 | break; |
Chandler Carruth | 9dc3558 | 2012-12-28 11:30:55 +0000 | [diff] [blame] | 5308 | case Instruction::Call: { |
Chandler Carruth | dac20a8 | 2019-02-11 07:54:10 +0000 | [diff] [blame] | 5309 | Result = SimplifyCall(cast<CallInst>(I), Q); |
Dan Gohman | 85977e6 | 2011-11-04 18:32:42 +0000 | [diff] [blame] | 5310 | break; |
Chandler Carruth | 9dc3558 | 2012-12-28 11:30:55 +0000 | [diff] [blame] | 5311 | } |
David Majnemer | 6774d61 | 2016-07-26 17:58:05 +0000 | [diff] [blame] | 5312 | #define HANDLE_CAST_INST(num, opc, clas) case Instruction::opc: |
| 5313 | #include "llvm/IR/Instruction.def" |
| 5314 | #undef HANDLE_CAST_INST |
Daniel Berlin | 5e3fcb1 | 2017-04-26 04:09:56 +0000 | [diff] [blame] | 5315 | Result = |
| 5316 | SimplifyCastInst(I->getOpcode(), I->getOperand(0), I->getType(), Q); |
David Majnemer | a90a621 | 2016-07-26 05:52:29 +0000 | [diff] [blame] | 5317 | break; |
Craig Topper | 81c03a7 | 2017-04-12 22:54:24 +0000 | [diff] [blame] | 5318 | case Instruction::Alloca: |
| 5319 | // No simplifications for Alloca, and it can't be constant folded. |
| 5320 | Result = nullptr; |
| 5321 | break; |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5322 | } |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5323 | |
Hal Finkel | f2199b2 | 2015-10-23 20:37:08 +0000 | [diff] [blame] | 5324 | // In general, it is possible for computeKnownBits to determine all bits in a |
| 5325 | // value even when the operands are not all constants. |
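// Illustrative sketch (hypothetical IR, shown only to motivate the fallback
// below; whether an earlier opcode-specific simplification already handles a
// particular case may vary):
//
//   %a = or i32 %x, 7
//   %b = or i32 %y, -8
//   %c = or i32 %a, %b
//
// Every bit of %c is known to be one even though %x and %y are unknown, so
// the known-bits fallback can replace %c with i32 -1.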
Sanjay Patel | 8ca30ab | 2016-11-27 21:07:28 +0000 | [diff] [blame] | 5326 | if (!Result && I->getType()->isIntOrIntVectorTy()) { |
Craig Topper | 8205a1a | 2017-05-24 16:53:07 +0000 | [diff] [blame] | 5327 | KnownBits Known = computeKnownBits(I, Q.DL, /*Depth*/ 0, Q.AC, I, Q.DT, ORE); |
Craig Topper | 8189a87 | 2017-05-03 23:12:29 +0000 | [diff] [blame] | 5328 | if (Known.isConstant()) |
| 5329 | Result = ConstantInt::get(I->getType(), Known.getConstant()); |
Hal Finkel | f2199b2 | 2015-10-23 20:37:08 +0000 | [diff] [blame] | 5330 | } |
| 5331 | |
Duncan Sands | 64e41cf | 2010-11-17 08:35:29 +0000 | [diff] [blame] | 5332 | // If called on unreachable code, the above logic may report that the |
| 5333 | // instruction simplified to itself. Make life easier for users by |
Duncan Sands | 019a418 | 2010-12-15 11:02:22 +0000 | [diff] [blame] | 5334 | // detecting that case here, returning a safe value instead. |
| 5335 | return Result == I ? UndefValue::get(I->getType()) : Result; |
Chris Lattner | fb7f87d | 2009-11-10 01:08:51 +0000 | [diff] [blame] | 5336 | } |
| 5337 | |
Adrian Prantl | 5f8f34e4 | 2018-05-01 15:54:18 +0000 | [diff] [blame] | 5338 | /// Implementation of recursive simplification through an instruction's |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5339 | /// uses. |
Chris Lattner | 852d6d6 | 2009-11-10 22:26:15 +0000 | [diff] [blame] | 5340 | /// |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5341 | /// This is the common implementation of the recursive simplification routines. |
| 5342 | /// If we have a pre-simplified value in 'SimpleV', that is forcibly used to |
| 5343 | /// replace the instruction 'I'. Otherwise, we simply add 'I' to the list of |
| 5344 | /// instructions to process and attempt to simplify it using |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5345 | /// InstructionSimplify. Recursively visited users which could not be |
| 5346 | /// simplified themselves are added to the optional UnsimplifiedUsers set for |
| 5347 | /// further processing by the caller. |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5348 | /// |
| 5349 | /// This routine returns 'true' only when *it* simplifies something. The passed |
| 5350 | /// in simplified value does not count toward this. |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5351 | static bool replaceAndRecursivelySimplifyImpl( |
| 5352 | Instruction *I, Value *SimpleV, const TargetLibraryInfo *TLI, |
| 5353 | const DominatorTree *DT, AssumptionCache *AC, |
| 5354 | SmallSetVector<Instruction *, 8> *UnsimplifiedUsers = nullptr) { |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5355 | bool Simplified = false; |
Chandler Carruth | 77e8bfb | 2012-03-24 22:34:26 +0000 | [diff] [blame] | 5356 | SmallSetVector<Instruction *, 8> Worklist; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5357 | const DataLayout &DL = I->getModule()->getDataLayout(); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 5358 | |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5359 | // If we have an explicit value to collapse to, do that round of the |
| 5360 | // simplification loop by hand initially. |
| 5361 | if (SimpleV) { |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 5362 | for (User *U : I->users()) |
| 5363 | if (U != I) |
| 5364 | Worklist.insert(cast<Instruction>(U)); |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 5365 | |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5366 | // Replace the instruction with its simplified value. |
| 5367 | I->replaceAllUsesWith(SimpleV); |
Chris Lattner | 19eff2a | 2010-07-15 06:36:08 +0000 | [diff] [blame] | 5368 | |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5369 | // Gracefully handle edge cases where the instruction is not wired into any |
| 5370 | // parent block. |
Chandler Carruth | 9ae926b | 2018-08-26 09:51:22 +0000 | [diff] [blame] | 5371 | if (I->getParent() && !I->isEHPad() && !I->isTerminator() && |
David Majnemer | 909793f | 2016-08-04 04:24:02 +0000 | [diff] [blame] | 5372 | !I->mayHaveSideEffects()) |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5373 | I->eraseFromParent(); |
| 5374 | } else { |
Chandler Carruth | 77e8bfb | 2012-03-24 22:34:26 +0000 | [diff] [blame] | 5375 | Worklist.insert(I); |
Chris Lattner | 852d6d6 | 2009-11-10 22:26:15 +0000 | [diff] [blame] | 5376 | } |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 5377 | |
Chandler Carruth | 77e8bfb | 2012-03-24 22:34:26 +0000 | [diff] [blame] | 5378 | // Note that we must test the size on each iteration, as the worklist can grow. |
| 5379 | for (unsigned Idx = 0; Idx != Worklist.size(); ++Idx) { |
| 5380 | I = Worklist[Idx]; |
Duncan Sands | 7e800d6 | 2010-11-14 11:23:23 +0000 | [diff] [blame] | 5381 | |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5382 | // See if this instruction simplifies. |
Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 5383 | SimpleV = SimplifyInstruction(I, {DL, TLI, DT, AC}); |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5384 | if (!SimpleV) { |
| 5385 | if (UnsimplifiedUsers) |
| 5386 | UnsimplifiedUsers->insert(I); |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5387 | continue; |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5388 | } |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5389 | |
| 5390 | Simplified = true; |
| 5391 | |
| 5392 | // Stash away all the uses of the old instruction so we can check them for |
| 5393 | // recursive simplifications after a RAUW. This is cheaper than checking all |
| 5394 | // uses of SimpleV on the recursive step in most cases. |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 5395 | for (User *U : I->users()) |
| 5396 | Worklist.insert(cast<Instruction>(U)); |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5397 | |
| 5398 | // Replace the instruction with its simplified value. |
| 5399 | I->replaceAllUsesWith(SimpleV); |
| 5400 | |
| 5401 | // Gracefully handle edge cases where the instruction is not wired into any |
| 5402 | // parent block. |
Chandler Carruth | 9ae926b | 2018-08-26 09:51:22 +0000 | [diff] [blame] | 5403 | if (I->getParent() && !I->isEHPad() && !I->isTerminator() && |
David Majnemer | 909793f | 2016-08-04 04:24:02 +0000 | [diff] [blame] | 5404 | !I->mayHaveSideEffects()) |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5405 | I->eraseFromParent(); |
| 5406 | } |
| 5407 | return Simplified; |
| 5408 | } |
| 5409 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 5410 | bool llvm::recursivelySimplifyInstruction(Instruction *I, |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5411 | const TargetLibraryInfo *TLI, |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 5412 | const DominatorTree *DT, |
| 5413 | AssumptionCache *AC) { |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5414 | return replaceAndRecursivelySimplifyImpl(I, nullptr, TLI, DT, AC, nullptr); |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5415 | } |
| 5416 | |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5417 | bool llvm::replaceAndRecursivelySimplify( |
| 5418 | Instruction *I, Value *SimpleV, const TargetLibraryInfo *TLI, |
| 5419 | const DominatorTree *DT, AssumptionCache *AC, |
| 5420 | SmallSetVector<Instruction *, 8> *UnsimplifiedUsers) { |
Chandler Carruth | cf1b585 | 2012-03-24 21:11:24 +0000 | [diff] [blame] | 5421 | assert(I != SimpleV && "replaceAndRecursivelySimplify(X,X) is not valid!"); |
| 5422 | assert(SimpleV && "Must provide a simplified value."); |
Joerg Sonnenberger | 799c966 | 2019-08-29 13:22:30 +0000 | [diff] [blame] | 5423 | return replaceAndRecursivelySimplifyImpl(I, SimpleV, TLI, DT, AC, |
| 5424 | UnsimplifiedUsers); |
Chris Lattner | 852d6d6 | 2009-11-10 22:26:15 +0000 | [diff] [blame] | 5425 | } |
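// Usage sketch (hypothetical caller; 'Cmp', 'TLI', 'DT' and 'AC' are assumed
// to be in scope): a pass that has proven the comparison 'Cmp' is always true
// can propagate that fact and fold Cmp's users in one call:
//
//   SmallSetVector<Instruction *, 8> Unsimplified;
//   bool Changed = replaceAndRecursivelySimplify(
//       Cmp, ConstantInt::getTrue(Cmp->getContext()), TLI, DT, AC,
//       &Unsimplified);
//
// Users that could not be simplified are left in 'Unsimplified' for further
// processing by the caller.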
Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 5426 | |
| 5427 | namespace llvm { |
| 5428 | const SimplifyQuery getBestSimplifyQuery(Pass &P, Function &F) { |
| 5429 | auto *DTWP = P.getAnalysisIfAvailable<DominatorTreeWrapperPass>(); |
| 5430 | auto *DT = DTWP ? &DTWP->getDomTree() : nullptr; |
| 5431 | auto *TLIWP = P.getAnalysisIfAvailable<TargetLibraryInfoWrapperPass>(); |
Teresa Johnson | 9c27b59 | 2019-09-07 03:09:36 +0000 | [diff] [blame] | 5432 | auto *TLI = TLIWP ? &TLIWP->getTLI(F) : nullptr; |
Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 5433 | auto *ACWP = P.getAnalysisIfAvailable<AssumptionCacheTracker>(); |
| 5434 | auto *AC = ACWP ? &ACWP->getAssumptionCache(F) : nullptr; |
| 5435 | return {F.getParent()->getDataLayout(), TLI, DT, AC}; |
| 5436 | } |
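// Usage sketch (hypothetical legacy pass; 'F' and 'I' are assumed names):
// inside runOnFunction a caller could write
//
//   const SimplifyQuery SQ = getBestSimplifyQuery(*this, F);
//   if (Value *V = SimplifyInstruction(&I, SQ))
//     I.replaceAllUsesWith(V);
//
// picking up whichever of the dominator tree, target library info and
// assumption cache the pass manager happens to have cached.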
| 5437 | |
| 5438 | const SimplifyQuery getBestSimplifyQuery(LoopStandardAnalysisResults &AR, |
| 5439 | const DataLayout &DL) { |
| 5440 | return {DL, &AR.TLI, &AR.DT, &AR.AC}; |
| 5441 | } |
| 5442 | |
| 5443 | template <class T, class... TArgs> |
| 5444 | const SimplifyQuery getBestSimplifyQuery(AnalysisManager<T, TArgs...> &AM, |
| 5445 | Function &F) { |
| 5446 | auto *DT = AM.template getCachedResult<DominatorTreeAnalysis>(F); |
| 5447 | auto *TLI = AM.template getCachedResult<TargetLibraryAnalysis>(F); |
| 5448 | auto *AC = AM.template getCachedResult<AssumptionAnalysis>(F); |
| 5449 | return {F.getParent()->getDataLayout(), TLI, DT, AC}; |
| 5450 | } |
| 5451 | template const SimplifyQuery getBestSimplifyQuery(AnalysisManager<Function> &, |
| 5452 | Function &); |
| 5453 | } // namespace llvm