Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1 | //===- InlineCost.cpp - Cost analysis for inliner -------------------------===// |
| 2 | // |
| 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This file implements inline cost analysis. |
| 11 | // |
| 12 | //===----------------------------------------------------------------------===// |
| 13 | |
| 14 | #include "llvm/Analysis/InlineCost.h" |
Chandler Carruth | ed0881b | 2012-12-03 16:50:05 +0000 | [diff] [blame] | 15 | #include "llvm/ADT/STLExtras.h" |
| 16 | #include "llvm/ADT/SetVector.h" |
| 17 | #include "llvm/ADT/SmallPtrSet.h" |
| 18 | #include "llvm/ADT/SmallVector.h" |
| 19 | #include "llvm/ADT/Statistic.h" |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 20 | #include "llvm/Analysis/AssumptionCache.h" |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 21 | #include "llvm/Analysis/CodeMetrics.h" |
Chandler Carruth | d990388 | 2015-01-14 11:23:27 +0000 | [diff] [blame] | 22 | #include "llvm/Analysis/ConstantFolding.h" |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 23 | #include "llvm/Analysis/InstructionSimplify.h" |
Chandler Carruth | 42f3dce | 2013-01-21 11:55:09 +0000 | [diff] [blame] | 24 | #include "llvm/Analysis/TargetTransformInfo.h" |
Chandler Carruth | 219b89b | 2014-03-04 11:01:28 +0000 | [diff] [blame] | 25 | #include "llvm/IR/CallSite.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 26 | #include "llvm/IR/CallingConv.h" |
| 27 | #include "llvm/IR/DataLayout.h" |
Chandler Carruth | 03eb0de | 2014-03-04 10:40:04 +0000 | [diff] [blame] | 28 | #include "llvm/IR/GetElementPtrTypeIterator.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 29 | #include "llvm/IR/GlobalAlias.h" |
Chandler Carruth | 7da14f1 | 2014-03-06 03:23:41 +0000 | [diff] [blame] | 30 | #include "llvm/IR/InstVisitor.h" |
Chandler Carruth | 9fb823b | 2013-01-02 11:36:10 +0000 | [diff] [blame] | 31 | #include "llvm/IR/IntrinsicInst.h" |
| 32 | #include "llvm/IR/Operator.h" |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 33 | #include "llvm/Support/Debug.h" |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 34 | #include "llvm/Support/raw_ostream.h" |
Eric Christopher | 2dfbd7e | 2011-02-05 00:49:15 +0000 | [diff] [blame] | 35 | |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 36 | using namespace llvm; |
| 37 | |
Chandler Carruth | f1221bd | 2014-04-22 02:48:03 +0000 | [diff] [blame] | 38 | #define DEBUG_TYPE "inline-cost" |
| 39 | |
Chandler Carruth | 7ae90d4 | 2012-04-11 10:15:10 +0000 | [diff] [blame] | 40 | STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed"); |
| 41 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 42 | namespace { |
Chandler Carruth | a308955 | 2012-03-14 07:32:53 +0000 | [diff] [blame] | 43 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 44 | class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> { |
| 45 | typedef InstVisitor<CallAnalyzer, bool> Base; |
| 46 | friend class InstVisitor<CallAnalyzer, bool>; |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 47 | |
Chandler Carruth | 42f3dce | 2013-01-21 11:55:09 +0000 | [diff] [blame] | 48 | /// The TargetTransformInfo available for this compilation. |
| 49 | const TargetTransformInfo &TTI; |
| 50 | |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 51 | /// The cache of @llvm.assume intrinsics. |
Bjorn Steinbrink | 6f972a1 | 2015-02-12 21:04:22 +0000 | [diff] [blame] | 52 | AssumptionCacheTracker *ACT; |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 53 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 54 | // The called function. |
| 55 | Function &F; |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 56 | |
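// The inlining cost threshold in effect for this call site, and the cost the
// analysis has accumulated so far against it.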
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 57 | int Threshold; |
| 58 | int Cost; |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 59 | |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 60 | bool IsCallerRecursive; |
| 61 | bool IsRecursiveCall; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 62 | bool ExposesReturnsTwice; |
| 63 | bool HasDynamicAlloca; |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 64 | bool ContainsNoDuplicateCall; |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 65 | bool HasReturn; |
| 66 | bool HasIndirectBr; |
Reid Kleckner | 223de26 | 2015-04-14 20:38:14 +0000 | [diff] [blame] | 67 | bool HasFrameEscape; |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 68 | |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 69 | /// Number of bytes allocated statically by the callee. |
| 70 | uint64_t AllocatedSize; |
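
// Counts of all instructions and of vector instructions analyzed so far, and
// the bonus thresholds used by the vector-density heuristic (set up and
// applied in analyzeCall).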
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 71 | unsigned NumInstructions, NumVectorInstructions; |
| 72 | int FiftyPercentVectorBonus, TenPercentVectorBonus; |
| 73 | int VectorBonus; |
| 74 | |
| 75 | // While we walk the potentially-inlined instructions, we build up and |
| 76 | // maintain a mapping of simplified values specific to this callsite. The |
| 77 | // idea is to propagate any special information we have about arguments to |
| 78 | // this call through the inlinable section of the function, and account for |
| 79 | // likely simplifications post-inlining. The most important aspect we track |
| 80 | // is CFG altering simplifications -- when we prove a basic block dead, that |
| 81 | // can cause dramatic shifts in the cost of inlining a function. |
| 82 | DenseMap<Value *, Constant *> SimplifiedValues; |
| 83 | |
| 84 | // Keep track of the values which map back (through function arguments) to |
| 85 | // allocas on the caller stack which could be simplified through SROA. |
| 86 | DenseMap<Value *, Value *> SROAArgValues; |
| 87 | |
| 88 | // The mapping of caller Alloca values to their accumulated cost savings. If |
| 89 | // we have to disable SROA for one of the allocas, this tells us how much |
| 90 | // cost must be added. |
| 91 | DenseMap<Value *, int> SROAArgCosts; |
| 92 | |
| 93 | // Keep track of values which map to a pointer base and constant offset. |
| 94 | DenseMap<Value *, std::pair<Value *, APInt> > ConstantOffsetPtrs; |
| 95 | |
| 96 | // Custom simplification helper routines. |
| 97 | bool isAllocaDerivedArg(Value *V); |
| 98 | bool lookupSROAArgAndCost(Value *V, Value *&Arg, |
| 99 | DenseMap<Value *, int>::iterator &CostIt); |
| 100 | void disableSROA(DenseMap<Value *, int>::iterator CostIt); |
| 101 | void disableSROA(Value *V); |
| 102 | void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt, |
| 103 | int InstructionCost); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 104 | bool isGEPOffsetConstant(GetElementPtrInst &GEP); |
| 105 | bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset); |
Chandler Carruth | 753e21d | 2012-12-28 14:23:32 +0000 | [diff] [blame] | 106 | bool simplifyCallSite(Function *F, CallSite CS); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 107 | ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V); |
| 108 | |
| 109 | // Custom analysis routines. |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 110 | bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 111 | |
| 112 | // Disable several entry points to the visitor so we don't accidentally use |
| 113 | // them by declaring but not defining them here. |
| 114 | void visit(Module *); void visit(Module &); |
| 115 | void visit(Function *); void visit(Function &); |
| 116 | void visit(BasicBlock *); void visit(BasicBlock &); |
| 117 | |
| 118 | // Provide base case for our instruction visit. |
| 119 | bool visitInstruction(Instruction &I); |
| 120 | |
| 121 | // Our visit overrides. |
| 122 | bool visitAlloca(AllocaInst &I); |
| 123 | bool visitPHI(PHINode &I); |
| 124 | bool visitGetElementPtr(GetElementPtrInst &I); |
| 125 | bool visitBitCast(BitCastInst &I); |
| 126 | bool visitPtrToInt(PtrToIntInst &I); |
| 127 | bool visitIntToPtr(IntToPtrInst &I); |
| 128 | bool visitCastInst(CastInst &I); |
| 129 | bool visitUnaryInstruction(UnaryInstruction &I); |
Matt Arsenault | 727aa34 | 2013-07-20 04:09:00 +0000 | [diff] [blame] | 130 | bool visitCmpInst(CmpInst &I); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 131 | bool visitSub(BinaryOperator &I); |
| 132 | bool visitBinaryOperator(BinaryOperator &I); |
| 133 | bool visitLoad(LoadInst &I); |
| 134 | bool visitStore(StoreInst &I); |
Chandler Carruth | 753e21d | 2012-12-28 14:23:32 +0000 | [diff] [blame] | 135 | bool visitExtractValue(ExtractValueInst &I); |
| 136 | bool visitInsertValue(InsertValueInst &I); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 137 | bool visitCallSite(CallSite CS); |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 138 | bool visitReturnInst(ReturnInst &RI); |
| 139 | bool visitBranchInst(BranchInst &BI); |
| 140 | bool visitSwitchInst(SwitchInst &SI); |
| 141 | bool visitIndirectBrInst(IndirectBrInst &IBI); |
| 142 | bool visitResumeInst(ResumeInst &RI); |
| 143 | bool visitUnreachableInst(UnreachableInst &I); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 144 | |
| 145 | public: |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 146 | CallAnalyzer(const TargetTransformInfo &TTI, AssumptionCacheTracker *ACT, |
| 147 | Function &Callee, int Threshold) |
| 148 | : TTI(TTI), ACT(ACT), F(Callee), Threshold(Threshold), Cost(0), |
Chandler Carruth | 42f3dce | 2013-01-21 11:55:09 +0000 | [diff] [blame] | 149 | IsCallerRecursive(false), IsRecursiveCall(false), |
| 150 | ExposesReturnsTwice(false), HasDynamicAlloca(false), |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 151 | ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false), |
Reid Kleckner | 223de26 | 2015-04-14 20:38:14 +0000 | [diff] [blame] | 152 | HasFrameEscape(false), AllocatedSize(0), NumInstructions(0), |
| 153 | NumVectorInstructions(0), FiftyPercentVectorBonus(0), |
| 154 | TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0), |
| 155 | NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0), |
| 156 | NumConstantPtrDiffs(0), NumInstructionsSimplified(0), |
| 157 | SROACostSavings(0), SROACostSavingsLost(0) {} |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 158 | |
| 159 | bool analyzeCall(CallSite CS); |
| 160 | |
| 161 | int getThreshold() { return Threshold; } |
| 162 | int getCost() { return Cost; } |
| 163 | |
| 164 | // Keep a bunch of stats about the cost savings found so we can print them |
| 165 | // out when debugging. |
| 166 | unsigned NumConstantArgs; |
| 167 | unsigned NumConstantOffsetPtrArgs; |
| 168 | unsigned NumAllocaArgs; |
| 169 | unsigned NumConstantPtrCmps; |
| 170 | unsigned NumConstantPtrDiffs; |
| 171 | unsigned NumInstructionsSimplified; |
| 172 | unsigned SROACostSavings; |
| 173 | unsigned SROACostSavingsLost; |
| 174 | |
| 175 | void dump(); |
| 176 | }; |
| 177 | |
| 178 | } // namespace |
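
// A rough sketch of how this analyzer is typically driven (the real entry
// point, getInlineCost, is defined later in this file; names here are
// illustrative):
//
//   CallAnalyzer CA(CalleeTTI, ACT, *Callee, Threshold);
//   bool Viable = CA.analyzeCall(CS);
//   // The inliner then compares CA.getCost() against CA.getThreshold().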
| 179 | |
| 180 | /// \brief Test whether the given value is an Alloca-derived function argument. |
| 181 | bool CallAnalyzer::isAllocaDerivedArg(Value *V) { |
| 182 | return SROAArgValues.count(V); |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 183 | } |
| 184 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 185 | /// \brief Lookup the SROA-candidate argument and cost iterator which V maps to. |
| 186 | /// Returns false if V does not map to a SROA-candidate. |
| 187 | bool CallAnalyzer::lookupSROAArgAndCost( |
| 188 | Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) { |
| 189 | if (SROAArgValues.empty() || SROAArgCosts.empty()) |
| 190 | return false; |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 191 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 192 | DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V); |
| 193 | if (ArgIt == SROAArgValues.end()) |
| 194 | return false; |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 195 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 196 | Arg = ArgIt->second; |
| 197 | CostIt = SROAArgCosts.find(Arg); |
| 198 | return CostIt != SROAArgCosts.end(); |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 199 | } |
| 200 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 201 | /// \brief Disable SROA for the candidate marked by this cost iterator. |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 202 | /// |
Benjamin Kramer | bde9176 | 2012-06-02 10:20:22 +0000 | [diff] [blame] | 203 | /// This marks the candidate as no longer viable for SROA, and adds the cost |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 204 | /// savings associated with it back into the inline cost measurement. |
| 205 | void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) { |
| 206 | // If we're no longer able to perform SROA we need to undo its cost savings |
| 207 | // and prevent subsequent analysis. |
| 208 | Cost += CostIt->second; |
| 209 | SROACostSavings -= CostIt->second; |
| 210 | SROACostSavingsLost += CostIt->second; |
| 211 | SROAArgCosts.erase(CostIt); |
| 212 | } |
| 213 | |
| 214 | /// \brief If 'V' maps to a SROA candidate, disable SROA for it. |
| 215 | void CallAnalyzer::disableSROA(Value *V) { |
| 216 | Value *SROAArg; |
| 217 | DenseMap<Value *, int>::iterator CostIt; |
| 218 | if (lookupSROAArgAndCost(V, SROAArg, CostIt)) |
| 219 | disableSROA(CostIt); |
| 220 | } |
| 221 | |
| 222 | /// \brief Accumulate the given cost for a particular SROA candidate. |
| 223 | void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt, |
| 224 | int InstructionCost) { |
| 225 | CostIt->second += InstructionCost; |
| 226 | SROACostSavings += InstructionCost; |
| 227 | } |
| 228 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 229 | /// \brief Check whether a GEP's indices are all constant. |
| 230 | /// |
| 231 | /// Respects any simplified values known during the analysis of this callsite. |
| 232 | bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) { |
| 233 | for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I) |
| 234 | if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I)) |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 235 | return false; |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 236 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 237 | return true; |
| 238 | } |
| 239 | |
| 240 | /// \brief Accumulate a constant GEP offset into an APInt if possible. |
| 241 | /// |
| 242 | /// Returns false if unable to compute the offset for any reason. Respects any |
| 243 | /// simplified values known during the analysis of this callsite. |
| 244 | bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 245 | const DataLayout &DL = F.getParent()->getDataLayout(); |
| 246 | unsigned IntPtrWidth = DL.getPointerSizeInBits(); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 247 | assert(IntPtrWidth == Offset.getBitWidth()); |
| 248 | |
| 249 | for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP); |
| 250 | GTI != GTE; ++GTI) { |
| 251 | ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand()); |
| 252 | if (!OpC) |
| 253 | if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand())) |
| 254 | OpC = dyn_cast<ConstantInt>(SimpleOp); |
| 255 | if (!OpC) |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 256 | return false; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 257 | if (OpC->isZero()) continue; |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 258 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 259 | // Handle a struct index, which adds its field offset to the pointer. |
| 260 | if (StructType *STy = dyn_cast<StructType>(*GTI)) { |
| 261 | unsigned ElementIdx = OpC->getZExtValue(); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 262 | const StructLayout *SL = DL.getStructLayout(STy); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 263 | Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx)); |
| 264 | continue; |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 265 | } |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 266 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 267 | APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType())); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 268 | Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize; |
| 269 | } |
| 270 | return true; |
| 271 | } |
| 272 | |
| 273 | bool CallAnalyzer::visitAlloca(AllocaInst &I) { |
Eric Christopher | beb2cd6 | 2014-04-07 13:36:21 +0000 | [diff] [blame] | 274 | // Check whether inlining will turn a dynamic alloca into a static |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 275 | // alloca, and handle that case. |
Eric Christopher | beb2cd6 | 2014-04-07 13:36:21 +0000 | [diff] [blame] | 276 | if (I.isArrayAllocation()) { |
| 277 | if (Constant *Size = SimplifiedValues.lookup(I.getArraySize())) { |
| 278 | ConstantInt *AllocSize = dyn_cast<ConstantInt>(Size); |
| 279 | assert(AllocSize && "Allocation size not a constant int?"); |
| 280 | Type *Ty = I.getAllocatedType(); |
| 281 | AllocatedSize += Ty->getPrimitiveSizeInBits() * AllocSize->getZExtValue(); |
| 282 | return Base::visitAlloca(I); |
| 283 | } |
| 284 | } |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 285 | |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 286 | // Accumulate the allocated size. |
| 287 | if (I.isStaticAlloca()) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 288 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 289 | Type *Ty = I.getAllocatedType(); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 290 | AllocatedSize += DL.getTypeAllocSize(Ty); |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 291 | } |
| 292 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 293 | // We will happily inline static alloca instructions. |
| 294 | if (I.isStaticAlloca()) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 295 | return Base::visitAlloca(I); |
| 296 | |
| 297 | // FIXME: This is overly conservative. Dynamic allocas are inefficient for |
| 298 | // a variety of reasons, and so we would like to not inline them into |
| 299 | // functions which don't currently have a dynamic alloca. This simply |
| 300 | // disables inlining altogether in the presence of a dynamic alloca. |
| 301 | HasDynamicAlloca = true; |
| 302 | return false; |
| 303 | } |
| 304 | |
| 305 | bool CallAnalyzer::visitPHI(PHINode &I) { |
| 306 | // FIXME: We should potentially be tracking values through phi nodes, |
| 307 | // especially when they collapse to a single value due to deleted CFG edges |
| 308 | // during inlining. |
| 309 | |
| 310 | // FIXME: We need to propagate SROA *disabling* through phi nodes, even |
| 311 | // though we don't want to propagate its bonuses. The idea is to disable
| 312 | // SROA if it *might* be used in an inappropriate manner. |
| 313 | |
| 314 | // Phi nodes are always zero-cost. |
| 315 | return true; |
| 316 | } |
| 317 | |
| 318 | bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) { |
| 319 | Value *SROAArg; |
| 320 | DenseMap<Value *, int>::iterator CostIt; |
| 321 | bool SROACandidate = lookupSROAArgAndCost(I.getPointerOperand(), |
| 322 | SROAArg, CostIt); |
| 323 | |
| 324 | // Try to fold GEPs of constant-offset call site argument pointers. This |
| 325 | // requires target data and inbounds GEPs. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 326 | if (I.isInBounds()) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 327 | // Check if we have a base + offset for the pointer. |
| 328 | Value *Ptr = I.getPointerOperand(); |
| 329 | std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr); |
| 330 | if (BaseAndOffset.first) { |
| 331 | // Check if the offset of this GEP is constant, and if so accumulate it |
| 332 | // into Offset. |
| 333 | if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) { |
| 334 | // Non-constant GEPs aren't folded, and disable SROA. |
| 335 | if (SROACandidate) |
| 336 | disableSROA(CostIt); |
| 337 | return false; |
| 338 | } |
| 339 | |
| 340 | // Add the result as a new mapping to Base + Offset. |
| 341 | ConstantOffsetPtrs[&I] = BaseAndOffset; |
| 342 | |
| 343 | // Also handle SROA candidates here, we already know that the GEP is |
| 344 | // all-constant indexed. |
| 345 | if (SROACandidate) |
| 346 | SROAArgValues[&I] = SROAArg; |
| 347 | |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 348 | return true; |
| 349 | } |
| 350 | } |
| 351 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 352 | if (isGEPOffsetConstant(I)) { |
| 353 | if (SROACandidate) |
| 354 | SROAArgValues[&I] = SROAArg; |
| 355 | |
| 356 | // Constant GEPs are modeled as free. |
| 357 | return true; |
| 358 | } |
| 359 | |
| 360 | // Variable GEPs will require math and will disable SROA. |
| 361 | if (SROACandidate) |
| 362 | disableSROA(CostIt); |
Chandler Carruth | 783b719 | 2012-03-09 02:49:36 +0000 | [diff] [blame] | 363 | return false; |
| 364 | } |
| 365 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 366 | bool CallAnalyzer::visitBitCast(BitCastInst &I) { |
| 367 | // Propagate constants through bitcasts. |
Chandler Carruth | 86ed530 | 2012-12-28 14:43:42 +0000 | [diff] [blame] | 368 | Constant *COp = dyn_cast<Constant>(I.getOperand(0)); |
| 369 | if (!COp) |
| 370 | COp = SimplifiedValues.lookup(I.getOperand(0)); |
| 371 | if (COp) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 372 | if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) { |
| 373 | SimplifiedValues[&I] = C; |
| 374 | return true; |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 375 | } |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 376 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 377 | // Track base/offset pairs through casts.
| 378 | std::pair<Value *, APInt> BaseAndOffset |
| 379 | = ConstantOffsetPtrs.lookup(I.getOperand(0)); |
| 380 | // Casts don't change the offset, just wrap it up. |
| 381 | if (BaseAndOffset.first) |
| 382 | ConstantOffsetPtrs[&I] = BaseAndOffset; |
| 383 | |
| 384 | // Also look for SROA candidates here. |
| 385 | Value *SROAArg; |
| 386 | DenseMap<Value *, int>::iterator CostIt; |
| 387 | if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) |
| 388 | SROAArgValues[&I] = SROAArg; |
| 389 | |
| 390 | // Bitcasts are always zero cost. |
| 391 | return true; |
Owen Anderson | a08318a | 2010-09-09 16:56:42 +0000 | [diff] [blame] | 392 | } |
| 393 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 394 | bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) { |
| 395 | // Propagate constants through ptrtoint. |
Chandler Carruth | 86ed530 | 2012-12-28 14:43:42 +0000 | [diff] [blame] | 396 | Constant *COp = dyn_cast<Constant>(I.getOperand(0)); |
| 397 | if (!COp) |
| 398 | COp = SimplifiedValues.lookup(I.getOperand(0)); |
| 399 | if (COp) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 400 | if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) { |
| 401 | SimplifiedValues[&I] = C; |
| 402 | return true; |
Chandler Carruth | 4d1d34f | 2012-03-14 23:19:53 +0000 | [diff] [blame] | 403 | } |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 404 | |
| 405 | // Track base/offset pairs when converted to a plain integer provided the |
| 406 | // integer is large enough to represent the pointer. |
| 407 | unsigned IntegerSize = I.getType()->getScalarSizeInBits(); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 408 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Mehdi Amini | 46a4355 | 2015-03-04 18:43:29 +0000 | [diff] [blame] | 409 | if (IntegerSize >= DL.getPointerSizeInBits()) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 410 | std::pair<Value *, APInt> BaseAndOffset |
| 411 | = ConstantOffsetPtrs.lookup(I.getOperand(0)); |
| 412 | if (BaseAndOffset.first) |
| 413 | ConstantOffsetPtrs[&I] = BaseAndOffset; |
| 414 | } |
| 415 | |
| 416 | // This is really weird. Technically, ptrtoint will disable SROA. However, |
| 417 | // unless that ptrtoint is *used* somewhere in the live basic blocks after |
| 418 | // inlining, it will be nuked, and SROA should proceed. All of the uses which |
| 419 | // would block SROA would also block SROA if applied directly to a pointer, |
| 420 | // and so we can just add the integer in here. The only places where SROA is |
| 421 | // preserved either cannot fire on an integer, or won't in-and-of themselves |
| 422 | // disable SROA (ext) w/o some later use that we would see and disable. |
| 423 | Value *SROAArg; |
| 424 | DenseMap<Value *, int>::iterator CostIt; |
| 425 | if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) |
| 426 | SROAArgValues[&I] = SROAArg; |
| 427 | |
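// The cast is modeled as free only if the target cost model reports it as
// TCC_Free; otherwise analyzeBlock charges the generic per-instruction cost.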
Chandler Carruth | b8cf510 | 2013-01-21 12:05:16 +0000 | [diff] [blame] | 428 | return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I); |
Chandler Carruth | 4d1d34f | 2012-03-14 23:19:53 +0000 | [diff] [blame] | 429 | } |
| 430 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 431 | bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) { |
| 432 | // Propagate constants through inttoptr.
Chandler Carruth | 86ed530 | 2012-12-28 14:43:42 +0000 | [diff] [blame] | 433 | Constant *COp = dyn_cast<Constant>(I.getOperand(0)); |
| 434 | if (!COp) |
| 435 | COp = SimplifiedValues.lookup(I.getOperand(0)); |
| 436 | if (COp) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 437 | if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) { |
| 438 | SimplifiedValues[&I] = C; |
| 439 | return true; |
| 440 | } |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 441 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 442 | // Track base/offset pairs when round-tripped through a pointer without |
| 443 | // modifications provided the integer is not too large. |
| 444 | Value *Op = I.getOperand(0); |
| 445 | unsigned IntegerSize = Op->getType()->getScalarSizeInBits(); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 446 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Mehdi Amini | 46a4355 | 2015-03-04 18:43:29 +0000 | [diff] [blame] | 447 | if (IntegerSize <= DL.getPointerSizeInBits()) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 448 | std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op); |
| 449 | if (BaseAndOffset.first) |
| 450 | ConstantOffsetPtrs[&I] = BaseAndOffset; |
| 451 | } |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 452 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 453 | // "Propagate" SROA here in the same manner as we do for ptrtoint above. |
| 454 | Value *SROAArg; |
| 455 | DenseMap<Value *, int>::iterator CostIt; |
| 456 | if (lookupSROAArgAndCost(Op, SROAArg, CostIt)) |
| 457 | SROAArgValues[&I] = SROAArg; |
Chandler Carruth | 4d1d34f | 2012-03-14 23:19:53 +0000 | [diff] [blame] | 458 | |
Chandler Carruth | b8cf510 | 2013-01-21 12:05:16 +0000 | [diff] [blame] | 459 | return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 460 | } |
| 461 | |
| 462 | bool CallAnalyzer::visitCastInst(CastInst &I) { |
| 463 | // Propagate constants through casts.
Chandler Carruth | 86ed530 | 2012-12-28 14:43:42 +0000 | [diff] [blame] | 464 | Constant *COp = dyn_cast<Constant>(I.getOperand(0)); |
| 465 | if (!COp) |
| 466 | COp = SimplifiedValues.lookup(I.getOperand(0)); |
| 467 | if (COp) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 468 | if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) { |
| 469 | SimplifiedValues[&I] = C; |
| 470 | return true; |
| 471 | } |
| 472 | |
| 473 | // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere. |
| 474 | disableSROA(I.getOperand(0)); |
| 475 | |
Chandler Carruth | b8cf510 | 2013-01-21 12:05:16 +0000 | [diff] [blame] | 476 | return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 477 | } |
| 478 | |
| 479 | bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) { |
| 480 | Value *Operand = I.getOperand(0); |
Jakub Staszak | 7b9e0b9 | 2013-03-07 20:01:19 +0000 | [diff] [blame] | 481 | Constant *COp = dyn_cast<Constant>(Operand); |
| 482 | if (!COp) |
| 483 | COp = SimplifiedValues.lookup(Operand); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 484 | if (COp) { |
| 485 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 486 | if (Constant *C = ConstantFoldInstOperands(I.getOpcode(), I.getType(), |
Rafael Espindola | 37dc9e1 | 2014-02-21 00:06:31 +0000 | [diff] [blame] | 487 | COp, DL)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 488 | SimplifiedValues[&I] = C; |
| 489 | return true; |
| 490 | } |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 491 | } |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 492 | |
| 493 | // Disable any SROA on the argument to arbitrary unary operators. |
| 494 | disableSROA(Operand); |
| 495 | |
| 496 | return false; |
| 497 | } |
| 498 | |
Matt Arsenault | 727aa34 | 2013-07-20 04:09:00 +0000 | [diff] [blame] | 499 | bool CallAnalyzer::visitCmpInst(CmpInst &I) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 500 | Value *LHS = I.getOperand(0), *RHS = I.getOperand(1); |
| 501 | // First try to handle simplified comparisons. |
| 502 | if (!isa<Constant>(LHS)) |
| 503 | if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS)) |
| 504 | LHS = SimpleLHS; |
| 505 | if (!isa<Constant>(RHS)) |
| 506 | if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS)) |
| 507 | RHS = SimpleRHS; |
Matt Arsenault | 727aa34 | 2013-07-20 04:09:00 +0000 | [diff] [blame] | 508 | if (Constant *CLHS = dyn_cast<Constant>(LHS)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 509 | if (Constant *CRHS = dyn_cast<Constant>(RHS)) |
Matt Arsenault | 727aa34 | 2013-07-20 04:09:00 +0000 | [diff] [blame] | 510 | if (Constant *C = ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 511 | SimplifiedValues[&I] = C; |
| 512 | return true; |
| 513 | } |
Matt Arsenault | 727aa34 | 2013-07-20 04:09:00 +0000 | [diff] [blame] | 514 | } |
| 515 | |
| 516 | if (I.getOpcode() == Instruction::FCmp) |
| 517 | return false; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 518 | |
| 519 | // Otherwise look for a comparison between constant offset pointers with |
| 520 | // a common base. |
| 521 | Value *LHSBase, *RHSBase; |
| 522 | APInt LHSOffset, RHSOffset; |
Benjamin Kramer | d6f1f84 | 2014-03-02 13:30:33 +0000 | [diff] [blame] | 523 | std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 524 | if (LHSBase) { |
Benjamin Kramer | d6f1f84 | 2014-03-02 13:30:33 +0000 | [diff] [blame] | 525 | std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 526 | if (RHSBase && LHSBase == RHSBase) { |
| 527 | // We have common bases, fold the icmp to a constant based on the |
| 528 | // offsets. |
| 529 | Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset); |
| 530 | Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset); |
| 531 | if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) { |
| 532 | SimplifiedValues[&I] = C; |
| 533 | ++NumConstantPtrCmps; |
| 534 | return true; |
| 535 | } |
| 536 | } |
| 537 | } |
| 538 | |
| 539 | // If the comparison is an equality comparison with null, we can simplify it |
| 540 | // for any alloca-derived argument. |
| 541 | if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1))) |
| 542 | if (isAllocaDerivedArg(I.getOperand(0))) { |
| 543 | // We can actually predict the result of comparisons between an |
| 544 | // alloca-derived value and null. Note that this fires regardless of |
| 545 | // SROA firing. |
| 546 | bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE; |
| 547 | SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType()) |
| 548 | : ConstantInt::getFalse(I.getType()); |
| 549 | return true; |
| 550 | } |
| 551 | |
| 552 | // Finally check for SROA candidates in comparisons. |
| 553 | Value *SROAArg; |
| 554 | DenseMap<Value *, int>::iterator CostIt; |
| 555 | if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) { |
| 556 | if (isa<ConstantPointerNull>(I.getOperand(1))) { |
| 557 | accumulateSROACost(CostIt, InlineConstants::InstrCost); |
| 558 | return true; |
| 559 | } |
| 560 | |
| 561 | disableSROA(CostIt); |
| 562 | } |
| 563 | |
| 564 | return false; |
| 565 | } |
| 566 | |
| 567 | bool CallAnalyzer::visitSub(BinaryOperator &I) { |
| 568 | // Try to handle a special case: we can fold computing the difference of two |
| 569 | // constant-related pointers. |
| 570 | Value *LHS = I.getOperand(0), *RHS = I.getOperand(1); |
| 571 | Value *LHSBase, *RHSBase; |
| 572 | APInt LHSOffset, RHSOffset; |
Benjamin Kramer | d6f1f84 | 2014-03-02 13:30:33 +0000 | [diff] [blame] | 573 | std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 574 | if (LHSBase) { |
Benjamin Kramer | d6f1f84 | 2014-03-02 13:30:33 +0000 | [diff] [blame] | 575 | std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 576 | if (RHSBase && LHSBase == RHSBase) { |
| 577 | // We have common bases, fold the subtract to a constant based on the |
| 578 | // offsets. |
| 579 | Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset); |
| 580 | Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset); |
| 581 | if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) { |
| 582 | SimplifiedValues[&I] = C; |
| 583 | ++NumConstantPtrDiffs; |
| 584 | return true; |
| 585 | } |
| 586 | } |
| 587 | } |
| 588 | |
| 589 | // Otherwise, fall back to the generic logic for simplifying and handling |
| 590 | // instructions. |
| 591 | return Base::visitSub(I); |
| 592 | } |
| 593 | |
| 594 | bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) { |
| 595 | Value *LHS = I.getOperand(0), *RHS = I.getOperand(1); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 596 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 597 | if (!isa<Constant>(LHS)) |
| 598 | if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS)) |
| 599 | LHS = SimpleLHS; |
| 600 | if (!isa<Constant>(RHS)) |
| 601 | if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS)) |
| 602 | RHS = SimpleRHS; |
Michael Zolotukhin | 4e8598e | 2015-02-06 20:02:51 +0000 | [diff] [blame] | 603 | Value *SimpleV = nullptr; |
| 604 | if (auto FI = dyn_cast<FPMathOperator>(&I)) |
| 605 | SimpleV = |
| 606 | SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL); |
| 607 | else |
| 608 | SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL); |
| 609 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 610 | if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) { |
| 611 | SimplifiedValues[&I] = C; |
| 612 | return true; |
| 613 | } |
| 614 | |
| 615 | // Disable any SROA on arguments to arbitrary, unsimplified binary operators. |
| 616 | disableSROA(LHS); |
| 617 | disableSROA(RHS); |
| 618 | |
| 619 | return false; |
| 620 | } |
| 621 | |
| 622 | bool CallAnalyzer::visitLoad(LoadInst &I) { |
| 623 | Value *SROAArg; |
| 624 | DenseMap<Value *, int>::iterator CostIt; |
Wei Mi | 6c428d6 | 2015-03-20 18:33:12 +0000 | [diff] [blame] | 625 | if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 626 | if (I.isSimple()) { |
| 627 | accumulateSROACost(CostIt, InlineConstants::InstrCost); |
| 628 | return true; |
| 629 | } |
| 630 | |
| 631 | disableSROA(CostIt); |
| 632 | } |
| 633 | |
| 634 | return false; |
| 635 | } |
| 636 | |
| 637 | bool CallAnalyzer::visitStore(StoreInst &I) { |
| 638 | Value *SROAArg; |
| 639 | DenseMap<Value *, int>::iterator CostIt; |
Wei Mi | 6c428d6 | 2015-03-20 18:33:12 +0000 | [diff] [blame] | 640 | if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 641 | if (I.isSimple()) { |
| 642 | accumulateSROACost(CostIt, InlineConstants::InstrCost); |
| 643 | return true; |
| 644 | } |
| 645 | |
| 646 | disableSROA(CostIt); |
| 647 | } |
| 648 | |
| 649 | return false; |
| 650 | } |
| 651 | |
Chandler Carruth | 753e21d | 2012-12-28 14:23:32 +0000 | [diff] [blame] | 652 | bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) { |
| 653 | // Constant folding for extract value is trivial. |
| 654 | Constant *C = dyn_cast<Constant>(I.getAggregateOperand()); |
| 655 | if (!C) |
| 656 | C = SimplifiedValues.lookup(I.getAggregateOperand()); |
| 657 | if (C) { |
| 658 | SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices()); |
| 659 | return true; |
| 660 | } |
| 661 | |
| 662 | // SROA can look through these but give them a cost. |
| 663 | return false; |
| 664 | } |
| 665 | |
| 666 | bool CallAnalyzer::visitInsertValue(InsertValueInst &I) { |
| 667 | // Constant folding for insert value is trivial. |
| 668 | Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand()); |
| 669 | if (!AggC) |
| 670 | AggC = SimplifiedValues.lookup(I.getAggregateOperand()); |
| 671 | Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand()); |
| 672 | if (!InsertedC) |
| 673 | InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand()); |
| 674 | if (AggC && InsertedC) { |
| 675 | SimplifiedValues[&I] = ConstantExpr::getInsertValue(AggC, InsertedC, |
| 676 | I.getIndices()); |
| 677 | return true; |
| 678 | } |
| 679 | |
| 680 | // SROA can look through these but give them a cost. |
| 681 | return false; |
| 682 | } |
| 683 | |
| 684 | /// \brief Try to simplify a call site. |
| 685 | /// |
| 686 | /// Takes a concrete function and callsite and tries to actually simplify it by |
| 687 | /// analyzing the arguments and the call itself with instsimplify. Returns true if
| 688 | /// it has simplified the callsite to some other entity (a constant), making it |
| 689 | /// free. |
| 690 | bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) { |
| 691 | // FIXME: Using the instsimplify logic directly for this is inefficient |
| 692 | // because we have to continually rebuild the argument list even when no |
| 693 | // simplifications can be performed. Until that is fixed with remapping |
| 694 | // inside of instsimplify, directly constant fold calls here. |
| 695 | if (!canConstantFoldCallTo(F)) |
| 696 | return false; |
| 697 | |
| 698 | // Try to re-map the arguments to constants. |
| 699 | SmallVector<Constant *, 4> ConstantArgs; |
| 700 | ConstantArgs.reserve(CS.arg_size()); |
| 701 | for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); |
| 702 | I != E; ++I) { |
| 703 | Constant *C = dyn_cast<Constant>(*I); |
| 704 | if (!C) |
| 705 | C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I)); |
| 706 | if (!C) |
| 707 | return false; // This argument doesn't map to a constant. |
| 708 | |
| 709 | ConstantArgs.push_back(C); |
| 710 | } |
| 711 | if (Constant *C = ConstantFoldCall(F, ConstantArgs)) { |
| 712 | SimplifiedValues[CS.getInstruction()] = C; |
| 713 | return true; |
| 714 | } |
| 715 | |
| 716 | return false; |
| 717 | } |
| 718 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 719 | bool CallAnalyzer::visitCallSite(CallSite CS) { |
Chandler Carruth | 37d25de | 2013-12-13 08:00:01 +0000 | [diff] [blame] | 720 | if (CS.hasFnAttr(Attribute::ReturnsTwice) && |
Duncan P. N. Exon Smith | b3fc83c | 2015-02-14 00:12:15 +0000 | [diff] [blame] | 721 | !F.hasFnAttribute(Attribute::ReturnsTwice)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 722 | // This aborts the entire analysis. |
| 723 | ExposesReturnsTwice = true; |
| 724 | return false; |
| 725 | } |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 726 | if (CS.isCall() && |
Eli Bendersky | 576ef3c | 2014-03-17 16:19:07 +0000 | [diff] [blame] | 727 | cast<CallInst>(CS.getInstruction())->cannotDuplicate()) |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 728 | ContainsNoDuplicateCall = true; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 729 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 730 | if (Function *F = CS.getCalledFunction()) { |
Chandler Carruth | 753e21d | 2012-12-28 14:23:32 +0000 | [diff] [blame] | 731 | // When we have a concrete function, first try to simplify it directly. |
| 732 | if (simplifyCallSite(F, CS)) |
| 733 | return true; |
| 734 | |
| 735 | // Next check if it is an intrinsic we know about. |
| 736 | // FIXME: Lift this into part of the InstVisitor. |
| 737 | if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) { |
| 738 | switch (II->getIntrinsicID()) { |
| 739 | default: |
| 740 | return Base::visitCallSite(CS); |
| 741 | |
| 742 | case Intrinsic::memset: |
| 743 | case Intrinsic::memcpy: |
| 744 | case Intrinsic::memmove: |
| 745 | // SROA can usually chew through these intrinsics, but they aren't free. |
| 746 | return false; |
Reid Kleckner | 223de26 | 2015-04-14 20:38:14 +0000 | [diff] [blame] | 747 | case Intrinsic::frameescape: |
| 748 | HasFrameEscape = true; |
| 749 | return false; |
Chandler Carruth | 753e21d | 2012-12-28 14:23:32 +0000 | [diff] [blame] | 750 | } |
| 751 | } |
| 752 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 753 | if (F == CS.getInstruction()->getParent()->getParent()) { |
| 754 | // This flag will fully abort the analysis, so don't bother with anything |
| 755 | // else. |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 756 | IsRecursiveCall = true; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 757 | return false; |
| 758 | } |
| 759 | |
Chandler Carruth | 0ba8db4 | 2013-01-22 11:26:02 +0000 | [diff] [blame] | 760 | if (TTI.isLoweredToCall(F)) { |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 761 | // We account for the average 1 instruction per call argument setup |
| 762 | // here. |
| 763 | Cost += CS.arg_size() * InlineConstants::InstrCost; |
| 764 | |
| 765 | // Everything other than inline ASM will also have a significant cost |
| 766 | // merely from making the call. |
| 767 | if (!isa<InlineAsm>(CS.getCalledValue())) |
| 768 | Cost += InlineConstants::CallPenalty; |
| 769 | } |
| 770 | |
| 771 | return Base::visitCallSite(CS); |
| 772 | } |
| 773 | |
| 774 | // Otherwise we're in a very special case -- an indirect function call. See |
| 775 | // if we can be particularly clever about this. |
| 776 | Value *Callee = CS.getCalledValue(); |
| 777 | |
| 778 | // First, pay the price of the argument setup. We account for the average |
| 779 | // 1 instruction per call argument setup here. |
| 780 | Cost += CS.arg_size() * InlineConstants::InstrCost; |
| 781 | |
| 782 | // Next, check if this happens to be an indirect function call to a known |
| 783 | // function in this inline context. If not, we've done all we can. |
| 784 | Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee)); |
| 785 | if (!F) |
| 786 | return Base::visitCallSite(CS); |
| 787 | |
| 788 | // If we have a constant that we are calling as a function, we can peer |
| 789 | // through it and see the function target. This happens not infrequently |
| 790 | // during devirtualization and so we want to give it a hefty bonus for |
| 791 | // inlining, but cap that bonus in the event that inlining wouldn't pan |
| 792 | // out. Pretend to inline the function, with a custom threshold. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 793 | CallAnalyzer CA(TTI, ACT, *F, InlineConstants::IndirectCallThreshold); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 794 | if (CA.analyzeCall(CS)) { |
| 795 | // We were able to inline the indirect call! Subtract the cost from the |
| 796 | // bonus we want to apply, but don't go below zero. |
| 797 | Cost -= std::max(0, InlineConstants::IndirectCallThreshold - CA.getCost()); |
| 798 | } |
| 799 | |
| 800 | return Base::visitCallSite(CS); |
| 801 | } |
| 802 | |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 803 | bool CallAnalyzer::visitReturnInst(ReturnInst &RI) { |
| 804 | // At least one return instruction will be free after inlining. |
| 805 | bool Free = !HasReturn; |
| 806 | HasReturn = true; |
| 807 | return Free; |
| 808 | } |
| 809 | |
| 810 | bool CallAnalyzer::visitBranchInst(BranchInst &BI) { |
| 811 | // We model unconditional branches as essentially free -- they really |
| 812 | // shouldn't exist at all, but handling them makes the behavior of the |
| 813 | // inliner more regular and predictable. Interestingly, conditional branches |
| 814 | // which will fold away are also free. |
| 815 | return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) || |
| 816 | dyn_cast_or_null<ConstantInt>( |
| 817 | SimplifiedValues.lookup(BI.getCondition())); |
| 818 | } |
| 819 | |
| 820 | bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) { |
| 821 | // We model unconditional switches as free, see the comments on handling |
| 822 | // branches. |
Chandler Carruth | e01fd5f | 2014-04-28 08:52:44 +0000 | [diff] [blame] | 823 | if (isa<ConstantInt>(SI.getCondition())) |
| 824 | return true; |
| 825 | if (Value *V = SimplifiedValues.lookup(SI.getCondition())) |
| 826 | if (isa<ConstantInt>(V)) |
| 827 | return true; |
| 828 | |
| 829 | // Otherwise, we need to accumulate a cost proportional to the number of |
| 830 | // distinct successor blocks. This fan-out in the CFG cannot be represented |
| 831 | // for free even if we can represent the core switch as a jumptable that |
| 832 | // takes a single instruction. |
| 833 | // |
| 834 | // NB: We convert large switches which are just used to initialize large phi |
| 835 | // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent |
| 836 | // inlining those. It will prevent inlining in cases where the optimization |
| 837 | // does not (yet) fire. |
| 838 | SmallPtrSet<BasicBlock *, 8> SuccessorBlocks; |
| 839 | SuccessorBlocks.insert(SI.getDefaultDest()); |
| 840 | for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I) |
| 841 | SuccessorBlocks.insert(I.getCaseSuccessor()); |
| 842 | // Add cost corresponding to the number of distinct destinations. The first |
| 843 | // we model as free because of fallthrough. |
| 844 | Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost; |
| 845 | return false; |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 846 | } |
| 847 | |
| 848 | bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) { |
| 849 | // We never want to inline functions that contain an indirectbr. This is |
| 850 | // incorrect because all the blockaddresses (in static global initializers
| 851 | // for example) would be referring to the original function, and this |
| 852 | // indirect jump would jump from the inlined copy of the function into the |
| 853 | // original function which is extremely undefined behavior. |
| 854 | // FIXME: This logic isn't really right; we can safely inline functions with |
| 855 | // indirectbrs as long as no other function or global references the
Gerolf Hoflehner | 734f4c8 | 2014-07-01 00:19:34 +0000 | [diff] [blame] | 856 | // blockaddress of a block within the current function. |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 857 | HasIndirectBr = true; |
| 858 | return false; |
| 859 | } |
| 860 | |
| 861 | bool CallAnalyzer::visitResumeInst(ResumeInst &RI) { |
| 862 | // FIXME: It's not clear that a single instruction is an accurate model for |
| 863 | // the inline cost of a resume instruction. |
| 864 | return false; |
| 865 | } |
| 866 | |
| 867 | bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) { |
| 868 | // FIXME: It might be reasonable to discount the cost of instructions leading
| 869 | // to unreachable as they have the lowest possible impact on both runtime and |
| 870 | // code size. |
| 871 | return true; // No actual code is needed for unreachable. |
| 872 | } |
| 873 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 874 | bool CallAnalyzer::visitInstruction(Instruction &I) { |
Chandler Carruth | da7513a | 2012-05-04 00:58:03 +0000 | [diff] [blame] | 875 | // Some instructions are free. All of the free intrinsics can also be |
| 876 | // handled by SROA, etc. |
Chandler Carruth | b8cf510 | 2013-01-21 12:05:16 +0000 | [diff] [blame] | 877 | if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I)) |
Chandler Carruth | da7513a | 2012-05-04 00:58:03 +0000 | [diff] [blame] | 878 | return true; |
| 879 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 880 | // We found something we don't understand or can't handle. Mark any SROA-able |
| 881 | // values in the operand list as no longer viable. |
| 882 | for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI) |
| 883 | disableSROA(*OI); |
| 884 | |
| 885 | return false; |
| 886 | } |
| 887 | |
| 888 | |
| 889 | /// \brief Analyze a basic block for its contribution to the inline cost. |
| 890 | /// |
| 891 | /// This method walks the analyzer over every instruction in the given basic |
| 892 | /// block and accounts for their cost during inlining at this callsite. It |
| 893 | /// aborts early if the threshold has been exceeded or an impossible to inline |
| 894 | /// construct has been detected. It returns false if inlining is no longer |
| 895 | /// viable, and true if inlining remains viable. |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 896 | bool CallAnalyzer::analyzeBlock(BasicBlock *BB, |
| 897 | SmallPtrSetImpl<const Value *> &EphValues) { |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 898 | for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) { |
Chandler Carruth | 6b4cc8b | 2014-02-01 10:38:17 +0000 | [diff] [blame] | 899 | // FIXME: Currently, the number of instructions in a function, regardless of
| 900 | // our ability to simplify them during inlining to constants or dead code, is
| 901 | // actually used by the vector bonus heuristic. As long as that's true,
| 902 | // we have to special case debug intrinsics here to prevent differences in |
| 903 | // inlining due to debug symbols. Eventually, the number of unsimplified |
| 904 | // instructions shouldn't factor into the cost computation, but until then, |
| 905 | // hack around it here. |
| 906 | if (isa<DbgInfoIntrinsic>(I)) |
| 907 | continue; |
| 908 | |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 909 | // Skip ephemeral values. |
| 910 | if (EphValues.count(I)) |
| 911 | continue; |
| 912 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 913 | ++NumInstructions; |
| 914 | if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy()) |
| 915 | ++NumVectorInstructions; |
| 916 | |
Cameron Esfahani | 17177d1 | 2015-02-05 02:09:33 +0000 | [diff] [blame] | 917 | // If the instruction is floating point, and the target says this operation is |
| 918 | // expensive or the function has the "use-soft-float" attribute, this may |
| 919 | // eventually become a library call. Treat the cost as such. |
| 920 | if (I->getType()->isFloatingPointTy()) { |
| 921 | bool hasSoftFloatAttr = false; |
| 922 | |
| 923 | // If the function has the "use-soft-float" attribute, mark it as expensive. |
| 924 | if (F.hasFnAttribute("use-soft-float")) { |
| 925 | Attribute Attr = F.getFnAttribute("use-soft-float"); |
| 926 | StringRef Val = Attr.getValueAsString(); |
| 927 | if (Val == "true") |
| 928 | hasSoftFloatAttr = true; |
| 929 | } |
| 930 | |
| 931 | if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive || |
| 932 | hasSoftFloatAttr) |
| 933 | Cost += InlineConstants::CallPenalty; |
| 934 | } |
| 935 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 936 | // If the instruction simplified to a constant, there is no cost to this |
| 937 | // instruction. Visit the instructions using our InstVisitor to account for |
| 938 | // all of the per-instruction logic. The visit tree returns true if we |
| 939 | // consumed the instruction in any way, and false if the instruction's base |
| 940 | // cost should count against inlining. |
| 941 | if (Base::visit(I)) |
| 942 | ++NumInstructionsSimplified; |
| 943 | else |
| 944 | Cost += InlineConstants::InstrCost; |
| 945 | |
| 946 | // If visiting this instruction detected an uninlinable pattern, abort.
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 947 | if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca || |
Reid Kleckner | 223de26 | 2015-04-14 20:38:14 +0000 | [diff] [blame] | 948 | HasIndirectBr || HasFrameEscape) |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 949 | return false; |
| 950 | |
| 951 | // If the caller is a recursive function then we don't want to inline |
| 952 | // functions which allocate a lot of stack space because it would increase |
| 953 | // the caller stack usage dramatically. |
| 954 | if (IsCallerRecursive && |
| 955 | AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 956 | return false; |
| 957 | |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 958 | // Check if we've passed the maximum possible threshold so we don't spin in
| 959 | // huge basic blocks that will never inline. |
| 960 | if (Cost > Threshold) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 961 | return false; |
| 962 | } |
| 963 | |
| 964 | return true; |
| 965 | } |
| 966 | |
| 967 | /// \brief Compute the base pointer and cumulative constant offsets for V. |
| 968 | /// |
| 969 | /// This strips all constant offsets off of V, leaving it the base pointer, and |
| 970 | /// accumulates the total constant offset applied in the returned constant. It |
| 971 | /// returns null if V is not a pointer, and returns the constant '0' if there
| 972 | /// are no constant offsets applied.
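| | /// For example (illustrative): if V reaches an alloca %a through two in-bounds
| | /// GEPs whose constant indices add byte offsets of 4 and 8, V is rewritten to
| | /// %a and the returned constant holds 12.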
| 973 | ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 974 | if (!V->getType()->isPointerTy()) |
Craig Topper | 353eda4 | 2014-04-24 06:44:33 +0000 | [diff] [blame] | 975 | return nullptr; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 976 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 977 | const DataLayout &DL = F.getParent()->getDataLayout(); |
| 978 | unsigned IntPtrWidth = DL.getPointerSizeInBits(); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 979 | APInt Offset = APInt::getNullValue(IntPtrWidth); |
| 980 | |
| 981 | // Even though we don't look through PHI nodes, we could be called on an |
| 982 | // instruction in an unreachable block, which may be on a cycle. |
| 983 | SmallPtrSet<Value *, 4> Visited; |
| 984 | Visited.insert(V); |
| 985 | do { |
| 986 | if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) { |
| 987 | if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset)) |
Craig Topper | 353eda4 | 2014-04-24 06:44:33 +0000 | [diff] [blame] | 988 | return nullptr; |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 989 | V = GEP->getPointerOperand(); |
| 990 | } else if (Operator::getOpcode(V) == Instruction::BitCast) { |
| 991 | V = cast<Operator>(V)->getOperand(0); |
| 992 | } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) { |
| 993 | if (GA->mayBeOverridden()) |
| 994 | break; |
| 995 | V = GA->getAliasee(); |
| 996 | } else { |
| 997 | break; |
| 998 | } |
| 999 | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); |
David Blaikie | 70573dc | 2014-11-19 07:49:26 +0000 | [diff] [blame] | 1000 | } while (Visited.insert(V).second); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1001 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1002 | Type *IntPtrTy = DL.getIntPtrType(V->getContext()); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1003 | return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset)); |
| 1004 | } |
| 1005 | |
| 1006 | /// \brief Analyze a call site for potential inlining. |
| 1007 | /// |
| 1008 | /// Returns true if inlining this call is viable, and false if it is not |
| 1009 | /// viable. It computes the cost and adjusts the threshold based on numerous |
| 1010 | /// factors and heuristics. If this method returns false but the computed cost |
| 1011 | /// is below the computed threshold, then inlining was forcibly disabled by |
Bob Wilson | 266802d | 2012-11-19 07:04:30 +0000 | [diff] [blame] | 1012 | /// some artifact of the routine. |
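| | /// (Typical reasons for such forced disabling include an indirect branch or
| | /// dynamic alloca in the callee, a call exposing returns_twice, or a call to
| | /// llvm.frameescape; see the flags checked in analyzeBlock.)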
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1013 | bool CallAnalyzer::analyzeCall(CallSite CS) { |
Chandler Carruth | 7ae90d4 | 2012-04-11 10:15:10 +0000 | [diff] [blame] | 1014 | ++NumCallsAnalyzed; |
| 1015 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1016 | // Perform some tweaks to the cost and threshold based on the direct |
| 1017 | // callsite information. |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1018 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1019 | // We want to more aggressively inline vector-dense kernels, so up the |
| 1020 | // threshold, and we'll lower it if the % of vector instructions gets too |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1021 | // low. Note that these bonuses are somewhat arbitrary and have evolved over
| 1022 | // time as much by accident as by principled design.
| 1023 | // |
| 1024 | // FIXME: It would be nice to remove all such bonuses. At least it would be |
| 1025 | // nice to base the bonus values on something more scientific. |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1026 | assert(NumInstructions == 0); |
| 1027 | assert(NumVectorInstructions == 0); |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1028 | FiftyPercentVectorBonus = 3 * Threshold / 2; |
| 1029 | TenPercentVectorBonus = 3 * Threshold / 4; |
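| | // (Illustration, assuming an incoming threshold T: FiftyPercentVectorBonus
| | // is 1.5 * T and TenPercentVectorBonus is 0.75 * T. The larger bonus is added
| | // to Threshold speculatively below, and any excess is subtracted again near
| | // the end of this function once the callee's actual vector density is known.)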
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1030 | const DataLayout &DL = F.getParent()->getDataLayout(); |
Benjamin Kramer | c99d0e9 | 2012-08-07 11:13:19 +0000 | [diff] [blame] | 1031 | |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1032 | // Track whether the post-inlining function would have more than one basic
| 1033 | // block. A callee consisting of a single basic block is often intended to
| 1034 | // be inlined. Balloon the threshold by 50% until we pass the single-BB phase.
| 1035 | bool SingleBB = true; |
| 1036 | int SingleBBBonus = Threshold / 2; |
| 1037 | |
| 1038 | // Speculatively apply all possible bonuses to Threshold. If cost exceeds |
| 1039 | // this Threshold any time, and cost cannot decrease, we can stop processing |
| 1040 | // the rest of the function body. |
| 1041 | Threshold += (SingleBBBonus + FiftyPercentVectorBonus); |
| 1042 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1043 | // Give out bonuses per argument, as the instructions setting them up will |
| 1044 | // be gone after inlining. |
| 1045 | for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1046 | if (CS.isByValArgument(I)) { |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1047 | // We approximate the number of loads and stores needed by dividing the |
| 1048 | // size of the byval type by the target's pointer size. |
| 1049 | PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType()); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1050 | unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType()); |
| 1051 | unsigned PointerSize = DL.getPointerSizeInBits(); |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1052 | // Ceiling division. |
| 1053 | unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize; |
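| | // (For example, a 24-byte byval struct on a target with 64-bit pointers
| | // gives (192 + 63) / 64 = 3 word-sized copies.)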
Benjamin Kramer | c99d0e9 | 2012-08-07 11:13:19 +0000 | [diff] [blame] | 1054 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1055 | // If it generates more than 8 stores it is likely to be expanded as an |
| 1056 | // inline memcpy so we take that as an upper bound. Otherwise we assume |
| 1057 | // one load and one store per word copied. |
| 1058 | // FIXME: The maxStoresPerMemcpy setting from the target should be used |
| 1059 | // here instead of a magic number of 8, but it's not available via |
| 1060 | // DataLayout. |
| 1061 | NumStores = std::min(NumStores, 8U); |
| 1062 | |
| 1063 | Cost -= 2 * NumStores * InlineConstants::InstrCost; |
| 1064 | } else { |
| 1065 | // For non-byval arguments subtract off one instruction per call |
| 1066 | // argument. |
| 1067 | Cost -= InlineConstants::InstrCost; |
Benjamin Kramer | c99d0e9 | 2012-08-07 11:13:19 +0000 | [diff] [blame] | 1068 | } |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1069 | } |
| 1070 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1071 | // If there is only one call of the function, and it has internal linkage, |
| 1072 | // the cost of inlining it drops dramatically. |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 1073 | bool OnlyOneCallAndLocalLinkage = F.hasLocalLinkage() && F.hasOneUse() && |
| 1074 | &F == CS.getCalledFunction(); |
| 1075 | if (OnlyOneCallAndLocalLinkage) |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1076 | Cost += InlineConstants::LastCallToStaticBonus; |
| 1077 | |
| 1078 | // If the instruction after the call, or if the normal destination of the |
| 1079 | // invoke is an unreachable instruction, the function is noreturn. As such, |
| 1080 | // there is little point in inlining this unless there is literally zero |
| 1081 | // cost. |
| 1082 | Instruction *Instr = CS.getInstruction(); |
| 1083 | if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) { |
| 1084 | if (isa<UnreachableInst>(II->getNormalDest()->begin())) |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1085 | Threshold = 0; |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1086 | } else if (isa<UnreachableInst>(++BasicBlock::iterator(Instr))) |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1087 | Threshold = 0; |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1088 | |
| 1089 | // If this function uses the coldcc calling convention, prefer not to inline |
| 1090 | // it. |
| 1091 | if (F.getCallingConv() == CallingConv::Cold) |
| 1092 | Cost += InlineConstants::ColdccPenalty; |
| 1093 | |
| 1094 | // Check if we're done. This can happen due to bonuses and penalties. |
| 1095 | if (Cost > Threshold) |
| 1096 | return false; |
| 1097 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1098 | if (F.empty()) |
| 1099 | return true; |
| 1100 | |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 1101 | Function *Caller = CS.getInstruction()->getParent()->getParent(); |
| 1102 | // Check if the caller function is recursive itself. |
Chandler Carruth | cdf4788 | 2014-03-09 03:16:01 +0000 | [diff] [blame] | 1103 | for (User *U : Caller->users()) { |
| 1104 | CallSite Site(U); |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 1105 | if (!Site) |
| 1106 | continue; |
| 1107 | Instruction *I = Site.getInstruction(); |
| 1108 | if (I->getParent()->getParent() == Caller) { |
| 1109 | IsCallerRecursive = true; |
| 1110 | break; |
| 1111 | } |
| 1112 | } |
| 1113 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1114 | // Populate our simplified values by mapping from function arguments to call |
| 1115 | // arguments with known important simplifications. |
| 1116 | CallSite::arg_iterator CAI = CS.arg_begin(); |
| 1117 | for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end(); |
| 1118 | FAI != FAE; ++FAI, ++CAI) { |
| 1119 | assert(CAI != CS.arg_end()); |
| 1120 | if (Constant *C = dyn_cast<Constant>(CAI)) |
| 1121 | SimplifiedValues[FAI] = C; |
| 1122 | |
| 1123 | Value *PtrArg = *CAI; |
| 1124 | if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) { |
| 1125 | ConstantOffsetPtrs[FAI] = std::make_pair(PtrArg, C->getValue()); |
| 1126 | |
| 1127 | // We can SROA any pointer arguments derived from alloca instructions. |
| 1128 | if (isa<AllocaInst>(PtrArg)) { |
| 1129 | SROAArgValues[FAI] = PtrArg; |
| 1130 | SROAArgCosts[PtrArg] = 0; |
| 1131 | } |
| 1132 | } |
| 1133 | } |
| 1134 | NumConstantArgs = SimplifiedValues.size(); |
| 1135 | NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size(); |
| 1136 | NumAllocaArgs = SROAArgValues.size(); |
| 1137 | |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 1138 | // FIXME: If a caller has multiple calls to a callee, we end up recomputing |
| 1139 | // the ephemeral values multiple times (and they're completely determined by |
| 1140 | // the callee, so this is purely duplicate work). |
| 1141 | SmallPtrSet<const Value *, 32> EphValues; |
Bjorn Steinbrink | 6f972a1 | 2015-02-12 21:04:22 +0000 | [diff] [blame] | 1142 | CodeMetrics::collectEphemeralValues(&F, &ACT->getAssumptionCache(F), EphValues); |
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 1143 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1144 | // The worklist of live basic blocks in the callee *after* inlining. We avoid |
| 1145 | // adding basic blocks of the callee which can be proven to be dead for this |
| 1146 | // particular call site in order to get more accurate cost estimates. This |
| 1147 | // requires a somewhat heavyweight iteration pattern: we need to walk the |
| 1148 | // basic blocks in breadth-first order as we insert live successors. To
| 1149 | // accomplish this, and because we typically exit early once we cross the
| 1150 | // threshold, we use a SetVector optimized for small sizes.
| 1151 | typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>, |
| 1152 | SmallPtrSet<BasicBlock *, 16> > BBSetVector; |
| 1153 | BBSetVector BBWorklist; |
| 1154 | BBWorklist.insert(&F.getEntryBlock()); |
| 1155 | // Note that we *must not* cache the size, this loop grows the worklist. |
| 1156 | for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) { |
| 1157 | // Bail out the moment we cross the threshold. This means we'll under-count |
| 1158 | // the cost, but only when undercounting doesn't matter. |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1159 | if (Cost > Threshold) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1160 | break; |
| 1161 | |
| 1162 | BasicBlock *BB = BBWorklist[Idx]; |
| 1163 | if (BB->empty()) |
Chandler Carruth | 4d1d34f | 2012-03-14 23:19:53 +0000 | [diff] [blame] | 1164 | continue; |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1165 | |
Gerolf Hoflehner | 734f4c8 | 2014-07-01 00:19:34 +0000 | [diff] [blame] | 1166 | // Disallow inlining a blockaddress. A blockaddress only has defined |
| 1167 | // behavior for an indirect branch in the same function, and we do not |
| 1168 | // currently support inlining indirect branches. But, the inliner may not |
| 1169 | // see an indirect branch that ends up being dead code at a particular call |
| 1170 | // site. If the blockaddress escapes the function, e.g., via a global |
| 1171 | // variable, inlining may lead to an invalid cross-function reference. |
| 1172 | if (BB->hasAddressTaken()) |
| 1173 | return false; |
| 1174 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1175 | // Analyze the cost of this block. If we blow through the threshold, this |
| 1176 | // returns false, and we can bail out.
Hal Finkel | 57f03dd | 2014-09-07 13:49:57 +0000 | [diff] [blame] | 1177 | if (!analyzeBlock(BB, EphValues)) { |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 1178 | if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca || |
Reid Kleckner | 223de26 | 2015-04-14 20:38:14 +0000 | [diff] [blame] | 1179 | HasIndirectBr || HasFrameEscape) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1180 | return false; |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 1181 | |
| 1182 | // If the caller is a recursive function then we don't want to inline |
| 1183 | // functions which allocate a lot of stack space because it would increase |
| 1184 | // the caller stack usage dramatically. |
| 1185 | if (IsCallerRecursive && |
| 1186 | AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) |
| 1187 | return false; |
| 1188 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1189 | break; |
Eric Christopher | 46308e6 | 2011-02-01 01:16:32 +0000 | [diff] [blame] | 1190 | } |
Eric Christopher | 46308e6 | 2011-02-01 01:16:32 +0000 | [diff] [blame] | 1191 | |
Chandler Carruth | 0814d2a | 2013-12-13 07:59:56 +0000 | [diff] [blame] | 1192 | TerminatorInst *TI = BB->getTerminator(); |
| 1193 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1194 | // Add in the live successors by first checking whether we have terminator |
| 1195 | // that may be simplified based on the values simplified by this call. |
| 1196 | if (BranchInst *BI = dyn_cast<BranchInst>(TI)) { |
| 1197 | if (BI->isConditional()) { |
| 1198 | Value *Cond = BI->getCondition(); |
| 1199 | if (ConstantInt *SimpleCond |
| 1200 | = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) { |
| 1201 | BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0)); |
| 1202 | continue; |
Eric Christopher | 46308e6 | 2011-02-01 01:16:32 +0000 | [diff] [blame] | 1203 | } |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1204 | } |
| 1205 | } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) { |
| 1206 | Value *Cond = SI->getCondition(); |
| 1207 | if (ConstantInt *SimpleCond |
| 1208 | = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) { |
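| | // (Note: findCaseValue returns the default case when no explicit case
| | // matches, so a switch that folds to its default destination is covered
| | // here as well.)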
| 1209 | BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor()); |
| 1210 | continue; |
| 1211 | } |
| 1212 | } |
Eric Christopher | 46308e6 | 2011-02-01 01:16:32 +0000 | [diff] [blame] | 1213 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1214 | // If we're unable to select a particular successor, just count all of |
| 1215 | // them. |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 1216 | for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize; |
| 1217 | ++TIdx) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1218 | BBWorklist.insert(TI->getSuccessor(TIdx)); |
| 1219 | |
| 1220 | // If we had any successors at this point, then post-inlining is likely to
| 1221 | // have them as well. Note that we assume any basic blocks which existed |
| 1222 | // due to branches or switches which folded above will also fold after |
| 1223 | // inlining. |
| 1224 | if (SingleBB && TI->getNumSuccessors() > 1) { |
| 1225 | // Take off the bonus we applied to the threshold. |
| 1226 | Threshold -= SingleBBBonus; |
| 1227 | SingleBB = false; |
Eric Christopher | 46308e6 | 2011-02-01 01:16:32 +0000 | [diff] [blame] | 1228 | } |
| 1229 | } |
Andrew Trick | caa500b | 2011-10-01 01:27:56 +0000 | [diff] [blame] | 1230 | |
Chandler Carruth | cb5beb3 | 2013-12-12 11:59:26 +0000 | [diff] [blame] | 1231 | // If the callee contains a noduplicate call, we can only inline it if
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 1232 | // inlining would cause the removal of the caller (so the noduplicate call
| 1233 | // is not actually duplicated, just moved).
| 1234 | if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall) |
| 1235 | return false; |
| 1236 | |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1237 | // We applied the maximum possible vector bonus at the beginning. Now, |
| 1238 | // subtract the excess bonus, if any, from the Threshold before |
| 1239 | // comparing against Cost. |
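| | // (Callees that are more than half vector instructions keep the full
| | // FiftyPercentVectorBonus, those above roughly ten percent keep only
| | // TenPercentVectorBonus, and all other callees keep no vector bonus.)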
| 1240 | if (NumVectorInstructions <= NumInstructions / 10) |
| 1241 | Threshold -= FiftyPercentVectorBonus; |
| 1242 | else if (NumVectorInstructions <= NumInstructions / 2) |
| 1243 | Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1244 | |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1245 | return Cost < Threshold; |
Eric Christopher | 2dfbd7e | 2011-02-05 00:49:15 +0000 | [diff] [blame] | 1246 | } |
| 1247 | |
Manman Ren | 49d684e | 2012-09-12 05:06:18 +0000 | [diff] [blame] | 1248 | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1249 | /// \brief Dump stats about this call's analysis. |
| 1250 | void CallAnalyzer::dump() { |
Eric Christopher | a13839f | 2014-02-26 23:27:16 +0000 | [diff] [blame] | 1251 | #define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n" |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1252 | DEBUG_PRINT_STAT(NumConstantArgs); |
| 1253 | DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs); |
| 1254 | DEBUG_PRINT_STAT(NumAllocaArgs); |
| 1255 | DEBUG_PRINT_STAT(NumConstantPtrCmps); |
| 1256 | DEBUG_PRINT_STAT(NumConstantPtrDiffs); |
| 1257 | DEBUG_PRINT_STAT(NumInstructionsSimplified); |
Chandler Carruth | a004f22 | 2015-05-27 02:49:05 +0000 | [diff] [blame^] | 1258 | DEBUG_PRINT_STAT(NumInstructions); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1259 | DEBUG_PRINT_STAT(SROACostSavings); |
| 1260 | DEBUG_PRINT_STAT(SROACostSavingsLost); |
James Molloy | 4f6fb95 | 2012-12-20 16:04:27 +0000 | [diff] [blame] | 1261 | DEBUG_PRINT_STAT(ContainsNoDuplicateCall); |
Chandler Carruth | 394e34f | 2014-01-31 22:32:32 +0000 | [diff] [blame] | 1262 | DEBUG_PRINT_STAT(Cost); |
| 1263 | DEBUG_PRINT_STAT(Threshold); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1264 | #undef DEBUG_PRINT_STAT |
Eric Christopher | 2dfbd7e | 2011-02-05 00:49:15 +0000 | [diff] [blame] | 1265 | } |
Manman Ren | c3366cc | 2012-09-06 19:55:56 +0000 | [diff] [blame] | 1266 | #endif |
Eric Christopher | 2dfbd7e | 2011-02-05 00:49:15 +0000 | [diff] [blame] | 1267 | |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1268 | INITIALIZE_PASS_BEGIN(InlineCostAnalysis, "inline-cost", "Inline Cost Analysis", |
| 1269 | true, true) |
Chandler Carruth | 705b185 | 2015-01-31 03:43:40 +0000 | [diff] [blame] | 1270 | INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass) |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 1271 | INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker) |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1272 | INITIALIZE_PASS_END(InlineCostAnalysis, "inline-cost", "Inline Cost Analysis", |
| 1273 | true, true) |
| 1274 | |
| 1275 | char InlineCostAnalysis::ID = 0; |
| 1276 | |
Rafael Espindola | 339430f | 2014-02-25 23:25:17 +0000 | [diff] [blame] | 1277 | InlineCostAnalysis::InlineCostAnalysis() : CallGraphSCCPass(ID) {} |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1278 | |
| 1279 | InlineCostAnalysis::~InlineCostAnalysis() {} |
| 1280 | |
| 1281 | void InlineCostAnalysis::getAnalysisUsage(AnalysisUsage &AU) const { |
| 1282 | AU.setPreservesAll(); |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 1283 | AU.addRequired<AssumptionCacheTracker>(); |
Chandler Carruth | 705b185 | 2015-01-31 03:43:40 +0000 | [diff] [blame] | 1284 | AU.addRequired<TargetTransformInfoWrapperPass>(); |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1285 | CallGraphSCCPass::getAnalysisUsage(AU); |
| 1286 | } |
| 1287 | |
| 1288 | bool InlineCostAnalysis::runOnSCC(CallGraphSCC &SCC) { |
Chandler Carruth | fdb9c57 | 2015-02-01 12:01:35 +0000 | [diff] [blame] | 1289 | TTIWP = &getAnalysis<TargetTransformInfoWrapperPass>(); |
Chandler Carruth | 66b3130 | 2015-01-04 12:03:27 +0000 | [diff] [blame] | 1290 | ACT = &getAnalysis<AssumptionCacheTracker>(); |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1291 | return false; |
| 1292 | } |
| 1293 | |
| 1294 | InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, int Threshold) { |
David Chisnall | c1c9cda | 2012-04-06 17:27:41 +0000 | [diff] [blame] | 1295 | return getInlineCost(CS, CS.getCalledFunction(), Threshold); |
| 1296 | } |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1297 | |
Evgeniy Stepanov | 2ad3698 | 2013-08-08 08:22:39 +0000 | [diff] [blame] | 1298 | /// \brief Test whether two functions both have, or both lack, the given
| 1299 | /// attribute.
Akira Hatanaka | f99e191 | 2015-04-13 18:43:38 +0000 | [diff] [blame] | 1300 | template<typename AttrKind> |
| 1301 | static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) { |
| 1302 | return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr); |
Evgeniy Stepanov | 2ad3698 | 2013-08-08 08:22:39 +0000 | [diff] [blame] | 1303 | } |
| 1304 | |
| 1305 | /// \brief Test that there are no attribute conflicts between Caller and Callee |
| 1306 | /// that prevent inlining. |
| 1307 | static bool functionsHaveCompatibleAttributes(Function *Caller, |
| 1308 | Function *Callee) { |
Akira Hatanaka | f99e191 | 2015-04-13 18:43:38 +0000 | [diff] [blame] | 1309 | return attributeMatches(Caller, Callee, "target-cpu") && |
| 1310 | attributeMatches(Caller, Callee, "target-features") && |
| 1311 | attributeMatches(Caller, Callee, Attribute::SanitizeAddress) && |
Evgeniy Stepanov | 2ad3698 | 2013-08-08 08:22:39 +0000 | [diff] [blame] | 1312 | attributeMatches(Caller, Callee, Attribute::SanitizeMemory) && |
| 1313 | attributeMatches(Caller, Callee, Attribute::SanitizeThread); |
| 1314 | } |
| 1315 | |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1316 | InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, Function *Callee, |
David Chisnall | c1c9cda | 2012-04-06 17:27:41 +0000 | [diff] [blame] | 1317 | int Threshold) { |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1318 | // Cannot inline indirect calls. |
| 1319 | if (!Callee) |
| 1320 | return llvm::InlineCost::getNever(); |
| 1321 | |
| 1322 | // Calls to functions with always-inline attributes should be inlined |
| 1323 | // whenever possible. |
Peter Collingbourne | 68a8897 | 2014-05-19 18:25:54 +0000 | [diff] [blame] | 1324 | if (CS.hasFnAttr(Attribute::AlwaysInline)) { |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1325 | if (isInlineViable(*Callee)) |
| 1326 | return llvm::InlineCost::getAlways(); |
| 1327 | return llvm::InlineCost::getNever(); |
| 1328 | } |
| 1329 | |
Evgeniy Stepanov | 2ad3698 | 2013-08-08 08:22:39 +0000 | [diff] [blame] | 1330 | // Never inline functions with conflicting attributes (unless callee has |
| 1331 | // always-inline attribute). |
| 1332 | if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee)) |
| 1333 | return llvm::InlineCost::getNever(); |
| 1334 | |
Paul Robinson | dcbe35b | 2013-11-18 21:44:03 +0000 | [diff] [blame] | 1335 | // Don't inline this call if the caller has the optnone attribute. |
| 1336 | if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone)) |
| 1337 | return llvm::InlineCost::getNever(); |
| 1338 | |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1339 | // Don't inline functions which can be redefined at link-time to mean |
Eric Christopher | b1a382d | 2010-03-25 04:49:10 +0000 | [diff] [blame] | 1340 | // something else. Don't inline functions marked noinline or call sites |
| 1341 | // marked noinline. |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1342 | if (Callee->mayBeOverridden() || |
Evgeniy Stepanov | 2ad3698 | 2013-08-08 08:22:39 +0000 | [diff] [blame] | 1343 | Callee->hasFnAttribute(Attribute::NoInline) || CS.isNoInline()) |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1344 | return llvm::InlineCost::getNever(); |
| 1345 | |
Nadav Rotem | 4eb3d4b | 2012-09-19 08:08:04 +0000 | [diff] [blame] | 1346 | DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName() |
| 1347 | << "...\n"); |
Andrew Trick | caa500b | 2011-10-01 01:27:56 +0000 | [diff] [blame] | 1348 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1349 | CallAnalyzer CA(TTIWP->getTTI(*Callee), ACT, *Callee, Threshold); |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1350 | bool ShouldInline = CA.analyzeCall(CS); |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1351 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1352 | DEBUG(CA.dump()); |
| 1353 | |
| 1354 | // Check if there was a reason to force inlining or no inlining. |
| 1355 | if (!ShouldInline && CA.getCost() < CA.getThreshold()) |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1356 | return InlineCost::getNever(); |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1357 | if (ShouldInline && CA.getCost() >= CA.getThreshold()) |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1358 | return InlineCost::getAlways(); |
Andrew Trick | caa500b | 2011-10-01 01:27:56 +0000 | [diff] [blame] | 1359 | |
Chandler Carruth | 0539c07 | 2012-03-31 12:42:41 +0000 | [diff] [blame] | 1360 | return llvm::InlineCost::get(CA.getCost(), CA.getThreshold()); |
Dan Gohman | 4552e3c | 2009-10-13 18:30:07 +0000 | [diff] [blame] | 1361 | } |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1362 | |
Chandler Carruth | 4319e29 | 2013-01-21 11:39:18 +0000 | [diff] [blame] | 1363 | bool InlineCostAnalysis::isInlineViable(Function &F) { |
Duncan P. N. Exon Smith | b3fc83c | 2015-02-14 00:12:15 +0000 | [diff] [blame] | 1364 | bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice); |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1365 | for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) { |
Gerolf Hoflehner | 734f4c8 | 2014-07-01 00:19:34 +0000 | [diff] [blame] | 1366 | // Disallow inlining of functions which contain indirect branches or |
| 1367 | // blockaddresses. |
| 1368 | if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken()) |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1369 | return false; |
| 1370 | |
| 1371 | for (BasicBlock::iterator II = BI->begin(), IE = BI->end(); II != IE; |
| 1372 | ++II) { |
| 1373 | CallSite CS(II); |
| 1374 | if (!CS) |
| 1375 | continue; |
| 1376 | |
| 1377 | // Disallow recursive calls. |
| 1378 | if (&F == CS.getCalledFunction()) |
| 1379 | return false; |
| 1380 | |
| 1381 | // Disallow calls which expose returns-twice to a function not previously |
| 1382 | // attributed as such. |
| 1383 | if (!ReturnsTwice && CS.isCall() && |
| 1384 | cast<CallInst>(CS.getInstruction())->canReturnTwice()) |
| 1385 | return false; |
Reid Kleckner | 223de26 | 2015-04-14 20:38:14 +0000 | [diff] [blame] | 1386 | |
| 1387 | // Disallow inlining functions that call @llvm.frameescape. Doing this |
| 1388 | // correctly would require major changes to the inliner. |
| 1389 | if (CS.getCalledFunction() && |
| 1390 | CS.getCalledFunction()->getIntrinsicID() == |
| 1391 | llvm::Intrinsic::frameescape) |
| 1392 | return false; |
Bob Wilson | a5b0dc8 | 2012-11-19 07:04:35 +0000 | [diff] [blame] | 1393 | } |
| 1394 | } |
| 1395 | |
| 1396 | return true; |
| 1397 | } |