//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Loops should be simplified before this analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "branch-prob"

static cl::opt<bool> PrintBranchProb(
    "print-bpi", cl::init(false), cl::Hidden,
    cl::desc("Print the branch probability info."));

cl::opt<std::string> PrintBranchProbFuncName(
    "print-bpi-func-name", cl::Hidden,
    cl::desc("The option to specify the name of the function "
             "whose branch probability info is printed."));

INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
                      "Branch Probability Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
                    "Branch Probability Analysis", false, true)

BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  initializeBranchProbabilityInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

char BranchProbabilityInfoWrapperPass::ID = 0;

// Weights are for internal use only. They are used by heuristics to help
// estimate edge probabilities. Example:
//
// Using "Loop Branch Heuristics" we predict weights of edges for the
// block BB2.
//         ...
//          |
//          V
//         BB1<-+
//          |   |
//          |   | (Weight = 124)
//          V   |
//         BB2--+
//          |
//          | (Weight = 4)
//          V
//         BB3
//
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
// Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
static const uint32_t LBH_TAKEN_WEIGHT = 124;
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;
// Unlikely edges within a loop are half as likely as other edges.
static const uint32_t LBH_UNLIKELY_WEIGHT = 62;

/// Unreachable-terminating branch taken probability.
///
/// This is the probability for a branch being taken to a block that terminates
/// (eventually) in unreachable. These are predicted as unlikely as possible.
/// The remaining probability is shared equally among all reachable successors.
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);
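// For illustration (assuming BranchProbability's fixed denominator of 1 << 31):
// for a block with one unreachable and two reachable successors, the
// unreachable edge gets the minimal raw probability 1 / 2^31 and each
// reachable edge gets roughly (1 - 1/2^31) / 2, i.e. essentially one half.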

/// Weight for a branch taken going into a cold block.
///
/// This is the weight for a branch taken toward a block marked
/// cold. A block is marked cold if it's postdominated by a
/// block containing a call to a cold function. Cold functions
/// are those marked with attribute 'cold'.
static const uint32_t CC_TAKEN_WEIGHT = 4;

/// Weight for a branch not-taken into a cold block.
///
/// This is the weight for a branch not taken toward a block marked
/// cold.
static const uint32_t CC_NONTAKEN_WEIGHT = 64;

static const uint32_t PH_TAKEN_WEIGHT = 20;
static const uint32_t PH_NONTAKEN_WEIGHT = 12;

static const uint32_t ZH_TAKEN_WEIGHT = 20;
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;

static const uint32_t FPH_TAKEN_WEIGHT = 20;
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;
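// For illustration: each of the 20/12 weight pairs above corresponds to a
// predicted-taken probability of 20 / (20 + 12) = 62.5% and a not-taken
// probability of 37.5%.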

/// Weight for an ordered floating point comparison.
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;
/// Weight for an unordered floating point comparison, which means that at
/// least one of the operands is NaN. It is usually used to test for an
/// exceptional case, so the result is unlikely.
static const uint32_t FPH_UNO_WEIGHT = 1;

/// Invoke-terminating normal branch taken weight.
///
/// This is the weight for branching to the normal destination of an invoke
/// instruction. We expect this to happen most of the time. Set the weight to an
/// absurdly high value so that nested loops subsume it.
static const uint32_t IH_TAKEN_WEIGHT = 1024 * 1024 - 1;

/// Invoke-terminating normal branch not-taken weight.
///
/// This is the weight for branching to the unwind destination of an invoke
/// instruction. This is essentially never taken.
static const uint32_t IH_NONTAKEN_WEIGHT = 1;
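// For illustration: with these weights the normal destination of an invoke is
// predicted with probability (1024 * 1024 - 1) / (1024 * 1024), leaving
// roughly one in a million for the unwind edge.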

static void UpdatePDTWorklist(const BasicBlock *BB, PostDominatorTree *PDT,
                              SmallVectorImpl<const BasicBlock *> &WorkList,
                              SmallPtrSetImpl<const BasicBlock *> &TargetSet) {
  SmallVector<BasicBlock *, 8> Descendants;
  SmallPtrSet<const BasicBlock *, 16> NewItems;

  PDT->getDescendants(const_cast<BasicBlock *>(BB), Descendants);
  for (auto *BB : Descendants)
    if (TargetSet.insert(BB).second)
      for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
        if (!TargetSet.count(*PI))
          NewItems.insert(*PI);
  WorkList.insert(WorkList.end(), NewItems.begin(), NewItems.end());
}

/// Compute a set of basic blocks that are post-dominated by unreachables.
void BranchProbabilityInfo::computePostDominatedByUnreachable(
    const Function &F, PostDominatorTree *PDT) {
  SmallVector<const BasicBlock *, 8> WorkList;
  for (auto &BB : F) {
    const Instruction *TI = BB.getTerminator();
    if (TI->getNumSuccessors() == 0) {
      if (isa<UnreachableInst>(TI) ||
          // If this block is terminated by a call to
          // @llvm.experimental.deoptimize then treat it like an unreachable
          // since the @llvm.experimental.deoptimize call is expected to
          // practically never execute.
          BB.getTerminatingDeoptimizeCall())
        UpdatePDTWorklist(&BB, PDT, WorkList, PostDominatedByUnreachable);
    }
  }

  while (!WorkList.empty()) {
    const BasicBlock *BB = WorkList.pop_back_val();
    if (PostDominatedByUnreachable.count(BB))
      continue;
    // If the terminator is an InvokeInst, check only the normal destination
    // block as the unwind edge of InvokeInst is also very unlikely taken.
    if (auto *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      if (PostDominatedByUnreachable.count(II->getNormalDest()))
        UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByUnreachable);
    }
    // If all the successors are unreachable, BB is unreachable as well.
    else if (!successors(BB).empty() &&
             llvm::all_of(successors(BB), [this](const BasicBlock *Succ) {
               return PostDominatedByUnreachable.count(Succ);
             }))
      UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByUnreachable);
  }
}

/// Compute a set of basic blocks that are post-dominated by cold calls.
void BranchProbabilityInfo::computePostDominatedByColdCall(
    const Function &F, PostDominatorTree *PDT) {
  SmallVector<const BasicBlock *, 8> WorkList;
  for (auto &BB : F)
    for (auto &I : BB)
      if (const CallInst *CI = dyn_cast<CallInst>(&I))
        if (CI->hasFnAttr(Attribute::Cold))
          UpdatePDTWorklist(&BB, PDT, WorkList, PostDominatedByColdCall);

  while (!WorkList.empty()) {
    const BasicBlock *BB = WorkList.pop_back_val();

    // If the terminator is an InvokeInst, check only the normal destination
    // block as the unwind edge of InvokeInst is also very unlikely taken.
    if (auto *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      if (PostDominatedByColdCall.count(II->getNormalDest()))
        UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByColdCall);
    }
    // If all of the successors are post-dominated by a cold call, then BB is
    // post-dominated by one as well.
    else if (!successors(BB).empty() &&
             llvm::all_of(successors(BB), [this](const BasicBlock *Succ) {
               return PostDominatedByColdCall.count(Succ);
             }))
      UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByColdCall);
  }
}

/// Calculate edge weights for successors that lead to unreachable.
///
/// Predict a successor that necessarily leads to an unreachable-terminated
/// block as extremely unlikely.
bool BranchProbabilityInfo::calcUnreachableHeuristics(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  (void) TI;
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  assert(!isa<InvokeInst>(TI) &&
         "Invokes should have already been handled by calcInvokeHeuristics");

  SmallVector<unsigned, 4> UnreachableEdges;
  SmallVector<unsigned, 4> ReachableEdges;

  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I)
    if (PostDominatedByUnreachable.count(*I))
      UnreachableEdges.push_back(I.getSuccessorIndex());
    else
      ReachableEdges.push_back(I.getSuccessorIndex());

  // Skip probabilities if all were reachable.
  if (UnreachableEdges.empty())
    return false;

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  if (ReachableEdges.empty()) {
    BranchProbability Prob(1, UnreachableEdges.size());
    for (unsigned SuccIdx : UnreachableEdges)
      EdgeProbabilities[SuccIdx] = Prob;
    setEdgeProbability(BB, EdgeProbabilities);
    return true;
  }

  auto UnreachableProb = UR_TAKEN_PROB;
  auto ReachableProb =
      (BranchProbability::getOne() - UR_TAKEN_PROB * UnreachableEdges.size()) /
      ReachableEdges.size();

  for (unsigned SuccIdx : UnreachableEdges)
    EdgeProbabilities[SuccIdx] = UnreachableProb;
  for (unsigned SuccIdx : ReachableEdges)
    EdgeProbabilities[SuccIdx] = ReachableProb;

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

// Propagate existing explicit probabilities from either profile data or
// 'expect' intrinsic processing. The metadata is also examined against the
// unreachable heuristic: the probability of an edge leading to an unreachable
// block is set to the minimum of the metadata value and the unreachable
// heuristic.
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI)))
    return false;

  MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
  if (!WeightsNode)
    return false;

  // Check that the number of successors is manageable.
  assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");

  // Ensure there are weights for all of the successors. Note that the first
  // operand to the metadata node is a name, not a weight.
  if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
    return false;

  // Build up the final weights that will be used in a temporary buffer.
  // Compute the sum of all weights to later decide whether they need to
  // be scaled to fit in 32 bits.
  uint64_t WeightSum = 0;
  SmallVector<uint32_t, 2> Weights;
  SmallVector<unsigned, 2> UnreachableIdxs;
  SmallVector<unsigned, 2> ReachableIdxs;
  Weights.reserve(TI->getNumSuccessors());
  for (unsigned i = 1, e = WeightsNode->getNumOperands(); i != e; ++i) {
    ConstantInt *Weight =
        mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(i));
    if (!Weight)
      return false;
    assert(Weight->getValue().getActiveBits() <= 32 &&
           "Too many bits for uint32_t");
    Weights.push_back(Weight->getZExtValue());
    WeightSum += Weights.back();
    if (PostDominatedByUnreachable.count(TI->getSuccessor(i - 1)))
      UnreachableIdxs.push_back(i - 1);
    else
      ReachableIdxs.push_back(i - 1);
  }
  assert(Weights.size() == TI->getNumSuccessors() && "Checked above");

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  uint64_t ScalingFactor =
      (WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;
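  // For example, if WeightSum were 2^33, ScalingFactor would be
  // 2^33 / UINT32_MAX + 1 == 3, and each weight would then be divided by 3
  // before the probabilities are computed.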

  if (ScalingFactor > 1) {
    WeightSum = 0;
    for (unsigned i = 0, e = TI->getNumSuccessors(); i != e; ++i) {
      Weights[i] /= ScalingFactor;
      WeightSum += Weights[i];
    }
  }
  assert(WeightSum <= UINT32_MAX &&
         "Expected weights to scale down to 32 bits");

  if (WeightSum == 0 || ReachableIdxs.size() == 0) {
    for (unsigned i = 0, e = TI->getNumSuccessors(); i != e; ++i)
      Weights[i] = 1;
    WeightSum = TI->getNumSuccessors();
  }

  // Set the probability.
  SmallVector<BranchProbability, 2> BP;
  for (unsigned i = 0, e = TI->getNumSuccessors(); i != e; ++i)
    BP.push_back({ Weights[i], static_cast<uint32_t>(WeightSum) });
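  // For example, branch_weights metadata of {1, 3} on a two-successor branch
  // yields initial probabilities of 1/4 and 3/4 before the unreachable
  // adjustment below.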

  // Examine the metadata against the unreachable heuristic.
  // If the unreachable heuristic is stronger, use it for this edge.
  if (UnreachableIdxs.size() > 0 && ReachableIdxs.size() > 0) {
    auto UnreachableProb = UR_TAKEN_PROB;
    for (auto i : UnreachableIdxs)
      if (UnreachableProb < BP[i]) {
        BP[i] = UnreachableProb;
      }

    // Because of possible rounding errors and the above fix-up for the
    // unreachable heuristic, the sum of the probabilities of all edges may be
    // less than 1.0. Distribute the remaining probability (calculated as
    // 1.0 - (sum of BP[i])) evenly among all the reachable edges.
    auto ToDistribute = BranchProbability::getOne();
    for (auto &P : BP)
      ToDistribute -= P;

    // If we modified the probability of some edges then we must distribute
    // the difference among the reachable blocks.
    // TODO: This spreads ToDistribute evenly over the reachable edges. A better
    // distribution would be proportional, so that the relative weights of the
    // reachable edges stay unchanged. That is, for any reachable edges i and j:
    //   newBP[i] / newBP[j] == oldBP[i] / oldBP[j]
    //   newBP[i] / oldBP[i] == newBP[j] / oldBP[j]
    //                       == Denominator / (Denominator - ToDistribute)
    //   newBP[i] = oldBP[i] * Denominator / (Denominator - ToDistribute)
    BranchProbability PerEdge = ToDistribute / ReachableIdxs.size();
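    // For example, if two reachable edges remain and ToDistribute is 1/4,
    // each reachable edge gains 1/8.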
    if (PerEdge > BranchProbability::getZero())
      for (auto i : ReachableIdxs)
        BP[i] += PerEdge;
  }

  setEdgeProbability(BB, BP);

  return true;
}

/// Calculate edge weights for edges leading to cold blocks.
///
/// A cold block is one post-dominated by a block with a call to a
/// cold function. Those edges are unlikely to be taken, so we give
/// them relatively low weight.
///
/// Return true if we could compute the weights for cold edges.
/// Return false otherwise.
bool BranchProbabilityInfo::calcColdCallHeuristics(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  (void) TI;
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  assert(!isa<InvokeInst>(TI) &&
         "Invokes should have already been handled by calcInvokeHeuristics");

  // Determine which successors are post-dominated by a cold block.
  SmallVector<unsigned, 4> ColdEdges;
  SmallVector<unsigned, 4> NormalEdges;
  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I)
    if (PostDominatedByColdCall.count(*I))
      ColdEdges.push_back(I.getSuccessorIndex());
    else
      NormalEdges.push_back(I.getSuccessorIndex());

  // Skip probabilities if there are no cold edges.
  if (ColdEdges.empty())
    return false;

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  if (NormalEdges.empty()) {
    BranchProbability Prob(1, ColdEdges.size());
    for (unsigned SuccIdx : ColdEdges)
      EdgeProbabilities[SuccIdx] = Prob;
    setEdgeProbability(BB, EdgeProbabilities);
    return true;
  }

  auto ColdProb = BranchProbability::getBranchProbability(
      CC_TAKEN_WEIGHT,
      (CC_TAKEN_WEIGHT + CC_NONTAKEN_WEIGHT) * uint64_t(ColdEdges.size()));
  auto NormalProb = BranchProbability::getBranchProbability(
      CC_NONTAKEN_WEIGHT,
      (CC_TAKEN_WEIGHT + CC_NONTAKEN_WEIGHT) * uint64_t(NormalEdges.size()));
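  // For example, with one cold and one normal successor this yields
  // probabilities of 4 / 68 (~5.9%) for the cold edge and 64 / 68 (~94.1%)
  // for the normal one.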

  for (unsigned SuccIdx : ColdEdges)
    EdgeProbabilities[SuccIdx] = ColdProb;
  for (unsigned SuccIdx : NormalEdges)
    EdgeProbabilities[SuccIdx] = NormalProb;

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

// Calculate Edge Weights using "Pointer Heuristics". Predict a comparison
// between two pointers, or between a pointer and NULL, will fail.
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI || !CI->isEquality())
    return false;

  Value *LHS = CI->getOperand(0);

  if (!LHS->getType()->isPointerTy())
    return false;

  assert(CI->getOperand(1)->getType()->isPointerTy());

  BranchProbability TakenProb(PH_TAKEN_WEIGHT,
                              PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(PH_NONTAKEN_WEIGHT,
                                PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);

  // p != 0   -> isProb = true
  // p == 0   -> isProb = false
  // p != q   -> isProb = true
  // p == q   -> isProb = false
  bool isProb = CI->getPredicate() == ICmpInst::ICMP_NE;
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}

static int getSCCNum(const BasicBlock *BB,
                     const BranchProbabilityInfo::SccInfo &SccI) {
  auto SccIt = SccI.SccNums.find(BB);
  if (SccIt == SccI.SccNums.end())
    return -1;
  return SccIt->second;
}

// Consider any block that is an entry point to the SCC as a header.
static bool isSCCHeader(const BasicBlock *BB, int SccNum,
                        BranchProbabilityInfo::SccInfo &SccI) {
  assert(getSCCNum(BB, SccI) == SccNum);

  // Lazily compute the set of headers for a given SCC and cache the results
  // in the SccHeaderMap.
  if (SccI.SccHeaders.size() <= static_cast<unsigned>(SccNum))
    SccI.SccHeaders.resize(SccNum + 1);
  auto &HeaderMap = SccI.SccHeaders[SccNum];
  bool Inserted;
  BranchProbabilityInfo::SccHeaderMap::iterator HeaderMapIt;
  std::tie(HeaderMapIt, Inserted) = HeaderMap.insert(std::make_pair(BB, false));
  if (Inserted) {
    bool IsHeader = llvm::any_of(make_range(pred_begin(BB), pred_end(BB)),
                                 [&](const BasicBlock *Pred) {
                                   return getSCCNum(Pred, SccI) != SccNum;
                                 });
    HeaderMapIt->second = IsHeader;
    return IsHeader;
  } else
    return HeaderMapIt->second;
}

// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock*> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //  int n = 0;
  //  while (...) {
  //    if (++n >= MAX) {
  //      n = 0;
  //    }
  //  }
  // In this sort of situation, taking the branch means that at the very least
  // it won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that set the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant.
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB
  SmallPtrSet<PHINode*, 8> VisitedInsts;
  SmallVector<PHINode*, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.back();
    WorkList.pop_back();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst ||
          std::find(succ_begin(BB), succ_end(BB), B) == succ_end(BB))
        continue;
      // First collapse InstChain
      for (Instruction *I : llvm::reverse(InstChain)) {
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}

// Calculate Edge Weights using "Loop Branch Heuristics". Predict backedges
// as taken, exiting edges as not-taken.
bool BranchProbabilityInfo::calcLoopBranchHeuristics(const BasicBlock *BB,
                                                     const LoopInfo &LI,
                                                     SccInfo &SccI) {
  int SccNum;
  Loop *L = LI.getLoopFor(BB);
  if (!L) {
    SccNum = getSCCNum(BB, SccI);
    if (SccNum < 0)
      return false;
  }

  SmallPtrSet<const BasicBlock*, 8> UnlikelyBlocks;
  if (L)
    computeUnlikelySuccessors(BB, L, UnlikelyBlocks);

  SmallVector<unsigned, 8> BackEdges;
  SmallVector<unsigned, 8> ExitingEdges;
  SmallVector<unsigned, 8> InEdges; // Edges from header to the loop.
  SmallVector<unsigned, 8> UnlikelyEdges;

  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
    // Use LoopInfo if we have it, otherwise fall back to SCC info to catch
    // irreducible loops.
    if (L) {
      if (UnlikelyBlocks.count(*I) != 0)
        UnlikelyEdges.push_back(I.getSuccessorIndex());
      else if (!L->contains(*I))
        ExitingEdges.push_back(I.getSuccessorIndex());
      else if (L->getHeader() == *I)
        BackEdges.push_back(I.getSuccessorIndex());
      else
        InEdges.push_back(I.getSuccessorIndex());
    } else {
      if (getSCCNum(*I, SccI) != SccNum)
        ExitingEdges.push_back(I.getSuccessorIndex());
      else if (isSCCHeader(*I, SccNum, SccI))
        BackEdges.push_back(I.getSuccessorIndex());
      else
        InEdges.push_back(I.getSuccessorIndex());
    }
  }

  if (BackEdges.empty() && ExitingEdges.empty() && UnlikelyEdges.empty())
    return false;

  // Collect the sum of probabilities of back-edges/in-edges/exiting-edges, and
  // normalize them so that they sum up to one.
  unsigned Denom = (BackEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                   (InEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                   (UnlikelyEdges.empty() ? 0 : LBH_UNLIKELY_WEIGHT) +
                   (ExitingEdges.empty() ? 0 : LBH_NONTAKEN_WEIGHT);
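  // For example, a block with one back edge and one exiting edge gets
  // Denom == 124 + 4 == 128, so the back edge is predicted with probability
  // 124 / 128 == 0.96875 and the exiting edge with 4 / 128 == 0.03125, as in
  // the example at the top of this file.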

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  if (uint32_t numBackEdges = BackEdges.size()) {
    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
    auto Prob = TakenProb / numBackEdges;
    for (unsigned SuccIdx : BackEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numInEdges = InEdges.size()) {
    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
    auto Prob = TakenProb / numInEdges;
    for (unsigned SuccIdx : InEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numExitingEdges = ExitingEdges.size()) {
    BranchProbability NotTakenProb = BranchProbability(LBH_NONTAKEN_WEIGHT,
                                                       Denom);
    auto Prob = NotTakenProb / numExitingEdges;
    for (unsigned SuccIdx : ExitingEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numUnlikelyEdges = UnlikelyEdges.size()) {
    BranchProbability UnlikelyProb = BranchProbability(LBH_UNLIKELY_WEIGHT,
                                                       Denom);
    auto Prob = UnlikelyProb / numUnlikelyEdges;
    for (unsigned SuccIdx : UnlikelyEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}
bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single-bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = dyn_cast<ConstantInt>(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function.
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  bool isProb;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp) {
    // strcmp and similar functions return zero, negative, or positive, if the
    // first string is equal to, less than, or greater than the second. We
    // consider it likely that the strings are not equal, so a comparison with
    // zero is probably false, and a comparison with any other number is also
    // probably false given that what exactly is returned for nonzero values
    // is not specified. Any kind of comparison other than equality we know
    // nothing about.
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isZero()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != 0 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SLT:
      // X < 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_SGT:
      // X > 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isOne() && CI->getPredicate() == CmpInst::ICMP_SLT) {
    // InstCombine canonicalizes X <= 0 into X < 1.
    // X <= 0 -> Unlikely
    isProb = false;
  } else if (CV->isMinusOne()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == -1 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != -1 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SGT:
      // InstCombine canonicalizes X >= 0 into X > -1.
      // X >= 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else {
    return false;
  }

  BranchProbability TakenProb(ZH_TAKEN_WEIGHT,
                              ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(ZH_NONTAKEN_WEIGHT,
                                ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}

bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond);
  if (!FCmp)
    return false;

  uint32_t TakenWeight = FPH_TAKEN_WEIGHT;
  uint32_t NontakenWeight = FPH_NONTAKEN_WEIGHT;
  bool isProb;
  if (FCmp->isEquality()) {
    // f1 == f2 -> Unlikely
    // f1 != f2 -> Likely
    isProb = !FCmp->isTrueWhenEqual();
  } else if (FCmp->getPredicate() == FCmpInst::FCMP_ORD) {
    // !isnan -> Likely
    isProb = true;
    TakenWeight = FPH_ORD_WEIGHT;
    NontakenWeight = FPH_UNO_WEIGHT;
  } else if (FCmp->getPredicate() == FCmpInst::FCMP_UNO) {
    // isnan -> Unlikely
    isProb = false;
    TakenWeight = FPH_ORD_WEIGHT;
    NontakenWeight = FPH_UNO_WEIGHT;
  } else {
    return false;
  }

  BranchProbability TakenProb(TakenWeight, TakenWeight + NontakenWeight);
  BranchProbability UntakenProb(NontakenWeight, TakenWeight + NontakenWeight);
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}
Jakub Staszak | 17af66a | 2011-07-31 03:27:24 +0000 | [diff] [blame] | 867 | |
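| | /// Predict the successors of an invoke using static weights: the normal
| | /// destination is assumed to be reached far more often than the unwind
| | /// (exception) destination.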
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 868 | bool BranchProbabilityInfo::calcInvokeHeuristics(const BasicBlock *BB) { |
| 869 | const InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator()); |
Bill Wendling | e1c5426 | 2012-08-15 12:22:35 +0000 | [diff] [blame] | 870 | if (!II) |
| 871 | return false; |
| 872 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 873 | BranchProbability TakenProb(IH_TAKEN_WEIGHT, |
| 874 | IH_TAKEN_WEIGHT + IH_NONTAKEN_WEIGHT); |
Yevgeny Rouban | 8138487 | 2020-05-21 11:49:11 +0700 | [diff] [blame^] | 875 | setEdgeProbability( |
| 876 | BB, SmallVector<BranchProbability, 2>({TakenProb, TakenProb.getCompl()})); |
Bill Wendling | e1c5426 | 2012-08-15 12:22:35 +0000 | [diff] [blame] | 877 | return true; |
| 878 | } |
| 879 | |
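| | /// Drop all cached edge probabilities together with the value-handle
| | /// callbacks that keep them in sync with the IR.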
Pete Cooper | b9d2e34 | 2015-05-28 19:43:06 +0000 | [diff] [blame] | 880 | void BranchProbabilityInfo::releaseMemory() { |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 881 | Probs.clear(); |
Nikita Popov | fe8abbf | 2020-04-07 21:21:30 +0200 | [diff] [blame] | 882 | Handles.clear(); |
Pete Cooper | b9d2e34 | 2015-05-28 19:43:06 +0000 | [diff] [blame] | 883 | } |
| 884 | |
Alina Sbirlea | 62a50a9 | 2020-01-15 14:02:33 -0800 | [diff] [blame] | 885 | bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA, |
| 886 | FunctionAnalysisManager::Invalidator &) { |
| 887 | // Check whether the analysis, all analyses on functions, or the function's |
| 888 | // CFG have been preserved. |
| 889 | auto PAC = PA.getChecker<BranchProbabilityAnalysis>(); |
| 890 | return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>() || |
| 891 | PAC.preservedSet<CFGAnalyses>()); |
| 892 | } |
| 893 | |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 894 | void BranchProbabilityInfo::print(raw_ostream &OS) const { |
Chandler Carruth | 1c8ace0 | 2011-10-23 21:21:50 +0000 | [diff] [blame] | 895 | OS << "---- Branch Probabilities ----\n"; |
| 896 | // We print the probabilities from the last function the analysis ran over, |
| 897 | // or the function it is currently running over. |
| 898 | assert(LastF && "Cannot print prior to running over a function"); |
Duncan P. N. Exon Smith | 5a82c91 | 2015-10-10 00:53:03 +0000 | [diff] [blame] | 899 | for (const auto &BI : *LastF) { |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 900 | for (const_succ_iterator SI = succ_begin(&BI), SE = succ_end(&BI); SI != SE; |
Duncan P. N. Exon Smith | 5a82c91 | 2015-10-10 00:53:03 +0000 | [diff] [blame] | 901 | ++SI) { |
| 902 | printEdgeProbability(OS << " ", &BI, *SI); |
Duncan P. N. Exon Smith | 6c99015 | 2014-07-21 17:06:51 +0000 | [diff] [blame] | 903 | } |
| 904 | } |
Chandler Carruth | 1c8ace0 | 2011-10-23 21:21:50 +0000 | [diff] [blame] | 905 | } |
| 906 | |
Jakub Staszak | efd94c8 | 2011-07-29 19:30:00 +0000 | [diff] [blame] | 907 | bool BranchProbabilityInfo:: |
| 908 | isEdgeHot(const BasicBlock *Src, const BasicBlock *Dst) const { |
Andrew Trick | 3d4e64b | 2011-06-11 01:05:22 +0000 | [diff] [blame] | 909 | // Hot probability is greater than 4/5 = 80%
Benjamin Kramer | 929f53f | 2011-10-23 11:19:14 +0000 | [diff] [blame] | 910 | // FIXME: Compare against a static "hot" BranchProbability. |
| 911 | return getEdgeProbability(Src, Dst) > BranchProbability(4, 5); |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 912 | } |
| 913 | |
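| | /// Return the successor of BB that is reached with probability greater than
| | /// 80%, or null if no successor is that likely.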
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 914 | const BasicBlock * |
| 915 | BranchProbabilityInfo::getHotSucc(const BasicBlock *BB) const { |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 916 | auto MaxProb = BranchProbability::getZero(); |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 917 | const BasicBlock *MaxSucc = nullptr; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 918 | |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 919 | for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) { |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 920 | const BasicBlock *Succ = *I; |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 921 | auto Prob = getEdgeProbability(BB, Succ); |
| 922 | if (Prob > MaxProb) { |
| 923 | MaxProb = Prob; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 924 | MaxSucc = Succ; |
| 925 | } |
| 926 | } |
| 927 | |
Benjamin Kramer | 929f53f | 2011-10-23 11:19:14 +0000 | [diff] [blame] | 928 | // Hot probability is greater than 4/5 = 80%
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 929 | if (MaxProb > BranchProbability(4, 5)) |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 930 | return MaxSucc; |
| 931 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 932 | return nullptr; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 933 | } |
| 934 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 935 | /// Get the raw edge probability for the edge. If we can't find it, return a
| 936 | /// default probability of 1/N, where N is the number of successors. Here an
| 937 | /// edge is specified by its source block and an index into that block's
| 938 | /// successors.
| 939 | BranchProbability |
| 940 | BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src, |
| 941 | unsigned IndexInSuccessors) const { |
| 942 | auto I = Probs.find(std::make_pair(Src, IndexInSuccessors)); |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 943 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 944 | if (I != Probs.end()) |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 945 | return I->second; |
| 946 | |
Vedant Kumar | e0b5f86 | 2018-05-10 23:01:54 +0000 | [diff] [blame] | 947 | return {1, static_cast<uint32_t>(succ_size(Src))}; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 948 | } |
| 949 | |
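| | /// Get the probability of the edge denoted by a successor iterator, by
| | /// forwarding to the index-based overload.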
Cong Hou | d97c100 | 2015-12-01 05:29:22 +0000 | [diff] [blame] | 950 | BranchProbability |
| 951 | BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src, |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 952 | const_succ_iterator Dst) const { |
Cong Hou | d97c100 | 2015-12-01 05:29:22 +0000 | [diff] [blame] | 953 | return getEdgeProbability(Src, Dst.getSuccessorIndex()); |
| 954 | } |
| 955 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 956 | /// Get the raw edge probability calculated for the block pair. This returns
| 957 | /// the sum of all raw edge probabilities from Src to Dst.
| 958 | BranchProbability |
| 959 | BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src, |
| 960 | const BasicBlock *Dst) const { |
| 961 | auto Prob = BranchProbability::getZero(); |
| 962 | bool FoundProb = false; |
Evgeniy Brevnov | bb0842a | 2020-04-29 14:08:01 +0700 | [diff] [blame] | 963 | uint32_t EdgeCount = 0; |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 964 | for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I) |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 965 | if (*I == Dst) { |
Evgeniy Brevnov | bb0842a | 2020-04-29 14:08:01 +0700 | [diff] [blame] | 966 | ++EdgeCount; |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 967 | auto MapI = Probs.find(std::make_pair(Src, I.getSuccessorIndex())); |
| 968 | if (MapI != Probs.end()) { |
| 969 | FoundProb = true; |
| 970 | Prob += MapI->second; |
| 971 | } |
| 972 | } |
| 973 | uint32_t succ_num = std::distance(succ_begin(Src), succ_end(Src)); |
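| | // If no explicit probability was recorded for any Src->Dst edge, fall back
| | // to a uniform distribution, counting each of the (possibly multiple) edges
| | // from Src to Dst.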
Evgeniy Brevnov | bb0842a | 2020-04-29 14:08:01 +0700 | [diff] [blame] | 974 | return FoundProb ? Prob : BranchProbability(EdgeCount, succ_num); |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 975 | } |
| 976 | |
| 977 | /// Set the edge probability for a given edge specified by its source block
| 978 | /// and an index into that block's successors.
| 979 | void BranchProbabilityInfo::setEdgeProbability(const BasicBlock *Src, |
| 980 | unsigned IndexInSuccessors, |
| 981 | BranchProbability Prob) { |
| 982 | Probs[std::make_pair(Src, IndexInSuccessors)] = Prob; |
Igor Laevsky | ee40d1e | 2016-07-15 14:31:16 +0000 | [diff] [blame] | 983 | Handles.insert(BasicBlockCallbackVH(Src, this)); |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 984 | LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " |
| 985 | << IndexInSuccessors << " successor probability to " << Prob |
| 986 | << "\n"); |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 987 | } |
| 988 | |
Yevgeny Rouban | 8138487 | 2020-05-21 11:49:11 +0700 | [diff] [blame^] | 989 | /// Set the edge probability for all edges at once. |
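| | /// The probabilities are expected to sum to (approximately) one. For example,
| | /// a three-way terminator given 1/3 for each successor stores rounded
| | /// numerators whose sum is one more than the denominator; the assertions
| | /// below tolerate an error of up to Probs.size().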
| 990 | void BranchProbabilityInfo::setEdgeProbability( |
| 991 | const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) { |
| 992 | assert(Src->getTerminator()->getNumSuccessors() == Probs.size()); |
| 993 | if (Probs.empty())
| 994 | return; // Nothing to set. |
| 995 | |
| 996 | uint64_t TotalNumerator = 0; |
| 997 | for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) { |
| 998 | setEdgeProbability(Src, SuccIdx, Probs[SuccIdx]); |
| 999 | TotalNumerator += Probs[SuccIdx].getNumerator(); |
| 1000 | } |
| 1001 | |
| 1002 | // Because of rounding errors the total probability cannot be checked to be
| 1003 | // exactly 1.0, i.e. TotalNumerator == BranchProbability::getDenominator()
| 1004 | // need not hold. Instead, every single probability in Probs must be as
| 1005 | // accurate as possible. This bounds each entry's error by 1/denominator and
| 1006 | // the total absolute error by Probs.size() / BranchProbability::getDenominator().
| 1007 | assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size()); |
| 1008 | assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size()); |
| 1009 | } |
| 1010 | |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 1011 | raw_ostream & |
Chandler Carruth | 1c8ace0 | 2011-10-23 21:21:50 +0000 | [diff] [blame] | 1012 | BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS, |
| 1013 | const BasicBlock *Src, |
| 1014 | const BasicBlock *Dst) const { |
Jakub Staszak | 12a43bd | 2011-06-16 20:22:37 +0000 | [diff] [blame] | 1015 | const BranchProbability Prob = getEdgeProbability(Src, Dst); |
Benjamin Kramer | 1f97a5a | 2011-11-15 16:27:03 +0000 | [diff] [blame] | 1016 | OS << "edge " << Src->getName() << " -> " << Dst->getName() |
Andrew Trick | 3d4e64b | 2011-06-11 01:05:22 +0000 | [diff] [blame] | 1017 | << " probability is " << Prob |
| 1018 | << (isEdgeHot(Src, Dst) ? " [HOT edge]\n" : "\n"); |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 1019 | |
| 1020 | return OS; |
| 1021 | } |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1022 | |
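| | /// Remove any cached probabilities for edges whose source block is BB.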
Igor Laevsky | ee40d1e | 2016-07-15 14:31:16 +0000 | [diff] [blame] | 1023 | void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) { |
| 1024 | for (auto I = Probs.begin(), E = Probs.end(); I != E; ++I) { |
| 1025 | auto Key = I->first; |
| 1026 | if (Key.first == BB) |
| 1027 | Probs.erase(Key); |
| 1028 | } |
| 1029 | } |
| 1030 | |
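| | /// Compute branch probabilities for every multi-successor block in \p F,
| | /// trying each heuristic in turn (profile metadata first, then the invoke,
| | /// unreachable, cold-call, loop, pointer, zero and floating-point
| | /// heuristics) and keeping the result of the first one that applies.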
John Brawn | da4a68a | 2017-06-08 09:44:40 +0000 | [diff] [blame] | 1031 | void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LI, |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1032 | const TargetLibraryInfo *TLI, |
| 1033 | PostDominatorTree *PDT) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1034 | LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName() |
| 1035 | << " ----\n\n"); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1036 | LastF = &F; // Store the last function we ran on for printing. |
| 1037 | assert(PostDominatedByUnreachable.empty()); |
| 1038 | assert(PostDominatedByColdCall.empty()); |
| 1039 | |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1040 | // Record SCC numbers of blocks in the CFG to identify irreducible loops. |
| 1041 | // FIXME: We could only calculate this if the CFG is known to be irreducible |
| 1042 | // (perhaps cache this info in LoopInfo if we can easily calculate it there?). |
| 1043 | int SccNum = 0; |
| 1044 | SccInfo SccI; |
| 1045 | for (scc_iterator<const Function *> It = scc_begin(&F); !It.isAtEnd(); |
| 1046 | ++It, ++SccNum) { |
| 1047 | // Ignore single-block SCCs since they either aren't loops or LoopInfo will |
| 1048 | // catch them. |
| 1049 | const std::vector<const BasicBlock *> &Scc = *It; |
| 1050 | if (Scc.size() == 1) |
| 1051 | continue; |
| 1052 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1053 | LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":"); |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1054 | for (auto *BB : Scc) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1055 | LLVM_DEBUG(dbgs() << " " << BB->getName()); |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1056 | SccI.SccNums[BB] = SccNum; |
| 1057 | } |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1058 | LLVM_DEBUG(dbgs() << "\n"); |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1059 | } |
| 1060 | |
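| | // If the caller did not supply a post-dominator tree, build one locally for
| | // the unreachable and cold-call post-domination computations below.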
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1061 | std::unique_ptr<PostDominatorTree> PDTPtr; |
| 1062 | |
| 1063 | if (!PDT) { |
| 1064 | PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F)); |
| 1065 | PDT = PDTPtr.get(); |
| 1066 | } |
| 1067 | |
| 1068 | computePostDominatedByUnreachable(F, PDT); |
| 1069 | computePostDominatedByColdCall(F, PDT); |
Taewook Oh | 2da205d | 2019-12-02 10:15:22 -0800 | [diff] [blame] | 1070 | |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1071 | // Walk the basic blocks in post-order so that we can build up state about |
| 1072 | // the successors of a block iteratively. |
| 1073 | for (const auto *BB : post_order(&F.getEntryBlock())) {
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1074 | LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName() |
| 1075 | << "\n"); |
Serguei Katkov | 11d9c4f | 2017-04-17 06:39:47 +0000 | [diff] [blame] | 1076 | // If there are fewer than two successors, there is no probability to set.
| 1077 | if (BB->getTerminator()->getNumSuccessors() < 2) |
| 1078 | continue; |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1079 | if (calcMetadataWeights(BB)) |
| 1080 | continue; |
Artur Pilipenko | 4d063e7 | 2018-06-08 13:03:21 +0000 | [diff] [blame] | 1081 | if (calcInvokeHeuristics(BB)) |
| 1082 | continue; |
Serguei Katkov | 2616bbb | 2017-04-17 04:33:04 +0000 | [diff] [blame] | 1083 | if (calcUnreachableHeuristics(BB)) |
| 1084 | continue; |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1085 | if (calcColdCallHeuristics(BB)) |
| 1086 | continue; |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1087 | if (calcLoopBranchHeuristics(BB, LI, SccI)) |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1088 | continue; |
| 1089 | if (calcPointerHeuristics(BB)) |
| 1090 | continue; |
John Brawn | da4a68a | 2017-06-08 09:44:40 +0000 | [diff] [blame] | 1091 | if (calcZeroHeuristics(BB, TLI)) |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1092 | continue; |
| 1093 | if (calcFloatingPointHeuristics(BB)) |
| 1094 | continue; |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1095 | } |
| 1096 | |
| 1097 | PostDominatedByUnreachable.clear(); |
| 1098 | PostDominatedByColdCall.clear(); |
Hiroshi Yamauchi | 63e17eb | 2017-08-26 00:31:00 +0000 | [diff] [blame] | 1099 | |
| 1100 | if (PrintBranchProb && |
| 1101 | (PrintBranchProbFuncName.empty() || |
| 1102 | F.getName().equals(PrintBranchProbFuncName))) { |
| 1103 | print(dbgs()); |
| 1104 | } |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1105 | } |
| 1106 | |
| 1107 | void BranchProbabilityInfoWrapperPass::getAnalysisUsage( |
| 1108 | AnalysisUsage &AU) const { |
Mikael Holmen | 2ca1689 | 2018-05-17 09:05:40 +0000 | [diff] [blame] | 1109 | // We require DT so it's available when LI is available. The LI updating code |
| 1110 | // asserts that DT is also present so if we don't make sure that we have DT |
| 1111 | // here, that assert will trigger. |
| 1112 | AU.addRequired<DominatorTreeWrapperPass>(); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1113 | AU.addRequired<LoopInfoWrapperPass>(); |
John Brawn | da4a68a | 2017-06-08 09:44:40 +0000 | [diff] [blame] | 1114 | AU.addRequired<TargetLibraryInfoWrapperPass>(); |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1115 | AU.addRequired<PostDominatorTreeWrapperPass>(); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1116 | AU.setPreservesAll(); |
| 1117 | } |
| 1118 | |
| 1119 | bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) { |
| 1120 | const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo(); |
Teresa Johnson | 9c27b59 | 2019-09-07 03:09:36 +0000 | [diff] [blame] | 1121 | const TargetLibraryInfo &TLI = |
| 1122 | getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F); |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1123 | PostDominatorTree &PDT = |
| 1124 | getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree(); |
| 1125 | BPI.calculate(F, LI, &TLI, &PDT); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1126 | return false; |
| 1127 | } |
| 1128 | |
| 1129 | void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); } |
| 1130 | |
| 1131 | void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS, |
| 1132 | const Module *) const { |
| 1133 | BPI.print(OS); |
| 1134 | } |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1135 | |
Chandler Carruth | dab4eae | 2016-11-23 17:53:26 +0000 | [diff] [blame] | 1136 | AnalysisKey BranchProbabilityAnalysis::Key; |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1137 | BranchProbabilityInfo |
Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 1138 | BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) { |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1139 | BranchProbabilityInfo BPI; |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1140 | BPI.calculate(F, AM.getResult<LoopAnalysis>(F), |
| 1141 | &AM.getResult<TargetLibraryAnalysis>(F), |
| 1142 | &AM.getResult<PostDominatorTreeAnalysis>(F)); |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1143 | return BPI; |
| 1144 | } |
| 1145 | |
| 1146 | PreservedAnalyses |
Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 1147 | BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) { |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1148 | OS << "Printing analysis results of BPI for function " |
| 1149 | << "'" << F.getName() << "':" |
| 1150 | << "\n"; |
| 1151 | AM.getResult<BranchProbabilityAnalysis>(F).print(OS); |
| 1152 | return PreservedAnalyses::all(); |
| 1153 | } |