//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Loops should be simplified before this analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "branch-prob"

static cl::opt<bool> PrintBranchProb(
    "print-bpi", cl::init(false), cl::Hidden,
    cl::desc("Print the branch probability info."));

cl::opt<std::string> PrintBranchProbFuncName(
    "print-bpi-func-name", cl::Hidden,
    cl::desc("The option to specify the name of the function "
             "whose branch probability info is printed."));

INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
                      "Branch Probability Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
                    "Branch Probability Analysis", false, true)

BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  initializeBranchProbabilityInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

char BranchProbabilityInfoWrapperPass::ID = 0;

// Weights are for internal use only. They are used by heuristics to help to
// estimate edges' probability. Example:
//
// Using "Loop Branch Heuristics" we predict weights of edges for the
// block BB2.
//         ...
//          |
//          V
//         BB1<-+
//          |   |
//          |   | (Weight = 124)
//          V   |
//         BB2--+
//          |
//          | (Weight = 4)
//          V
//         BB3
//
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
// Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
static const uint32_t LBH_TAKEN_WEIGHT = 124;
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;
// Unlikely edges within a loop are half as likely as other edges.
static const uint32_t LBH_UNLIKELY_WEIGHT = 62;

/// Unreachable-terminating branch taken probability.
///
/// This is the probability for a branch being taken to a block that terminates
/// (eventually) in unreachable. These are predicted as unlikely as possible.
/// The remaining probability is distributed proportionally among the edges to
/// reachable blocks.
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);
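// Note: BranchProbability uses a fixed denominator, so getRaw(1) is the
// smallest representable nonzero probability; such edges are therefore treated
// as close to never-taken as the representation allows.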

/// Weight for a branch taken going into a cold block.
///
/// This is the weight for a branch taken toward a block marked
/// cold. A block is marked cold if it's postdominated by a
/// block containing a call to a cold function. Cold functions
/// are those marked with attribute 'cold'.
static const uint32_t CC_TAKEN_WEIGHT = 4;

/// Weight for a branch not-taken into a cold block.
///
/// This is the weight for a branch not taken toward a block marked
/// cold.
static const uint32_t CC_NONTAKEN_WEIGHT = 64;
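// For example, with one cold and one normal successor these weights give the
// cold edge a probability of 4 / (4 + 64), roughly 5.9%, and the normal edge
// the remaining ~94.1% (see calcColdCallHeuristics below).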

static const uint32_t PH_TAKEN_WEIGHT = 20;
static const uint32_t PH_NONTAKEN_WEIGHT = 12;
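// The pointer heuristic predicts the "likely" outcome with probability
// 20 / (20 + 12) = 0.625 and the other outcome with 0.375.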

static const uint32_t ZH_TAKEN_WEIGHT = 20;
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;

static const uint32_t FPH_TAKEN_WEIGHT = 20;
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;

/// This is the weight for an ordered floating point comparison.
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;
/// This is the weight for an unordered floating point comparison, which means
/// that at least one of the operands is a NaN. It is usually used to test for
/// an exceptional case, so the result is unlikely.
static const uint32_t FPH_UNO_WEIGHT = 1;
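// With these weights an ordered comparison is predicted taken with probability
// (1024 * 1024 - 1) / (1024 * 1024), i.e. about 0.999999, while the unordered
// (NaN) side gets roughly 1e-6 (see calcFloatingPointHeuristics below).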

/// Invoke-terminating normal branch taken weight
///
/// This is the weight for branching to the normal destination of an invoke
/// instruction. We expect this to happen most of the time. Set the weight to an
/// absurdly high value so that nested loops subsume it.
static const uint32_t IH_TAKEN_WEIGHT = 1024 * 1024 - 1;

/// Invoke-terminating normal branch not-taken weight.
///
/// This is the weight for branching to the unwind destination of an invoke
/// instruction. This is essentially never taken.
static const uint32_t IH_NONTAKEN_WEIGHT = 1;
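// Together these give the normal destination of an invoke a probability of
// (1024 * 1024 - 1) / (1024 * 1024), about 0.999999, and the unwind
// destination the remaining ~1e-6 (see calcInvokeHeuristics below).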

static void UpdatePDTWorklist(const BasicBlock *BB, PostDominatorTree *PDT,
                              SmallVectorImpl<const BasicBlock *> &WorkList,
                              SmallPtrSetImpl<const BasicBlock *> &TargetSet) {
  SmallVector<BasicBlock *, 8> Descendants;
  SmallPtrSet<const BasicBlock *, 16> NewItems;

  PDT->getDescendants(const_cast<BasicBlock *>(BB), Descendants);
  for (auto *BB : Descendants)
    if (TargetSet.insert(BB).second)
      for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
        if (!TargetSet.count(*PI))
          NewItems.insert(*PI);
  WorkList.insert(WorkList.end(), NewItems.begin(), NewItems.end());
}

/// Compute a set of basic blocks that are post-dominated by unreachables.
void BranchProbabilityInfo::computePostDominatedByUnreachable(
    const Function &F, PostDominatorTree *PDT) {
  SmallVector<const BasicBlock *, 8> WorkList;
  for (auto &BB : F) {
    const Instruction *TI = BB.getTerminator();
    if (TI->getNumSuccessors() == 0) {
      if (isa<UnreachableInst>(TI) ||
          // If this block is terminated by a call to
          // @llvm.experimental.deoptimize then treat it like an unreachable
          // since the @llvm.experimental.deoptimize call is expected to
          // practically never execute.
          BB.getTerminatingDeoptimizeCall())
        UpdatePDTWorklist(&BB, PDT, WorkList, PostDominatedByUnreachable);
    }
  }

  while (!WorkList.empty()) {
    const BasicBlock *BB = WorkList.pop_back_val();
    if (PostDominatedByUnreachable.count(BB))
      continue;
    // If the terminator is an InvokeInst, check only the normal destination
    // block as the unwind edge of InvokeInst is also very unlikely taken.
    if (auto *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      if (PostDominatedByUnreachable.count(II->getNormalDest()))
        UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByUnreachable);
    }
    // If all the successors are unreachable, BB is unreachable as well.
    else if (!successors(BB).empty() &&
             llvm::all_of(successors(BB), [this](const BasicBlock *Succ) {
               return PostDominatedByUnreachable.count(Succ);
             }))
      UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByUnreachable);
  }
}

/// Compute a set of basic blocks that are post-dominated by cold calls.
void BranchProbabilityInfo::computePostDominatedByColdCall(
    const Function &F, PostDominatorTree *PDT) {
  SmallVector<const BasicBlock *, 8> WorkList;
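  // Seed the worklist with every block that directly contains a call marked
  // with the 'cold' attribute.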
  for (auto &BB : F)
    for (auto &I : BB)
      if (const CallInst *CI = dyn_cast<CallInst>(&I))
        if (CI->hasFnAttr(Attribute::Cold))
          UpdatePDTWorklist(&BB, PDT, WorkList, PostDominatedByColdCall);

  while (!WorkList.empty()) {
    const BasicBlock *BB = WorkList.pop_back_val();

    // If the terminator is an InvokeInst, check only the normal destination
    // block as the unwind edge of InvokeInst is also very unlikely taken.
    if (auto *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      if (PostDominatedByColdCall.count(II->getNormalDest()))
        UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByColdCall);
    }
    // If all of the successors are post-dominated then BB is also done.
    else if (!successors(BB).empty() &&
             llvm::all_of(successors(BB), [this](const BasicBlock *Succ) {
               return PostDominatedByColdCall.count(Succ);
             }))
      UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByColdCall);
  }
}

/// Calculate edge weights for successors that lead to unreachable blocks.
///
/// Predict a successor that necessarily leads to an
/// unreachable-terminated block as extremely unlikely.
bool BranchProbabilityInfo::calcUnreachableHeuristics(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  (void) TI;
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  assert(!isa<InvokeInst>(TI) &&
         "Invokes should have already been handled by calcInvokeHeuristics");

  SmallVector<unsigned, 4> UnreachableEdges;
  SmallVector<unsigned, 4> ReachableEdges;

  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I)
    if (PostDominatedByUnreachable.count(*I))
      UnreachableEdges.push_back(I.getSuccessorIndex());
    else
      ReachableEdges.push_back(I.getSuccessorIndex());

  // Skip probabilities if all were reachable.
  if (UnreachableEdges.empty())
    return false;

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  if (ReachableEdges.empty()) {
    BranchProbability Prob(1, UnreachableEdges.size());
    for (unsigned SuccIdx : UnreachableEdges)
      EdgeProbabilities[SuccIdx] = Prob;
    setEdgeProbability(BB, EdgeProbabilities);
    return true;
  }

  auto UnreachableProb = UR_TAKEN_PROB;
  auto ReachableProb =
      (BranchProbability::getOne() - UR_TAKEN_PROB * UnreachableEdges.size()) /
      ReachableEdges.size();
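  // Every unreachable successor keeps the minimal UR_TAKEN_PROB; whatever
  // probability mass remains is split evenly among the reachable successors.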

  for (unsigned SuccIdx : UnreachableEdges)
    EdgeProbabilities[SuccIdx] = UnreachableProb;
  for (unsigned SuccIdx : ReachableEdges)
    EdgeProbabilities[SuccIdx] = ReachableProb;

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

// Propagate existing explicit probabilities from either profile data or
// 'expect' intrinsic processing. Examine the metadata against the unreachable
// heuristic; the probability of an edge coming to an unreachable block is
// set to the minimum of the metadata value and the unreachable heuristic.
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI)))
    return false;

  MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
  if (!WeightsNode)
    return false;

  // Check that the number of successors is manageable.
  assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");

  // Ensure there are weights for all of the successors. Note that the first
  // operand to the metadata node is a name, not a weight.
  if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
    return false;

  // Build up the final weights that will be used in a temporary buffer.
  // Compute the sum of all weights to later decide whether they need to
  // be scaled to fit in 32 bits.
  uint64_t WeightSum = 0;
  SmallVector<uint32_t, 2> Weights;
  SmallVector<unsigned, 2> UnreachableIdxs;
  SmallVector<unsigned, 2> ReachableIdxs;
  Weights.reserve(TI->getNumSuccessors());
  for (unsigned I = 1, E = WeightsNode->getNumOperands(); I != E; ++I) {
    ConstantInt *Weight =
        mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(I));
    if (!Weight)
      return false;
    assert(Weight->getValue().getActiveBits() <= 32 &&
           "Too many bits for uint32_t");
    Weights.push_back(Weight->getZExtValue());
    WeightSum += Weights.back();
    if (PostDominatedByUnreachable.count(TI->getSuccessor(I - 1)))
      UnreachableIdxs.push_back(I - 1);
    else
      ReachableIdxs.push_back(I - 1);
  }
  assert(Weights.size() == TI->getNumSuccessors() && "Checked above");

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  uint64_t ScalingFactor =
      (WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;

  if (ScalingFactor > 1) {
    WeightSum = 0;
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I) {
      Weights[I] /= ScalingFactor;
      WeightSum += Weights[I];
    }
  }
  assert(WeightSum <= UINT32_MAX &&
         "Expected weights to scale down to 32 bits");

  if (WeightSum == 0 || ReachableIdxs.size() == 0) {
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
      Weights[I] = 1;
    WeightSum = TI->getNumSuccessors();
  }

  // Set the probability.
  SmallVector<BranchProbability, 2> BP;
  for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
    BP.push_back({ Weights[I], static_cast<uint32_t>(WeightSum) });
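  // At this point BP[i] == Weights[i] / WeightSum for every successor; the
  // code below may lower the entries that lead to unreachable blocks and then
  // rescale the remaining (reachable) entries so that the sum stays 1.0.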

  // Examine the metadata against the unreachable heuristic.
  // If the unreachable heuristic is stronger, then we use it for this edge.
  if (UnreachableIdxs.size() == 0 || ReachableIdxs.size() == 0) {
    setEdgeProbability(BB, BP);
    return true;
  }

  auto UnreachableProb = UR_TAKEN_PROB;
  for (auto I : UnreachableIdxs)
    if (UnreachableProb < BP[I]) {
      BP[I] = UnreachableProb;
    }

  // The sum of all edge probabilities must be 1.0. If we modified the
  // probability of some edges then we must distribute the introduced
  // difference over the reachable blocks.
  //
  // Proportional distribution: the relation between probabilities of the
  // reachable edges is kept unchanged. That is, for any reachable edges i and j:
  //   newBP[i] / newBP[j] == oldBP[i] / oldBP[j] =>
  //   newBP[i] / oldBP[i] == newBP[j] / oldBP[j] == K
  // where K is independent of i and j.
  //   newBP[i] == oldBP[i] * K
  // We need to find K.
  // Summing over the reachable edges on both sides gives:
  //   sum_of_reachable(newBP) == K * sum_of_reachable(oldBP)
  // The sum of newBP must be equal to 1.0:
  //   sum_of_reachable(newBP) + sum_of_unreachable(newBP) == 1.0 =>
  //   sum_of_reachable(newBP) = 1.0 - sum_of_unreachable(newBP)
  // where sum_of_unreachable(newBP) is what has just been changed.
  // Finally:
  //   K == sum_of_reachable(newBP) / sum_of_reachable(oldBP) =>
  //   K == (1.0 - sum_of_unreachable(newBP)) / sum_of_reachable(oldBP)
  BranchProbability NewUnreachableSum = BranchProbability::getZero();
  for (auto I : UnreachableIdxs)
    NewUnreachableSum += BP[I];

  BranchProbability NewReachableSum =
      BranchProbability::getOne() - NewUnreachableSum;

  BranchProbability OldReachableSum = BranchProbability::getZero();
  for (auto I : ReachableIdxs)
    OldReachableSum += BP[I];

  if (OldReachableSum != NewReachableSum) { // Anything to distribute?
    if (OldReachableSum.isZero()) {
      // If all oldBP[i] are zeroes then the proportional distribution results
      // in all zero probabilities and the error stays big. In this case we
      // evenly spread NewReachableSum over the reachable edges.
      BranchProbability PerEdge = NewReachableSum / ReachableIdxs.size();
      for (auto I : ReachableIdxs)
        BP[I] = PerEdge;
    } else {
      for (auto I : ReachableIdxs) {
        // We use uint64_t to avoid double rounding error of the following
        // calculation: BP[i] = BP[i] * NewReachableSum / OldReachableSum
        // The formula is taken from the private constructor
        // BranchProbability(uint32_t Numerator, uint32_t Denominator)
        uint64_t Mul = static_cast<uint64_t>(NewReachableSum.getNumerator()) *
                       BP[I].getNumerator();
        uint32_t Div = static_cast<uint32_t>(
            divideNearest(Mul, OldReachableSum.getNumerator()));
        BP[I] = BranchProbability::getRaw(Div);
      }
    }
  }

  setEdgeProbability(BB, BP);

  return true;
}

/// Calculate edge weights for edges leading to cold blocks.
///
/// A cold block is one post-dominated by a block with a call to a
/// cold function. Those edges are unlikely to be taken, so we give
/// them relatively low weight.
///
/// Return true if we could compute the weights for cold edges.
/// Return false otherwise.
bool BranchProbabilityInfo::calcColdCallHeuristics(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  (void) TI;
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  assert(!isa<InvokeInst>(TI) &&
         "Invokes should have already been handled by calcInvokeHeuristics");

  // Determine which successors are post-dominated by a cold block.
  SmallVector<unsigned, 4> ColdEdges;
  SmallVector<unsigned, 4> NormalEdges;
  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I)
    if (PostDominatedByColdCall.count(*I))
      ColdEdges.push_back(I.getSuccessorIndex());
    else
      NormalEdges.push_back(I.getSuccessorIndex());

  // Skip probabilities if no cold edges.
  if (ColdEdges.empty())
    return false;

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  if (NormalEdges.empty()) {
    BranchProbability Prob(1, ColdEdges.size());
    for (unsigned SuccIdx : ColdEdges)
      EdgeProbabilities[SuccIdx] = Prob;
    setEdgeProbability(BB, EdgeProbabilities);
    return true;
  }

  auto ColdProb = BranchProbability::getBranchProbability(
      CC_TAKEN_WEIGHT,
      (CC_TAKEN_WEIGHT + CC_NONTAKEN_WEIGHT) * uint64_t(ColdEdges.size()));
  auto NormalProb = BranchProbability::getBranchProbability(
      CC_NONTAKEN_WEIGHT,
      (CC_TAKEN_WEIGHT + CC_NONTAKEN_WEIGHT) * uint64_t(NormalEdges.size()));

  for (unsigned SuccIdx : ColdEdges)
    EdgeProbabilities[SuccIdx] = ColdProb;
  for (unsigned SuccIdx : NormalEdges)
    EdgeProbabilities[SuccIdx] = NormalProb;

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

// Calculate Edge Weights using "Pointer Heuristics". Predict that a comparison
// between two pointers, or between a pointer and NULL, will fail.
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI || !CI->isEquality())
    return false;

  Value *LHS = CI->getOperand(0);

  if (!LHS->getType()->isPointerTy())
    return false;

  assert(CI->getOperand(1)->getType()->isPointerTy());

  BranchProbability TakenProb(PH_TAKEN_WEIGHT,
                              PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(PH_NONTAKEN_WEIGHT,
                                PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);

  // p != 0 -> isProb = true
  // p == 0 -> isProb = false
  // p != q -> isProb = true
  // p == q -> isProb = false;
  bool isProb = CI->getPredicate() == ICmpInst::ICMP_NE;
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}

static int getSCCNum(const BasicBlock *BB,
                     const BranchProbabilityInfo::SccInfo &SccI) {
  auto SccIt = SccI.SccNums.find(BB);
  if (SccIt == SccI.SccNums.end())
    return -1;
  return SccIt->second;
}

// Consider any block that is an entry point to the SCC as a header.
static bool isSCCHeader(const BasicBlock *BB, int SccNum,
                        BranchProbabilityInfo::SccInfo &SccI) {
  assert(getSCCNum(BB, SccI) == SccNum);

  // Lazily compute the set of headers for a given SCC and cache the results
  // in the SccHeaderMap.
  if (SccI.SccHeaders.size() <= static_cast<unsigned>(SccNum))
    SccI.SccHeaders.resize(SccNum + 1);
  auto &HeaderMap = SccI.SccHeaders[SccNum];
  bool Inserted;
  BranchProbabilityInfo::SccHeaderMap::iterator HeaderMapIt;
  std::tie(HeaderMapIt, Inserted) = HeaderMap.insert(std::make_pair(BB, false));
  if (Inserted) {
    bool IsHeader = llvm::any_of(make_range(pred_begin(BB), pred_end(BB)),
                                 [&](const BasicBlock *Pred) {
                                   return getSCCNum(Pred, SccI) != SccNum;
                                 });
    HeaderMapIt->second = IsHeader;
    return IsHeader;
  } else
    return HeaderMapIt->second;
}

// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock*> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //   int n = 0;
  //   while (...) {
  //     if (++n >= MAX) {
  //       n = 0;
  //     }
  //   }
  // In this sort of situation taking the branch means that at the very least it
  // won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that set the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB
  SmallPtrSet<PHINode*, 8> VisitedInsts;
  SmallVector<PHINode*, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.back();
    WorkList.pop_back();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst ||
          std::find(succ_begin(BB), succ_end(BB), B) == succ_end(BB))
        continue;
      // First collapse InstChain
      for (Instruction *I : llvm::reverse(InstChain)) {
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}

// Calculate Edge Weights using "Loop Branch Heuristics". Predict backedges
// as taken, exiting edges as not-taken.
bool BranchProbabilityInfo::calcLoopBranchHeuristics(const BasicBlock *BB,
                                                     const LoopInfo &LI,
                                                     SccInfo &SccI) {
  int SccNum;
  Loop *L = LI.getLoopFor(BB);
  if (!L) {
    SccNum = getSCCNum(BB, SccI);
    if (SccNum < 0)
      return false;
  }
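  // At this point either BB is in a natural loop, or it belongs to an SCC
  // reached via irreducible control flow, which is treated like a loop below.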

  SmallPtrSet<const BasicBlock*, 8> UnlikelyBlocks;
  if (L)
    computeUnlikelySuccessors(BB, L, UnlikelyBlocks);

  SmallVector<unsigned, 8> BackEdges;
  SmallVector<unsigned, 8> ExitingEdges;
  SmallVector<unsigned, 8> InEdges; // Edges from header to the loop.
  SmallVector<unsigned, 8> UnlikelyEdges;

  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
    // Use LoopInfo if we have it, otherwise fall back to SCC info to catch
    // irreducible loops.
    if (L) {
      if (UnlikelyBlocks.count(*I) != 0)
        UnlikelyEdges.push_back(I.getSuccessorIndex());
      else if (!L->contains(*I))
        ExitingEdges.push_back(I.getSuccessorIndex());
      else if (L->getHeader() == *I)
        BackEdges.push_back(I.getSuccessorIndex());
      else
        InEdges.push_back(I.getSuccessorIndex());
    } else {
      if (getSCCNum(*I, SccI) != SccNum)
        ExitingEdges.push_back(I.getSuccessorIndex());
      else if (isSCCHeader(*I, SccNum, SccI))
        BackEdges.push_back(I.getSuccessorIndex());
      else
        InEdges.push_back(I.getSuccessorIndex());
    }
  }

  if (BackEdges.empty() && ExitingEdges.empty() && UnlikelyEdges.empty())
    return false;

  // Collect the sum of probabilities of back-edges/in-edges/exiting-edges, and
  // normalize them so that they sum up to one.
  unsigned Denom = (BackEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                   (InEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                   (UnlikelyEdges.empty() ? 0 : LBH_UNLIKELY_WEIGHT) +
                   (ExitingEdges.empty() ? 0 : LBH_NONTAKEN_WEIGHT);
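  // Only the edge kinds that are actually present contribute to Denom, so the
  // per-kind probabilities computed below always sum to one over the
  // successors classified above.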

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  if (uint32_t numBackEdges = BackEdges.size()) {
    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
    auto Prob = TakenProb / numBackEdges;
    for (unsigned SuccIdx : BackEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numInEdges = InEdges.size()) {
    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
    auto Prob = TakenProb / numInEdges;
    for (unsigned SuccIdx : InEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numExitingEdges = ExitingEdges.size()) {
    BranchProbability NotTakenProb = BranchProbability(LBH_NONTAKEN_WEIGHT,
                                                       Denom);
    auto Prob = NotTakenProb / numExitingEdges;
    for (unsigned SuccIdx : ExitingEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numUnlikelyEdges = UnlikelyEdges.size()) {
    BranchProbability UnlikelyProb = BranchProbability(LBH_UNLIKELY_WEIGHT,
                                                       Denom);
    auto Prob = UnlikelyProb / numUnlikelyEdges;
    for (unsigned SuccIdx : UnlikelyEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

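  // Helper that looks through a bitcast, when present, to find an underlying
  // ConstantInt operand.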
  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = dyn_cast<ConstantInt>(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  bool isProb;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp) {
    // strcmp and similar functions return zero, negative, or positive, if the
    // first string is equal, less, or greater than the second. We consider it
    // likely that the strings are not equal, so a comparison with zero is
    // probably false, and a comparison with any other number is also probably
    // false given that what exactly is returned for nonzero values is not
    // specified. Any kind of comparison other than equality we know nothing
    // about.
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      isProb = true;
      break;
    default:
      return false;
    }
| 802 | } else if (CV->isZero()) { |
Benjamin Kramer | 0ca1ad0 | 2011-09-04 23:53:04 +0000 | [diff] [blame] | 803 | switch (CI->getPredicate()) { |
| 804 | case CmpInst::ICMP_EQ: |
| 805 | // X == 0 -> Unlikely |
| 806 | isProb = false; |
| 807 | break; |
| 808 | case CmpInst::ICMP_NE: |
| 809 | // X != 0 -> Likely |
| 810 | isProb = true; |
| 811 | break; |
| 812 | case CmpInst::ICMP_SLT: |
| 813 | // X < 0 -> Unlikely |
| 814 | isProb = false; |
| 815 | break; |
| 816 | case CmpInst::ICMP_SGT: |
| 817 | // X > 0 -> Likely |
| 818 | isProb = true; |
| 819 | break; |
| 820 | default: |
| 821 | return false; |
| 822 | } |
| 823 | } else if (CV->isOne() && CI->getPredicate() == CmpInst::ICMP_SLT) { |
| 824 | // InstCombine canonicalizes X <= 0 into X < 1. |
| 825 | // X <= 0 -> Unlikely |
Jakub Staszak | 17af66a | 2011-07-31 03:27:24 +0000 | [diff] [blame] | 826 | isProb = false; |
Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 827 | } else if (CV->isMinusOne()) { |
Hal Finkel | 4d94930 | 2013-11-01 10:58:22 +0000 | [diff] [blame] | 828 | switch (CI->getPredicate()) { |
| 829 | case CmpInst::ICMP_EQ: |
| 830 | // X == -1 -> Unlikely |
| 831 | isProb = false; |
| 832 | break; |
| 833 | case CmpInst::ICMP_NE: |
| 834 | // X != -1 -> Likely |
| 835 | isProb = true; |
| 836 | break; |
| 837 | case CmpInst::ICMP_SGT: |
| 838 | // InstCombine canonicalizes X >= 0 into X > -1. |
| 839 | // X >= 0 -> Likely |
| 840 | isProb = true; |
| 841 | break; |
| 842 | default: |
| 843 | return false; |
| 844 | } |
Benjamin Kramer | 0ca1ad0 | 2011-09-04 23:53:04 +0000 | [diff] [blame] | 845 | } else { |
Jakub Staszak | 17af66a | 2011-07-31 03:27:24 +0000 | [diff] [blame] | 846 | return false; |
Benjamin Kramer | 0ca1ad0 | 2011-09-04 23:53:04 +0000 | [diff] [blame] | 847 | } |
Jakub Staszak | 17af66a | 2011-07-31 03:27:24 +0000 | [diff] [blame] | 848 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 849 | BranchProbability TakenProb(ZH_TAKEN_WEIGHT, |
| 850 | ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT); |
Yevgeny Rouban | 8138487 | 2020-05-21 11:49:11 +0700 | [diff] [blame] | 851 | BranchProbability UntakenProb(ZH_NONTAKEN_WEIGHT, |
| 852 | ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT); |
| 853 | if (!isProb) |
| 854 | std::swap(TakenProb, UntakenProb); |
| 855 | |
| 856 | setEdgeProbability( |
| 857 | BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb})); |
Jakub Staszak | 17af66a | 2011-07-31 03:27:24 +0000 | [diff] [blame] | 858 | return true; |
| 859 | } |
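
// Worked example (with hypothetical numbers; the actual ZH_TAKEN_WEIGHT and
// ZH_NONTAKEN_WEIGHT constants are defined elsewhere in this file): if the
// weights were 20 and 12, the "likely" edge would receive probability
// 20 / (20 + 12) = 62.5% and the "unlikely" edge 12 / 32 = 37.5%; the swap
// above decides which of the two the branch condition deserves.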
| 860 | |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 861 | bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) { |
| 862 | const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator()); |
Benjamin Kramer | 1e731a1 | 2011-10-21 20:12:47 +0000 | [diff] [blame] | 863 | if (!BI || !BI->isConditional()) |
| 864 | return false; |
| 865 | |
| 866 | Value *Cond = BI->getCondition(); |
| 867 | FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond); |
Benjamin Kramer | 606a50a | 2011-10-21 21:13:47 +0000 | [diff] [blame] | 868 | if (!FCmp) |
Benjamin Kramer | 1e731a1 | 2011-10-21 20:12:47 +0000 | [diff] [blame] | 869 | return false; |
| 870 | |
Guozhi Wei | b329e07 | 2019-09-10 17:25:11 +0000 | [diff] [blame] | 871 | uint32_t TakenWeight = FPH_TAKEN_WEIGHT; |
| 872 | uint32_t NontakenWeight = FPH_NONTAKEN_WEIGHT; |
Benjamin Kramer | 606a50a | 2011-10-21 21:13:47 +0000 | [diff] [blame] | 873 | bool isProb; |
| 874 | if (FCmp->isEquality()) { |
| 875 | // f1 == f2 -> Unlikely |
| 876 | // f1 != f2 -> Likely |
| 877 | isProb = !FCmp->isTrueWhenEqual(); |
| 878 | } else if (FCmp->getPredicate() == FCmpInst::FCMP_ORD) { |
| 879 | // !isnan -> Likely |
| 880 | isProb = true; |
Guozhi Wei | b329e07 | 2019-09-10 17:25:11 +0000 | [diff] [blame] | 881 | TakenWeight = FPH_ORD_WEIGHT; |
| 882 | NontakenWeight = FPH_UNO_WEIGHT; |
Benjamin Kramer | 606a50a | 2011-10-21 21:13:47 +0000 | [diff] [blame] | 883 | } else if (FCmp->getPredicate() == FCmpInst::FCMP_UNO) { |
| 884 | // isnan -> Unlikely |
| 885 | isProb = false; |
Guozhi Wei | b329e07 | 2019-09-10 17:25:11 +0000 | [diff] [blame] | 886 | TakenWeight = FPH_ORD_WEIGHT; |
| 887 | NontakenWeight = FPH_UNO_WEIGHT; |
Benjamin Kramer | 606a50a | 2011-10-21 21:13:47 +0000 | [diff] [blame] | 888 | } else { |
| 889 | return false; |
| 890 | } |
| 891 | |
Reid Kleckner | 1370757 | 2020-05-13 08:23:09 -0700 | [diff] [blame] | 892 | BranchProbability TakenProb(TakenWeight, TakenWeight + NontakenWeight); |
Yevgeny Rouban | 8138487 | 2020-05-21 11:49:11 +0700 | [diff] [blame] | 893 | BranchProbability UntakenProb(NontakenWeight, TakenWeight + NontakenWeight); |
| 894 | if (!isProb) |
| 895 | std::swap(TakenProb, UntakenProb); |
| 896 | |
| 897 | setEdgeProbability( |
| 898 | BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb})); |
Benjamin Kramer | 1e731a1 | 2011-10-21 20:12:47 +0000 | [diff] [blame] | 899 | return true; |
| 900 | } |
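
// Note: the common isnan(X) idiom is typically lowered to an unordered
// compare of a value with itself (fcmp uno), so a branch guarded by it takes
// the FCMP_UNO case above and its taken edge is considered unlikely, using
// the stronger FPH_ORD_WEIGHT / FPH_UNO_WEIGHT split rather than the default
// floating point weights.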
Jakub Staszak | 17af66a | 2011-07-31 03:27:24 +0000 | [diff] [blame] | 901 | |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 902 | bool BranchProbabilityInfo::calcInvokeHeuristics(const BasicBlock *BB) { |
| 903 | const InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator()); |
Bill Wendling | e1c5426 | 2012-08-15 12:22:35 +0000 | [diff] [blame] | 904 | if (!II) |
| 905 | return false; |
| 906 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 907 | BranchProbability TakenProb(IH_TAKEN_WEIGHT, |
| 908 | IH_TAKEN_WEIGHT + IH_NONTAKEN_WEIGHT); |
Yevgeny Rouban | 8138487 | 2020-05-21 11:49:11 +0700 | [diff] [blame] | 909 | setEdgeProbability( |
| 910 | BB, SmallVector<BranchProbability, 2>({TakenProb, TakenProb.getCompl()})); |
Bill Wendling | e1c5426 | 2012-08-15 12:22:35 +0000 | [diff] [blame] | 911 | return true; |
| 912 | } |
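
// Note: for an InvokeInst, successor 0 is the normal destination and
// successor 1 is the unwind destination, so the normal path receives
// IH_TAKEN_WEIGHT / (IH_TAKEN_WEIGHT + IH_NONTAKEN_WEIGHT) of the probability
// and the exceptional path gets the complement, reflecting the expectation
// that unwinding is rare.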
| 913 | |
Pete Cooper | b9d2e34 | 2015-05-28 19:43:06 +0000 | [diff] [blame] | 914 | void BranchProbabilityInfo::releaseMemory() { |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 915 | Probs.clear(); |
Nikita Popov | fe8abbf | 2020-04-07 21:21:30 +0200 | [diff] [blame] | 916 | Handles.clear(); |
Pete Cooper | b9d2e34 | 2015-05-28 19:43:06 +0000 | [diff] [blame] | 917 | } |
| 918 | |
Alina Sbirlea | 62a50a9 | 2020-01-15 14:02:33 -0800 | [diff] [blame] | 919 | bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA, |
| 920 | FunctionAnalysisManager::Invalidator &) { |
| 921 | // Check whether the analysis, all analyses on functions, or the function's |
| 922 | // CFG have been preserved. |
| 923 | auto PAC = PA.getChecker<BranchProbabilityAnalysis>(); |
| 924 | return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>() || |
| 925 | PAC.preservedSet<CFGAnalyses>()); |
| 926 | } |
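
// Illustrative sketch (not taken from this file): a transform pass that does
// not touch the CFG can keep this analysis alive under the new pass manager
// with something like
//   PreservedAnalyses PA;
//   PA.preserveSet<CFGAnalyses>();
//   return PA;
// which makes the invalidate() check above return false.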
| 927 | |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 928 | void BranchProbabilityInfo::print(raw_ostream &OS) const { |
Chandler Carruth | 1c8ace0 | 2011-10-23 21:21:50 +0000 | [diff] [blame] | 929 | OS << "---- Branch Probabilities ----\n"; |
| 930 | // We print the probabilities from the last function the analysis ran over, |
| 931 | // or the function it is currently running over. |
| 932 | assert(LastF && "Cannot print prior to running over a function"); |
Duncan P. N. Exon Smith | 5a82c91 | 2015-10-10 00:53:03 +0000 | [diff] [blame] | 933 | for (const auto &BI : *LastF) { |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 934 | for (const_succ_iterator SI = succ_begin(&BI), SE = succ_end(&BI); SI != SE; |
Duncan P. N. Exon Smith | 5a82c91 | 2015-10-10 00:53:03 +0000 | [diff] [blame] | 935 | ++SI) { |
| 936 | printEdgeProbability(OS << " ", &BI, *SI); |
Duncan P. N. Exon Smith | 6c99015 | 2014-07-21 17:06:51 +0000 | [diff] [blame] | 937 | } |
| 938 | } |
Chandler Carruth | 1c8ace0 | 2011-10-23 21:21:50 +0000 | [diff] [blame] | 939 | } |
| 940 | |
Jakub Staszak | efd94c8 | 2011-07-29 19:30:00 +0000 | [diff] [blame] | 941 | bool BranchProbabilityInfo:: |
| 942 | isEdgeHot(const BasicBlock *Src, const BasicBlock *Dst) const { |
Andrew Trick | 3d4e64b | 2011-06-11 01:05:22 +0000 | [diff] [blame] | 943 | // Hot probability is at least 4/5 = 80% |
Benjamin Kramer | 929f53f | 2011-10-23 11:19:14 +0000 | [diff] [blame] | 944 | // FIXME: Compare against a static "hot" BranchProbability. |
| 945 | return getEdgeProbability(Src, Dst) > BranchProbability(4, 5); |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 946 | } |
| 947 | |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 948 | const BasicBlock * |
| 949 | BranchProbabilityInfo::getHotSucc(const BasicBlock *BB) const { |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 950 | auto MaxProb = BranchProbability::getZero(); |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 951 | const BasicBlock *MaxSucc = nullptr; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 952 | |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 953 | for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) { |
Mehdi Amini | a797877 | 2016-04-07 21:59:28 +0000 | [diff] [blame] | 954 | const BasicBlock *Succ = *I; |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 955 | auto Prob = getEdgeProbability(BB, Succ); |
| 956 | if (Prob > MaxProb) { |
| 957 | MaxProb = Prob; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 958 | MaxSucc = Succ; |
| 959 | } |
| 960 | } |
| 961 | |
Benjamin Kramer | 929f53f | 2011-10-23 11:19:14 +0000 | [diff] [blame] | 962 | // Hot probability is at least 4/5 = 80% |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 963 | if (MaxProb > BranchProbability(4, 5)) |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 964 | return MaxSucc; |
| 965 | |
Craig Topper | 9f00886 | 2014-04-15 04:59:12 +0000 | [diff] [blame] | 966 | return nullptr; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 967 | } |
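
// Illustrative use from a hypothetical caller:
//   if (const BasicBlock *Hot = BPI.getHotSucc(BB))
//     ...; // a single successor receives more than 80% of BB's probability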
| 968 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 969 | /// Get the raw edge probability for the edge. If it is not found, return a
| 970 | /// default probability of 1/N, where N is the number of successors. Here an
| 971 | /// edge is specified by the predecessor block and an index into its
| 972 | /// successor list.
| 973 | BranchProbability |
| 974 | BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src, |
| 975 | unsigned IndexInSuccessors) const { |
| 976 | auto I = Probs.find(std::make_pair(Src, IndexInSuccessors)); |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 977 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 978 | if (I != Probs.end()) |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 979 | return I->second; |
| 980 | |
Vedant Kumar | e0b5f86 | 2018-05-10 23:01:54 +0000 | [diff] [blame] | 981 | return {1, static_cast<uint32_t>(succ_size(Src))}; |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 982 | } |
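
// For example, a block ending in a switch with four successors and no
// recorded probabilities yields the uniform default of 1/4 for each
// successor index.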
| 983 | |
Cong Hou | d97c100 | 2015-12-01 05:29:22 +0000 | [diff] [blame] | 984 | BranchProbability |
| 985 | BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src, |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 986 | const_succ_iterator Dst) const { |
Cong Hou | d97c100 | 2015-12-01 05:29:22 +0000 | [diff] [blame] | 987 | return getEdgeProbability(Src, Dst.getSuccessorIndex()); |
| 988 | } |
| 989 | |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 990 | /// Get the raw edge probability calculated for the block pair. This returns the |
| 991 | /// sum of all raw edge probabilities from Src to Dst. |
| 992 | BranchProbability |
| 993 | BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src, |
| 994 | const BasicBlock *Dst) const { |
| 995 | auto Prob = BranchProbability::getZero(); |
| 996 | bool FoundProb = false; |
Evgeniy Brevnov | bb0842a | 2020-04-29 14:08:01 +0700 | [diff] [blame] | 997 | uint32_t EdgeCount = 0; |
Alina Sbirlea | 3abcbf9 | 2020-03-10 11:33:02 -0700 | [diff] [blame] | 998 | for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I) |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 999 | if (*I == Dst) { |
Evgeniy Brevnov | bb0842a | 2020-04-29 14:08:01 +0700 | [diff] [blame] | 1000 | ++EdgeCount; |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 1001 | auto MapI = Probs.find(std::make_pair(Src, I.getSuccessorIndex())); |
| 1002 | if (MapI != Probs.end()) { |
| 1003 | FoundProb = true; |
| 1004 | Prob += MapI->second; |
| 1005 | } |
| 1006 | } |
| 1007 | uint32_t SuccNum = succ_size(Src);
Evgeniy Brevnov | bb0842a | 2020-04-29 14:08:01 +0700 | [diff] [blame] | 1008 | return FoundProb ? Prob : BranchProbability(EdgeCount, SuccNum);
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 1009 | } |
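
// For example, a switch may reach the same destination through several case
// values. With no recorded probabilities, a destination covered by k of the
// block's n successor slots gets k/n, so duplicate edges still receive their
// combined uniform share.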
| 1010 | |
| 1011 | /// Set the edge probability for a given edge specified by the predecessor
| 1012 | /// block and an index into its successor list.
| 1013 | void BranchProbabilityInfo::setEdgeProbability(const BasicBlock *Src, |
| 1014 | unsigned IndexInSuccessors, |
| 1015 | BranchProbability Prob) { |
| 1016 | Probs[std::make_pair(Src, IndexInSuccessors)] = Prob; |
Igor Laevsky | ee40d1e | 2016-07-15 14:31:16 +0000 | [diff] [blame] | 1017 | Handles.insert(BasicBlockCallbackVH(Src, this)); |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1018 | LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " |
| 1019 | << IndexInSuccessors << " successor probability to " << Prob |
| 1020 | << "\n"); |
Cong Hou | e93b8e1 | 2015-12-22 18:56:14 +0000 | [diff] [blame] | 1021 | } |
| 1022 | |
Yevgeny Rouban | 8138487 | 2020-05-21 11:49:11 +0700 | [diff] [blame] | 1023 | /// Set the probabilities for all of Src's outgoing edges at once.
| 1024 | void BranchProbabilityInfo::setEdgeProbability( |
| 1025 | const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) { |
| 1026 | assert(Src->getTerminator()->getNumSuccessors() == Probs.size()); |
| 1027 | if (Probs.empty())
| 1028 | return; // Nothing to set. |
| 1029 | |
| 1030 | uint64_t TotalNumerator = 0; |
| 1031 | for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) { |
| 1032 | setEdgeProbability(Src, SuccIdx, Probs[SuccIdx]); |
| 1033 | TotalNumerator += Probs[SuccIdx].getNumerator(); |
| 1034 | } |
| 1035 | |
| 1036 | // Because of rounding errors the total probability cannot be checked to be
| 1037 | // exactly 1.0, i.e. TotalNumerator need not equal
| 1038 | // BranchProbability::getDenominator(). Instead, every single probability in
| 1039 | // Probs is kept as accurate as possible, bounding its error by 1/denominator,
| 1040 | // so the total absolute error is at most Probs.size() / BranchProbability::getDenominator().
| 1041 | assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size()); |
| 1042 | assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size()); |
| 1043 | } |
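
// Worked example (hypothetical numbers): splitting 1.0 over three successors
// with denominator D leaves each stored numerator within 1 of D/3, so
// TotalNumerator can differ from D by up to 3, which is exactly the slack the
// asserts above allow.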
| 1044 | |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 1045 | raw_ostream & |
Chandler Carruth | 1c8ace0 | 2011-10-23 21:21:50 +0000 | [diff] [blame] | 1046 | BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS, |
| 1047 | const BasicBlock *Src, |
| 1048 | const BasicBlock *Dst) const { |
Jakub Staszak | 12a43bd | 2011-06-16 20:22:37 +0000 | [diff] [blame] | 1049 | const BranchProbability Prob = getEdgeProbability(Src, Dst); |
Benjamin Kramer | 1f97a5a | 2011-11-15 16:27:03 +0000 | [diff] [blame] | 1050 | OS << "edge " << Src->getName() << " -> " << Dst->getName() |
Andrew Trick | 3d4e64b | 2011-06-11 01:05:22 +0000 | [diff] [blame] | 1051 | << " probability is " << Prob |
| 1052 | << (isEdgeHot(Src, Dst) ? " [HOT edge]\n" : "\n"); |
Andrew Trick | 49371f3 | 2011-06-04 01:16:30 +0000 | [diff] [blame] | 1053 | |
| 1054 | return OS; |
| 1055 | } |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1056 | |
Igor Laevsky | ee40d1e | 2016-07-15 14:31:16 +0000 | [diff] [blame] | 1057 | void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) { |
| 1058 | for (auto I = Probs.begin(), E = Probs.end(); I != E; ++I) { |
| 1059 | auto Key = I->first; |
| 1060 | if (Key.first == BB) |
| 1061 | Probs.erase(Key); |
| 1062 | } |
| 1063 | } |
| 1064 | |
John Brawn | da4a68a | 2017-06-08 09:44:40 +0000 | [diff] [blame] | 1065 | void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LI, |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1066 | const TargetLibraryInfo *TLI, |
| 1067 | PostDominatorTree *PDT) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1068 | LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName() |
| 1069 | << " ----\n\n"); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1070 | LastF = &F; // Store the last function we ran on for printing. |
| 1071 | assert(PostDominatedByUnreachable.empty()); |
| 1072 | assert(PostDominatedByColdCall.empty()); |
| 1073 | |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1074 | // Record SCC numbers of blocks in the CFG to identify irreducible loops. |
| 1075 | // FIXME: We could calculate this only if the CFG is known to be irreducible
| 1076 | // (perhaps cache this info in LoopInfo if we can easily calculate it there?). |
| 1077 | int SccNum = 0; |
| 1078 | SccInfo SccI; |
| 1079 | for (scc_iterator<const Function *> It = scc_begin(&F); !It.isAtEnd(); |
| 1080 | ++It, ++SccNum) { |
| 1081 | // Ignore single-block SCCs since they either aren't loops or LoopInfo will |
| 1082 | // catch them. |
| 1083 | const std::vector<const BasicBlock *> &Scc = *It; |
| 1084 | if (Scc.size() == 1) |
| 1085 | continue; |
| 1086 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1087 | LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":"); |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1088 | for (auto *BB : Scc) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1089 | LLVM_DEBUG(dbgs() << " " << BB->getName()); |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1090 | SccI.SccNums[BB] = SccNum; |
| 1091 | } |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1092 | LLVM_DEBUG(dbgs() << "\n"); |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1093 | } |
| 1094 | |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1095 | std::unique_ptr<PostDominatorTree> PDTPtr; |
| 1096 | |
| 1097 | if (!PDT) { |
| 1098 | PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F)); |
| 1099 | PDT = PDTPtr.get(); |
| 1100 | } |
| 1101 | |
| 1102 | computePostDominatedByUnreachable(F, PDT); |
| 1103 | computePostDominatedByColdCall(F, PDT); |
Taewook Oh | 2da205d | 2019-12-02 10:15:22 -0800 | [diff] [blame] | 1104 | |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1105 | // Walk the basic blocks in post-order so that we can build up state about |
| 1106 | // the successors of a block iteratively. |
| 1107 | for (auto BB : post_order(&F.getEntryBlock())) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1108 | LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName() |
| 1109 | << "\n"); |
Serguei Katkov | 11d9c4f | 2017-04-17 06:39:47 +0000 | [diff] [blame] | 1110 | // If the block has fewer than two successors, there is no probability to set.
| 1111 | if (BB->getTerminator()->getNumSuccessors() < 2) |
| 1112 | continue; |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1113 | if (calcMetadataWeights(BB)) |
| 1114 | continue; |
Artur Pilipenko | 4d063e7 | 2018-06-08 13:03:21 +0000 | [diff] [blame] | 1115 | if (calcInvokeHeuristics(BB)) |
| 1116 | continue; |
Serguei Katkov | 2616bbb | 2017-04-17 04:33:04 +0000 | [diff] [blame] | 1117 | if (calcUnreachableHeuristics(BB)) |
| 1118 | continue; |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1119 | if (calcColdCallHeuristics(BB)) |
| 1120 | continue; |
Geoff Berry | eed6531 | 2017-11-01 15:16:50 +0000 | [diff] [blame] | 1121 | if (calcLoopBranchHeuristics(BB, LI, SccI)) |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1122 | continue; |
| 1123 | if (calcPointerHeuristics(BB)) |
| 1124 | continue; |
John Brawn | da4a68a | 2017-06-08 09:44:40 +0000 | [diff] [blame] | 1125 | if (calcZeroHeuristics(BB, TLI)) |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1126 | continue; |
| 1127 | if (calcFloatingPointHeuristics(BB)) |
| 1128 | continue; |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1129 | } |
| 1130 | |
| 1131 | PostDominatedByUnreachable.clear(); |
| 1132 | PostDominatedByColdCall.clear(); |
Hiroshi Yamauchi | 63e17eb | 2017-08-26 00:31:00 +0000 | [diff] [blame] | 1133 | |
| 1134 | if (PrintBranchProb && |
| 1135 | (PrintBranchProbFuncName.empty() || |
| 1136 | F.getName().equals(PrintBranchProbFuncName))) { |
| 1137 | print(dbgs()); |
| 1138 | } |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1139 | } |
| 1140 | |
| 1141 | void BranchProbabilityInfoWrapperPass::getAnalysisUsage( |
| 1142 | AnalysisUsage &AU) const { |
Mikael Holmen | 2ca1689 | 2018-05-17 09:05:40 +0000 | [diff] [blame] | 1143 | // We require DT so that it is available whenever LI is available. The LI
| 1144 | // updating code asserts that DT is also present, so if we don't make sure DT
| 1145 | // is available here, that assert will trigger.
| 1146 | AU.addRequired<DominatorTreeWrapperPass>(); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1147 | AU.addRequired<LoopInfoWrapperPass>(); |
John Brawn | da4a68a | 2017-06-08 09:44:40 +0000 | [diff] [blame] | 1148 | AU.addRequired<TargetLibraryInfoWrapperPass>(); |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1149 | AU.addRequired<PostDominatorTreeWrapperPass>(); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1150 | AU.setPreservesAll(); |
| 1151 | } |
| 1152 | |
| 1153 | bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) { |
| 1154 | const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo(); |
Teresa Johnson | 9c27b59 | 2019-09-07 03:09:36 +0000 | [diff] [blame] | 1155 | const TargetLibraryInfo &TLI = |
| 1156 | getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F); |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1157 | PostDominatorTree &PDT = |
| 1158 | getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree(); |
| 1159 | BPI.calculate(F, LI, &TLI, &PDT); |
Cong Hou | ab23bfb | 2015-07-15 22:48:29 +0000 | [diff] [blame] | 1160 | return false; |
| 1161 | } |
| 1162 | |
| 1163 | void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); } |
| 1164 | |
| 1165 | void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS, |
| 1166 | const Module *) const { |
| 1167 | BPI.print(OS); |
| 1168 | } |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1169 | |
Chandler Carruth | dab4eae | 2016-11-23 17:53:26 +0000 | [diff] [blame] | 1170 | AnalysisKey BranchProbabilityAnalysis::Key; |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1171 | BranchProbabilityInfo |
Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 1172 | BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) { |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1173 | BranchProbabilityInfo BPI; |
Evgeniy Brevnov | 3e68a667 | 2020-04-28 16:31:20 +0700 | [diff] [blame] | 1174 | BPI.calculate(F, AM.getResult<LoopAnalysis>(F), |
| 1175 | &AM.getResult<TargetLibraryAnalysis>(F), |
| 1176 | &AM.getResult<PostDominatorTreeAnalysis>(F)); |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1177 | return BPI; |
| 1178 | } |
| 1179 | |
| 1180 | PreservedAnalyses |
Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 1181 | BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) { |
Xinliang David Li | 6e5dd41 | 2016-05-05 02:59:57 +0000 | [diff] [blame] | 1182 | OS << "Printing analysis results of BPI for function " |
| 1183 | << "'" << F.getName() << "':" |
| 1184 | << "\n"; |
| 1185 | AM.getResult<BranchProbabilityAnalysis>(F).print(OS); |
| 1186 | return PreservedAnalyses::all(); |
| 1187 | } |
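
// Usage note: the printer pass above is typically driven from the command
// line with something like
//   opt -passes='print<branch-prob>' -disable-output input.ll
// (the exact pass name follows its registration in PassRegistry.def).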