//===---- X86CondBrFolding.cpp - optimize conditional branches ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// This file defines a pass that optimizes conditional branches on x86 by
// taking advantage of the three-way conditional code generated by compare
// instructions.
// Currently, it tries to hoist EQ and NE conditional branches to a dominant
// conditional branch condition where the same EQ/NE conditional code is
// computed. An example:
//  bb_0:
//    cmp %0, 19
//    jg bb_1
//    jmp bb_2
//  bb_1:
//    cmp %0, 40
//    jg bb_3
//    jmp bb_4
//  bb_4:
//    cmp %0, 20
//    je bb_5
//    jmp bb_6
// Here we could combine the two compares in bb_0 and bb_4 and have the
// following code:
//  bb_0:
//    cmp %0, 20
//    jg bb_1
//    jl bb_2
//    jmp bb_5
//  bb_1:
//    cmp %0, 40
//    jg bb_3
//    jmp bb_6
// For the case of %0 == 20 (bb_5), we eliminate two jumps, and the control
// height for bb_6 is also reduced. bb_4 is gone after the optimization.
//
// There are plenty of these code patterns, especially from switch case
// lowering, where we generate a compare against "pivot-1" for the inner nodes
// of the binary search tree.
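//
// As a rough illustration (not drawn from any particular test case), a switch
// like
//   switch (x) { case 10: ... case 20: ... case 30: ... case 40: ... }
// may be lowered as a binary search tree whose inner node tests a range with
// "cmp %x, pivot-1; jg ..." while a leaf tests equality with
// "cmp %x, case; je ...", which is exactly the bb_0/bb_4 shape shown above.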
//===----------------------------------------------------------------------===//

#include "X86.h"
#include "X86InstrInfo.h"
#include "X86Subtarget.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBranchProbabilityInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/BranchProbability.h"

using namespace llvm;

#define DEBUG_TYPE "x86-condbr-folding"

STATISTIC(NumFixedCondBrs, "Number of x86 condbr folded");

namespace {
class X86CondBrFoldingPass : public MachineFunctionPass {
public:
  X86CondBrFoldingPass() : MachineFunctionPass(ID) {
    initializeX86CondBrFoldingPassPass(*PassRegistry::getPassRegistry());
  }
  StringRef getPassName() const override { return "X86 CondBr Folding"; }

  bool runOnMachineFunction(MachineFunction &MF) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    MachineFunctionPass::getAnalysisUsage(AU);
    AU.addRequired<MachineBranchProbabilityInfo>();
  }

public:
  static char ID;
};
} // namespace

char X86CondBrFoldingPass::ID = 0;
INITIALIZE_PASS(X86CondBrFoldingPass, "X86CondBrFolding", "X86CondBrFolding",
                false, false)

FunctionPass *llvm::createX86CondBrFolding() {
  return new X86CondBrFoldingPass();
}

namespace {
// A class that stores the auxiliary information for each MBB.
struct TargetMBBInfo {
  MachineBasicBlock *TBB;
  MachineBasicBlock *FBB;
  MachineInstr *BrInstr;
  MachineInstr *CmpInstr;
  X86::CondCode BranchCode;
  unsigned SrcReg;
  int CmpValue;
  bool Modified;
  bool CmpBrOnly;
};

// A class that optimizes conditional branches by hoisting and merging
// CondCodes.
class X86CondBrFolding {
public:
  X86CondBrFolding(const X86InstrInfo *TII,
                   const MachineBranchProbabilityInfo *MBPI,
                   MachineFunction &MF)
      : TII(TII), MBPI(MBPI), MF(MF) {}
  bool optimize();

private:
  const X86InstrInfo *TII;
  const MachineBranchProbabilityInfo *MBPI;
  MachineFunction &MF;
  std::vector<std::unique_ptr<TargetMBBInfo>> MBBInfos;
  SmallVector<MachineBasicBlock *, 4> RemoveList;

  void optimizeCondBr(MachineBasicBlock &MBB,
                      SmallVectorImpl<MachineBasicBlock *> &BranchPath);
  void fixBranchProb(MachineBasicBlock *NextMBB, MachineBasicBlock *RootMBB,
                     SmallVectorImpl<MachineBasicBlock *> &BranchPath);
  void replaceBrDest(MachineBasicBlock *MBB, MachineBasicBlock *OrigDest,
                     MachineBasicBlock *NewDest);
  void fixupModifiedCond(MachineBasicBlock *MBB);
  std::unique_ptr<TargetMBBInfo> analyzeMBB(MachineBasicBlock &MBB);
  static bool analyzeCompare(const MachineInstr &MI, unsigned &SrcReg,
                             int &CmpValue);
  bool findPath(MachineBasicBlock *MBB,
                SmallVectorImpl<MachineBasicBlock *> &BranchPath);
  TargetMBBInfo *getMBBInfo(MachineBasicBlock *MBB) const {
    return MBBInfos[MBB->getNumber()].get();
  }
};
} // namespace

// Find a valid path on which we can reuse the CondCode.
// The resulting path (if the return value is true) is stored in BranchPath.
// Return value:
//   false: no valid path is found.
//   true: a valid path is found and the target BB can be reached.
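//
// For the example in the file header, starting from bb_4 (compare against 20):
// its predecessor bb_1 (compare against 40, COND_G, reached on the false edge)
// satisfies none of the combine conditions, but bb_0 (compare against 19,
// COND_G) does, since 20 == 19 + 1; findPath then returns true with
// BranchPath == {bb_1, bb_0}, making bb_0 the root of the path.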
bool X86CondBrFolding::findPath(
    MachineBasicBlock *MBB, SmallVectorImpl<MachineBasicBlock *> &BranchPath) {
  TargetMBBInfo *MBBInfo = getMBBInfo(MBB);
  assert(MBBInfo && "Expecting a candidate MBB");
  int CmpValue = MBBInfo->CmpValue;

  MachineBasicBlock *PredMBB = *MBB->pred_begin();
  MachineBasicBlock *SaveMBB = MBB;
  while (PredMBB) {
    TargetMBBInfo *PredMBBInfo = getMBBInfo(PredMBB);
    if (!PredMBBInfo || PredMBBInfo->SrcReg != MBBInfo->SrcReg)
      return false;

    assert(SaveMBB == PredMBBInfo->TBB || SaveMBB == PredMBBInfo->FBB);
    bool IsFalseBranch = (SaveMBB == PredMBBInfo->FBB);

    X86::CondCode CC = PredMBBInfo->BranchCode;
    assert(CC == X86::COND_L || CC == X86::COND_G || CC == X86::COND_E);
    int PredCmpValue = PredMBBInfo->CmpValue;
    bool ValueCmpTrue = ((CmpValue < PredCmpValue && CC == X86::COND_L) ||
                         (CmpValue > PredCmpValue && CC == X86::COND_G) ||
                         (CmpValue == PredCmpValue && CC == X86::COND_E));
    // Check whether the compared value can actually reach this block along
    // the edge taken; if not, MBB's EQ target is dead on this path.
    if (!(ValueCmpTrue ^ IsFalseBranch)) {
      LLVM_DEBUG(dbgs() << "Dead BB detected!\n");
      return false;
    }

    BranchPath.push_back(PredMBB);
    // These are the conditions on which we could combine the compares.
    if ((CmpValue == PredCmpValue) ||
        (CmpValue == PredCmpValue - 1 && CC == X86::COND_L) ||
        (CmpValue == PredCmpValue + 1 && CC == X86::COND_G))
      return true;

    // If PredMBB has more than one predecessor, or is not a pure cmp-and-br
    // block, bail out.
    if (PredMBB->pred_size() != 1 || !PredMBBInfo->CmpBrOnly)
      return false;

    SaveMBB = PredMBB;
    PredMBB = *PredMBB->pred_begin();
  }
  return false;
}

// Fix up any PHI nodes in the successor block MBB, replacing references to
// OldMBB with NewMBB.
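// A machine PHI's operands after the def come in (value, incoming-MBB) pairs,
// so the incoming-block operands sit at indices 2, 4, 6, ... in the loop below.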
static void fixPHIsInSucc(MachineBasicBlock *MBB, MachineBasicBlock *OldMBB,
                          MachineBasicBlock *NewMBB) {
  if (NewMBB == OldMBB)
    return;
  for (auto MI = MBB->instr_begin(), ME = MBB->instr_end();
       MI != ME && MI->isPHI(); ++MI)
    for (unsigned i = 2, e = MI->getNumOperands() + 1; i != e; i += 2) {
      MachineOperand &MO = MI->getOperand(i);
      if (MO.getMBB() == OldMBB)
        MO.setMBB(NewMBB);
    }
}

// Utility function to set branch probability for edge MBB->SuccMBB.
static inline bool setBranchProb(MachineBasicBlock *MBB,
                                 MachineBasicBlock *SuccMBB,
                                 BranchProbability Prob) {
  auto MBBI = std::find(MBB->succ_begin(), MBB->succ_end(), SuccMBB);
  if (MBBI == MBB->succ_end())
    return false;
  MBB->setSuccProbability(MBBI, Prob);
  return true;
}

// Utility function to find the unconditional br instruction in MBB.
static inline MachineBasicBlock::iterator
findUncondBrI(MachineBasicBlock *MBB) {
  return std::find_if(MBB->begin(), MBB->end(), [](MachineInstr &MI) -> bool {
    return MI.getOpcode() == X86::JMP_1;
  });
}

// Replace MBB's original successor, OrigDest, with NewDest.
// Also update the MBBInfo for MBB.
void X86CondBrFolding::replaceBrDest(MachineBasicBlock *MBB,
                                     MachineBasicBlock *OrigDest,
                                     MachineBasicBlock *NewDest) {
  TargetMBBInfo *MBBInfo = getMBBInfo(MBB);
  MachineInstr *BrMI;
  if (MBBInfo->TBB == OrigDest) {
    BrMI = MBBInfo->BrInstr;
    MachineInstrBuilder MIB =
        BuildMI(*MBB, BrMI, MBB->findDebugLoc(BrMI), TII->get(X86::JCC_1))
            .addMBB(NewDest).addImm(MBBInfo->BranchCode);
    MBBInfo->TBB = NewDest;
    MBBInfo->BrInstr = MIB.getInstr();
  } else { // Should be the unconditional jump stmt.
    MachineBasicBlock::iterator UncondBrI = findUncondBrI(MBB);
    BuildMI(*MBB, UncondBrI, MBB->findDebugLoc(UncondBrI), TII->get(X86::JMP_1))
        .addMBB(NewDest);
    MBBInfo->FBB = NewDest;
    BrMI = &*UncondBrI;
  }
  fixPHIsInSucc(NewDest, OrigDest, MBB);
  BrMI->eraseFromParent();
  MBB->addSuccessor(NewDest);
  setBranchProb(MBB, NewDest, MBPI->getEdgeProbability(MBB, OrigDest));
  MBB->removeSuccessor(OrigDest);
}

// Change the CondCode and BrInstr according to MBBInfo.
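// analyzeMBB canonicalizes NE/LE/GE to E/L/G only inside the TargetMBBInfo;
// for a block on the path whose info was modified that way, this rebuilds its
// JCC_1/JMP_1 pair so the actual branch instructions match the canonicalized
// BranchCode/TBB/FBB before the surrounding rewrite relies on them.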
void X86CondBrFolding::fixupModifiedCond(MachineBasicBlock *MBB) {
  TargetMBBInfo *MBBInfo = getMBBInfo(MBB);
  if (!MBBInfo->Modified)
    return;

  MachineInstr *BrMI = MBBInfo->BrInstr;
  X86::CondCode CC = MBBInfo->BranchCode;
  MachineInstrBuilder MIB = BuildMI(*MBB, BrMI, MBB->findDebugLoc(BrMI),
                                    TII->get(X86::JCC_1))
                                .addMBB(MBBInfo->TBB).addImm(CC);
  BrMI->eraseFromParent();
  MBBInfo->BrInstr = MIB.getInstr();

  MachineBasicBlock::iterator UncondBrI = findUncondBrI(MBB);
  BuildMI(*MBB, UncondBrI, MBB->findDebugLoc(UncondBrI), TII->get(X86::JMP_1))
      .addMBB(MBBInfo->FBB);
  MBB->erase(UncondBrI);
  MBBInfo->Modified = false;
}

//
// Apply the transformation:
//   RootMBB -1->   ...   PredMBB -3->   MBB -5->   TargetMBB
//      \-2->                \-4->         \-6->    FalseMBB
// ==>
//   RootMBB -1->   ...   PredMBB -7->   FalseMBB
// TargetMBB <-8-/ \-2->     \-4->
//
// Note that PredMBB and RootMBB could be the same.
// In the case of a dead TargetMBB, TargetMBB and edge 8 are not present.
//
// There is special handling for the case where RootMBB's condition is COND_E,
// in which case we directly short-circuit the branch instruction.
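//
// In the file-header example, MBB is bb_4, TargetMBB is bb_5, FalseMBB is
// bb_6, PredMBB is bb_1 and RootMBB is bb_0; edge 7 becomes bb_1 -> bb_6 and
// edge 8 becomes bb_0 -> bb_5.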
//
void X86CondBrFolding::optimizeCondBr(
    MachineBasicBlock &MBB, SmallVectorImpl<MachineBasicBlock *> &BranchPath) {

  X86::CondCode CC;
  TargetMBBInfo *MBBInfo = getMBBInfo(&MBB);
  assert(MBBInfo && "Expecting a candidate MBB");
  MachineBasicBlock *TargetMBB = MBBInfo->TBB;
  BranchProbability TargetProb = MBPI->getEdgeProbability(&MBB, MBBInfo->TBB);

  // Forward the jump from MBB's predecessor to MBB's false target.
  MachineBasicBlock *PredMBB = BranchPath.front();
  TargetMBBInfo *PredMBBInfo = getMBBInfo(PredMBB);
  assert(PredMBBInfo && "Expecting a candidate MBB");
  if (PredMBBInfo->Modified)
    fixupModifiedCond(PredMBB);
  CC = PredMBBInfo->BranchCode;
  // Don't do this if the depth of BranchPath is 1 and PredMBB's condition is
  // COND_E; we will short-circuit directly in that case.
  if (!(CC == X86::COND_E && BranchPath.size() == 1))
    replaceBrDest(PredMBB, &MBB, MBBInfo->FBB);

  MachineBasicBlock *RootMBB = BranchPath.back();
  TargetMBBInfo *RootMBBInfo = getMBBInfo(RootMBB);
  assert(RootMBBInfo && "Expecting a candidate MBB");
  if (RootMBBInfo->Modified)
    fixupModifiedCond(RootMBB);
  CC = RootMBBInfo->BranchCode;

  if (CC != X86::COND_E) {
    MachineBasicBlock::iterator UncondBrI = findUncondBrI(RootMBB);
    // RootMBB: Cond jump to the original not-taken MBB.
    X86::CondCode NewCC;
    switch (CC) {
    case X86::COND_L:
      NewCC = X86::COND_G;
      break;
    case X86::COND_G:
      NewCC = X86::COND_L;
      break;
    default:
      llvm_unreachable("unexpected conditional code.");
    }
    BuildMI(*RootMBB, UncondBrI, RootMBB->findDebugLoc(UncondBrI),
            TII->get(X86::JCC_1))
        .addMBB(RootMBBInfo->FBB).addImm(NewCC);

    // RootMBB: Jump to TargetMBB
    BuildMI(*RootMBB, UncondBrI, RootMBB->findDebugLoc(UncondBrI),
            TII->get(X86::JMP_1))
        .addMBB(TargetMBB);
    RootMBB->addSuccessor(TargetMBB);
    fixPHIsInSucc(TargetMBB, &MBB, RootMBB);
    RootMBB->erase(UncondBrI);
  } else {
    replaceBrDest(RootMBB, RootMBBInfo->TBB, TargetMBB);
  }

  // Make RootMBB compare against MBB's CmpValue so its new branch to TargetMBB
  // tests the right value. Don't set the immediate directly; move MBB's
  // compare instruction here instead, as the opcode might be different.
  if (RootMBBInfo->CmpValue != MBBInfo->CmpValue) {
    MachineInstr *NewCmp = MBBInfo->CmpInstr;
    NewCmp->removeFromParent();
    RootMBB->insert(RootMBBInfo->CmpInstr, NewCmp);
    RootMBBInfo->CmpInstr->eraseFromParent();
  }

  // Fix branch probabilities.
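  // As an illustration with made-up numbers: for BranchPath == {PredMBB,
  // RootMBB} where every original edge had probability 1/2, TargetProb starts
  // at P(MBB->TargetMBB) = 1/2; at PredMBB it becomes 1/4 and the redirected
  // PredMBB->FBB edge (which inherited the old 1/2) drops to 1/4; at RootMBB
  // it becomes 1/8, the edge to PredMBB drops to 3/8, and the new
  // RootMBB->TargetMBB edge gets 1/8, before each block's successor
  // probabilities are normalized.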
  auto fixBranchProb = [&](MachineBasicBlock *NextMBB) {
    BranchProbability Prob;
    for (auto &I : BranchPath) {
      MachineBasicBlock *ThisMBB = I;
      if (!ThisMBB->hasSuccessorProbabilities() ||
          !ThisMBB->isSuccessor(NextMBB))
        break;
      Prob = MBPI->getEdgeProbability(ThisMBB, NextMBB);
      if (Prob.isUnknown())
        break;
      TargetProb = Prob * TargetProb;
      Prob = Prob - TargetProb;
      setBranchProb(ThisMBB, NextMBB, Prob);
      if (ThisMBB == RootMBB) {
        setBranchProb(ThisMBB, TargetMBB, TargetProb);
      }
      ThisMBB->normalizeSuccProbs();
      if (ThisMBB == RootMBB)
        break;
      NextMBB = ThisMBB;
    }
    return true;
  };
  if (CC != X86::COND_E && !TargetProb.isUnknown())
    fixBranchProb(MBBInfo->FBB);

  if (CC != X86::COND_E)
    RemoveList.push_back(&MBB);

  // Invalidate MBBInfo just in case.
  MBBInfos[MBB.getNumber()] = nullptr;
  MBBInfos[RootMBB->getNumber()] = nullptr;

  LLVM_DEBUG(dbgs() << "After optimization:\nRootMBB is: " << *RootMBB << "\n");
  if (BranchPath.size() > 1)
    LLVM_DEBUG(dbgs() << "PredMBB is: " << *(BranchPath[0]) << "\n");
}

// Driver function for the optimization: find valid candidates and apply the
// transformation.
bool X86CondBrFolding::optimize() {
  bool Changed = false;
  LLVM_DEBUG(dbgs() << "***** X86CondBr Folding on Function: " << MF.getName()
                    << " *****\n");
  // Setup data structures.
  MBBInfos.resize(MF.getNumBlockIDs());
  for (auto &MBB : MF)
    MBBInfos[MBB.getNumber()] = analyzeMBB(MBB);

  for (auto &MBB : MF) {
    TargetMBBInfo *MBBInfo = getMBBInfo(&MBB);
    if (!MBBInfo || !MBBInfo->CmpBrOnly)
      continue;
    if (MBB.pred_size() != 1)
      continue;
    LLVM_DEBUG(dbgs() << "Work on MBB." << MBB.getNumber()
                      << " CmpValue: " << MBBInfo->CmpValue << "\n");
    SmallVector<MachineBasicBlock *, 4> BranchPath;
    if (!findPath(&MBB, BranchPath))
      continue;

#ifndef NDEBUG
    LLVM_DEBUG(dbgs() << "Found one path (len=" << BranchPath.size() << "):\n");
    int Index = 1;
    LLVM_DEBUG(dbgs() << "Target MBB is: " << MBB << "\n");
    for (auto I = BranchPath.rbegin(); I != BranchPath.rend(); ++I, ++Index) {
      MachineBasicBlock *PMBB = *I;
      TargetMBBInfo *PMBBInfo = getMBBInfo(PMBB);
      LLVM_DEBUG(dbgs() << "Path MBB (" << Index << " of " << BranchPath.size()
                        << ") is " << *PMBB);
      LLVM_DEBUG(dbgs() << "CC=" << PMBBInfo->BranchCode
                        << " Val=" << PMBBInfo->CmpValue
                        << " CmpBrOnly=" << PMBBInfo->CmpBrOnly << "\n\n");
    }
#endif
    optimizeCondBr(MBB, BranchPath);
    Changed = true;
  }
  NumFixedCondBrs += RemoveList.size();
  for (auto MBBI : RemoveList) {
    while (!MBBI->succ_empty())
      MBBI->removeSuccessor(MBBI->succ_end() - 1);

    MBBI->eraseFromParent();
  }

  return Changed;
}

// Analyze instructions that generate CondCode and extract information.
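// CMPri-style instructions read (reg, imm), so the source register is operand
// 0 and the immediate operand 1; SUBri-style instructions also define a
// destination register, so the source register is operand 1 and the immediate
// operand 2 instead.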
bool X86CondBrFolding::analyzeCompare(const MachineInstr &MI, unsigned &SrcReg,
                                      int &CmpValue) {
  unsigned SrcRegIndex = 0;
  unsigned ValueIndex = 0;
  switch (MI.getOpcode()) {
  // TODO: handle test instructions.
  default:
    return false;
  case X86::CMP64ri32:
  case X86::CMP64ri8:
  case X86::CMP32ri:
  case X86::CMP32ri8:
  case X86::CMP16ri:
  case X86::CMP16ri8:
  case X86::CMP8ri:
    SrcRegIndex = 0;
    ValueIndex = 1;
    break;
  case X86::SUB64ri32:
  case X86::SUB64ri8:
  case X86::SUB32ri:
  case X86::SUB32ri8:
  case X86::SUB16ri:
  case X86::SUB16ri8:
  case X86::SUB8ri:
    SrcRegIndex = 1;
    ValueIndex = 2;
    break;
  }
  SrcReg = MI.getOperand(SrcRegIndex).getReg();
  if (!MI.getOperand(ValueIndex).isImm())
    return false;
  CmpValue = MI.getOperand(ValueIndex).getImm();
  return true;
}

// Analyze a candidate MBB and extract all the information needed.
// A valid candidate will have two successors.
// It should also end with the sequence
//     CmpInstr,
//     CondBr,
//     UncondBr.
// Return a TargetMBBInfo if MBB is a valid candidate and nullptr otherwise.
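// A qualifying block therefore ends with a sequence like
//     cmp %0, 20
//     je  bb_5
//     jmp bb_6
// (as in bb_4 of the header example), which the loop below scans bottom-up.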
std::unique_ptr<TargetMBBInfo>
X86CondBrFolding::analyzeMBB(MachineBasicBlock &MBB) {
  MachineBasicBlock *TBB;
  MachineBasicBlock *FBB;
  MachineInstr *BrInstr;
  MachineInstr *CmpInstr;
  X86::CondCode CC;
  unsigned SrcReg;
  int CmpValue;
  bool Modified;
  bool CmpBrOnly;

  if (MBB.succ_size() != 2)
    return nullptr;

  CmpBrOnly = true;
  FBB = TBB = nullptr;
  CmpInstr = nullptr;
  MachineBasicBlock::iterator I = MBB.end();
  while (I != MBB.begin()) {
    --I;
    if (I->isDebugValue())
      continue;
    if (I->getOpcode() == X86::JMP_1) {
      if (FBB)
        return nullptr;
      FBB = I->getOperand(0).getMBB();
      continue;
    }
    if (I->isBranch()) {
      if (TBB)
        return nullptr;
      CC = X86::getCondFromBranch(*I);
      switch (CC) {
      default:
        return nullptr;
      case X86::COND_E:
      case X86::COND_L:
      case X86::COND_G:
      case X86::COND_NE:
      case X86::COND_LE:
      case X86::COND_GE:
        break;
      }
      TBB = I->getOperand(0).getMBB();
      BrInstr = &*I;
      continue;
    }
    if (analyzeCompare(*I, SrcReg, CmpValue)) {
      if (CmpInstr)
        return nullptr;
      CmpInstr = &*I;
      continue;
    }
    CmpBrOnly = false;
    break;
  }

  if (!TBB || !FBB || !CmpInstr)
    return nullptr;

  // Simplify CondCode. Note this only simplifies the findPath logic and does
  // not change the instructions here.
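  // For example, for signed values "x <= 10" (COND_LE against 10) is treated
  // as "x < 11" (COND_L against 11) and "x >= 10" (COND_GE) as "x > 9"
  // (COND_G against 9); the INT_MAX/INT_MIN checks guard the +/-1 adjustment
  // against overflow.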
  switch (CC) {
  case X86::COND_NE:
    CC = X86::COND_E;
    std::swap(TBB, FBB);
    Modified = true;
    break;
  case X86::COND_LE:
    if (CmpValue == INT_MAX)
      return nullptr;
    CC = X86::COND_L;
    CmpValue += 1;
    Modified = true;
    break;
  case X86::COND_GE:
    if (CmpValue == INT_MIN)
      return nullptr;
    CC = X86::COND_G;
    CmpValue -= 1;
    Modified = true;
    break;
  default:
    Modified = false;
    break;
  }
  return llvm::make_unique<TargetMBBInfo>(TargetMBBInfo{
      TBB, FBB, BrInstr, CmpInstr, CC, SrcReg, CmpValue, Modified, CmpBrOnly});
}

bool X86CondBrFoldingPass::runOnMachineFunction(MachineFunction &MF) {
  const X86Subtarget &ST = MF.getSubtarget<X86Subtarget>();
  if (!ST.threewayBranchProfitable())
    return false;
  const X86InstrInfo *TII = ST.getInstrInfo();
  const MachineBranchProbabilityInfo *MBPI =
      &getAnalysis<MachineBranchProbabilityInfo>();

  X86CondBrFolding CondBr(TII, MBPI, MF);
  return CondBr.optimize();
}