//===- InlineSpiller.cpp - Insert spills and restores inline --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The inline spiller modifies the machine function directly instead of
// inserting spills and restores in VirtRegMap.
//
//===----------------------------------------------------------------------===//

#include "SplitKit.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervalCalc.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/LiveRangeEdit.h"
#include "llvm/CodeGen/LiveStacks.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineInstrBundle.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/Spiller.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/Support/BlockFrequency.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumSpilledRanges, "Number of spilled live ranges");
STATISTIC(NumSnippets, "Number of spilled snippets");
STATISTIC(NumSpills, "Number of spills inserted");
STATISTIC(NumSpillsRemoved, "Number of spills removed");
STATISTIC(NumReloads, "Number of reloads inserted");
STATISTIC(NumReloadsRemoved, "Number of reloads removed");
STATISTIC(NumFolded, "Number of folded stack accesses");
STATISTIC(NumFoldedLoads, "Number of folded loads");
STATISTIC(NumRemats, "Number of rematerialized defs for spilling");

static cl::opt<bool> DisableHoisting("disable-spill-hoist", cl::Hidden,
                                     cl::desc("Disable inline spill hoisting"));
static cl::opt<bool>
RestrictStatepointRemat("restrict-statepoint-remat",
                        cl::init(false), cl::Hidden,
                        cl::desc("Restrict remat for statepoint operands"));

namespace {

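// Summary comment: HoistSpillHelper implements the spill hoisting performed
// after InlineSpiller has inserted its spills. InlineSpiller records each
// spill it emits via addToMergeableSpills(); postOptimization() later calls
// hoistAllSpills(), which merges spills of the same original value and tries
// to move them to better blocks using the dominator tree and block
// frequencies (MDT, MBFI).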
class HoistSpillHelper : private LiveRangeEdit::Delegate {
  MachineFunction &MF;
  LiveIntervals &LIS;
  LiveStacks &LSS;
  AliasAnalysis *AA;
  MachineDominatorTree &MDT;
  MachineLoopInfo &Loops;
  VirtRegMap &VRM;
  MachineRegisterInfo &MRI;
  const TargetInstrInfo &TII;
  const TargetRegisterInfo &TRI;
  const MachineBlockFrequencyInfo &MBFI;

  InsertPointAnalysis IPA;

  // Map from StackSlot to the LiveInterval of the original register.
  // Note the LiveInterval of the original register may have been deleted
  // after it is spilled. We keep a copy here to track the range where
  // spills can be moved.
  DenseMap<int, std::unique_ptr<LiveInterval>> StackSlotToOrigLI;

  // Map from a pair of (StackSlot, Original VNI) to a set of spills which
  // have the same stack slot and have equal values defined by Original VNI.
  // These spills are mergeable and are hoist candidates.
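  // For example, if two blocks spill the value defined by the same original
  // VNI to the same stack slot, both spill instructions end up in one
  // MergeableSpills set and become candidates for being merged into a single
  // hoisted spill by hoistAllSpills().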
  using MergeableSpillsMap =
      MapVector<std::pair<int, VNInfo *>, SmallPtrSet<MachineInstr *, 16>>;
  MergeableSpillsMap MergeableSpills;
| 113 | |
| 114 | /// This is the map from original register to a set containing all its |
| 115 | /// siblings. To hoist a spill to another BB, we need to find out a live |
| 116 | /// sibling there and use it as the source of the new spill. |
| 117 | DenseMap<unsigned, SmallSetVector<unsigned, 16>> Virt2SiblingsMap; |
| 118 | |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 119 | bool isSpillCandBB(LiveInterval &OrigLI, VNInfo &OrigVNI, |
| 120 | MachineBasicBlock &BB, unsigned &LiveReg); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 121 | |
| 122 | void rmRedundantSpills( |
| 123 | SmallPtrSet<MachineInstr *, 16> &Spills, |
| 124 | SmallVectorImpl<MachineInstr *> &SpillsToRm, |
| 125 | DenseMap<MachineDomTreeNode *, MachineInstr *> &SpillBBToSpill); |
| 126 | |
| 127 | void getVisitOrders( |
| 128 | MachineBasicBlock *Root, SmallPtrSet<MachineInstr *, 16> &Spills, |
| 129 | SmallVectorImpl<MachineDomTreeNode *> &Orders, |
| 130 | SmallVectorImpl<MachineInstr *> &SpillsToRm, |
| 131 | DenseMap<MachineDomTreeNode *, unsigned> &SpillsToKeep, |
| 132 | DenseMap<MachineDomTreeNode *, MachineInstr *> &SpillBBToSpill); |
| 133 | |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 134 | void runHoistSpills(LiveInterval &OrigLI, VNInfo &OrigVNI, |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 135 | SmallPtrSet<MachineInstr *, 16> &Spills, |
| 136 | SmallVectorImpl<MachineInstr *> &SpillsToRm, |
| 137 | DenseMap<MachineBasicBlock *, unsigned> &SpillsToIns); |
| 138 | |
| 139 | public: |
| 140 | HoistSpillHelper(MachineFunctionPass &pass, MachineFunction &mf, |
| 141 | VirtRegMap &vrm) |
Wei Mi | 963f2df | 2016-04-15 23:16:44 +0000 | [diff] [blame] | 142 | : MF(mf), LIS(pass.getAnalysis<LiveIntervals>()), |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 143 | LSS(pass.getAnalysis<LiveStacks>()), |
| 144 | AA(&pass.getAnalysis<AAResultsWrapperPass>().getAAResults()), |
| 145 | MDT(pass.getAnalysis<MachineDominatorTree>()), |
| 146 | Loops(pass.getAnalysis<MachineLoopInfo>()), VRM(vrm), |
Eugene Zelenko | 900b633 | 2017-08-29 22:32:07 +0000 | [diff] [blame] | 147 | MRI(mf.getRegInfo()), TII(*mf.getSubtarget().getInstrInfo()), |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 148 | TRI(*mf.getSubtarget().getRegisterInfo()), |
Wei Mi | 8c4136b | 2016-05-11 22:37:43 +0000 | [diff] [blame] | 149 | MBFI(pass.getAnalysis<MachineBlockFrequencyInfo>()), |
| 150 | IPA(LIS, mf.getNumBlockIDs()) {} |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 151 | |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 152 | void addToMergeableSpills(MachineInstr &Spill, int StackSlot, |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 153 | unsigned Original); |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 154 | bool rmFromMergeableSpills(MachineInstr &Spill, int StackSlot); |
Wei Mi | 963f2df | 2016-04-15 23:16:44 +0000 | [diff] [blame] | 155 | void hoistAllSpills(); |
| 156 | void LRE_DidCloneVirtReg(unsigned, unsigned) override; |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 157 | }; |
| 158 | |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 159 | class InlineSpiller : public Spiller { |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 160 | MachineFunction &MF; |
| 161 | LiveIntervals &LIS; |
| 162 | LiveStacks &LSS; |
| 163 | AliasAnalysis *AA; |
Jakob Stoklund Olesen | a0d5ec1 | 2011-03-15 21:13:25 +0000 | [diff] [blame] | 164 | MachineDominatorTree &MDT; |
| 165 | MachineLoopInfo &Loops; |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 166 | VirtRegMap &VRM; |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 167 | MachineRegisterInfo &MRI; |
| 168 | const TargetInstrInfo &TII; |
| 169 | const TargetRegisterInfo &TRI; |
Benjamin Kramer | e2a1d89 | 2013-06-17 19:00:36 +0000 | [diff] [blame] | 170 | const MachineBlockFrequencyInfo &MBFI; |
Jakob Stoklund Olesen | bde96ad | 2010-06-30 23:03:52 +0000 | [diff] [blame] | 171 | |
| 172 | // Variables that are valid during spill(), but used by multiple methods. |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 173 | LiveRangeEdit *Edit; |
Jakob Stoklund Olesen | e466345 | 2011-03-26 22:16:41 +0000 | [diff] [blame] | 174 | LiveInterval *StackInt; |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 175 | int StackSlot; |
Jakob Stoklund Olesen | a0d5ec1 | 2011-03-15 21:13:25 +0000 | [diff] [blame] | 176 | unsigned Original; |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 177 | |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 178 | // All registers to spill to StackSlot, including the main register. |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 179 | SmallVector<unsigned, 8> RegsToSpill; |
| 180 | |
| 181 | // All COPY instructions to/from snippets. |
| 182 | // They are ignored since both operands refer to the same stack slot. |
| 183 | SmallPtrSet<MachineInstr*, 8> SnippetCopies; |
| 184 | |
Jakob Stoklund Olesen | 2edaa2f | 2010-10-20 22:00:51 +0000 | [diff] [blame] | 185 | // Values that failed to remat at some point. |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 186 | SmallPtrSet<VNInfo*, 8> UsedValues; |
Jakob Stoklund Olesen | 9603718 | 2010-07-02 17:44:57 +0000 | [diff] [blame] | 187 | |
Jakob Stoklund Olesen | 27320cb | 2011-03-18 04:23:06 +0000 | [diff] [blame] | 188 | // Dead defs generated during spilling. |
| 189 | SmallVector<MachineInstr*, 8> DeadDefs; |
Jakob Stoklund Olesen | a0d5ec1 | 2011-03-15 21:13:25 +0000 | [diff] [blame] | 190 | |
  // Object that records spill information and performs the hoisting.
  HoistSpillHelper HSpiller;

  ~InlineSpiller() override = default;

public:
  InlineSpiller(MachineFunctionPass &pass, MachineFunction &mf, VirtRegMap &vrm)
      : MF(mf), LIS(pass.getAnalysis<LiveIntervals>()),
        LSS(pass.getAnalysis<LiveStacks>()),
        AA(&pass.getAnalysis<AAResultsWrapperPass>().getAAResults()),
        MDT(pass.getAnalysis<MachineDominatorTree>()),
        Loops(pass.getAnalysis<MachineLoopInfo>()), VRM(vrm),
        MRI(mf.getRegInfo()), TII(*mf.getSubtarget().getInstrInfo()),
        TRI(*mf.getSubtarget().getRegisterInfo()),
        MBFI(pass.getAnalysis<MachineBlockFrequencyInfo>()),
        HSpiller(pass, mf, vrm) {}

  void spill(LiveRangeEdit &) override;
  void postOptimization() override;

private:
  bool isSnippet(const LiveInterval &SnipLI);
  void collectRegsToSpill();

  bool isRegToSpill(unsigned Reg) { return is_contained(RegsToSpill, Reg); }

  bool isSibling(unsigned Reg);
  bool hoistSpillInsideBB(LiveInterval &SpillLI, MachineInstr &CopyMI);
  void eliminateRedundantSpills(LiveInterval &LI, VNInfo *VNI);

  void markValueUsed(LiveInterval*, VNInfo*);
  bool canGuaranteeAssignmentAfterRemat(unsigned VReg, MachineInstr &MI);
  bool reMaterializeFor(LiveInterval &, MachineInstr &MI);
  void reMaterializeAll();

  bool coalesceStackAccess(MachineInstr *MI, unsigned Reg);
  bool foldMemoryOperand(ArrayRef<std::pair<MachineInstr *, unsigned>>,
                         MachineInstr *LoadMI = nullptr);
  void insertReload(unsigned VReg, SlotIndex, MachineBasicBlock::iterator MI);
  void insertSpill(unsigned VReg, bool isKill, MachineBasicBlock::iterator MI);

  void spillAroundUses(unsigned Reg);
  void spillAll();
};

} // end anonymous namespace

Spiller::~Spiller() = default;

void Spiller::anchor() {}

Spiller *llvm::createInlineSpiller(MachineFunctionPass &pass,
                                   MachineFunction &mf,
                                   VirtRegMap &vrm) {
  return new InlineSpiller(pass, mf, vrm);
}

//===----------------------------------------------------------------------===//
//                                Snippets
//===----------------------------------------------------------------------===//

// When spilling a virtual register, we also spill any snippets it is connected
// to. The snippets are small live ranges that only have a single real use,
// leftovers from live range splitting. Spilling them enables memory operand
// folding or tightens the live range around the single use.
//
// This minimizes register pressure and maximizes the store-to-load distance for
// spill slots which can be important in tight loops.

/// isFullCopyOf - If MI is a COPY to or from Reg, return the other register,
/// otherwise return 0.
static unsigned isFullCopyOf(const MachineInstr &MI, unsigned Reg) {
  if (!MI.isFullCopy())
    return 0;
  if (MI.getOperand(0).getReg() == Reg)
    return MI.getOperand(1).getReg();
  if (MI.getOperand(1).getReg() == Reg)
    return MI.getOperand(0).getReg();
  return 0;
}

/// isSnippet - Identify if a live interval is a snippet that should be spilled.
/// It is assumed that SnipLI is a virtual register with the same original as
/// Edit->getReg().
bool InlineSpiller::isSnippet(const LiveInterval &SnipLI) {
  unsigned Reg = Edit->getReg();

  // A snippet is a tiny live range with only a single instruction using it
  // besides copies to/from Reg or spills/fills. We accept:
  //
  //   %snip = COPY %Reg / FILL fi#
  //   %snip = USE %snip
  //   %Reg = COPY %snip / SPILL %snip, fi#
  //
  if (SnipLI.getNumValNums() > 2 || !LIS.intervalIsInOneMBB(SnipLI))
    return false;

  MachineInstr *UseMI = nullptr;

  // Check that all uses satisfy our criteria.
  for (MachineRegisterInfo::reg_instr_nodbg_iterator
       RI = MRI.reg_instr_nodbg_begin(SnipLI.reg),
       E = MRI.reg_instr_nodbg_end(); RI != E; ) {
    MachineInstr &MI = *RI++;

    // Allow copies to/from Reg.
    if (isFullCopyOf(MI, Reg))
      continue;

    // Allow stack slot loads.
    int FI;
    if (SnipLI.reg == TII.isLoadFromStackSlot(MI, FI) && FI == StackSlot)
      continue;

    // Allow stack slot stores.
    if (SnipLI.reg == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot)
      continue;

    // Allow a single additional instruction.
    if (UseMI && &MI != UseMI)
      return false;
    UseMI = &MI;
  }
  return true;
}

/// collectRegsToSpill - Collect live range snippets that only have a single
/// real use.
void InlineSpiller::collectRegsToSpill() {
  unsigned Reg = Edit->getReg();

  // Main register always spills.
  RegsToSpill.assign(1, Reg);
  SnippetCopies.clear();

  // Snippets all have the same original, so there can't be any for an original
  // register.
  if (Original == Reg)
    return;

  for (MachineRegisterInfo::reg_instr_iterator
       RI = MRI.reg_instr_begin(Reg), E = MRI.reg_instr_end(); RI != E; ) {
    MachineInstr &MI = *RI++;
    unsigned SnipReg = isFullCopyOf(MI, Reg);
    if (!isSibling(SnipReg))
      continue;
    LiveInterval &SnipLI = LIS.getInterval(SnipReg);
    if (!isSnippet(SnipLI))
      continue;
    SnippetCopies.insert(&MI);
    if (isRegToSpill(SnipReg))
      continue;
    RegsToSpill.push_back(SnipReg);
    LLVM_DEBUG(dbgs() << "\talso spill snippet " << SnipLI << '\n');
    ++NumSnippets;
  }
}

bool InlineSpiller::isSibling(unsigned Reg) {
  return Register::isVirtualRegister(Reg) && VRM.getOriginal(Reg) == Original;
}

/// It is beneficial to spill to an earlier place in the same BB in cases like
/// the following: there is an alternative def earlier in the same MBB, so
/// hoist the spill as far as possible in SpillMBB. This can ease register
/// pressure:
///
///   x = def
///   y = use x
///   s = copy x
///
/// Hoisting the spill of s to immediately after the def removes the
/// interference between x and y:
///
///   x = def
///   spill x
///   y = use killed x
///
/// This hoist only helps when the copy kills its source.
///
bool InlineSpiller::hoistSpillInsideBB(LiveInterval &SpillLI,
                                       MachineInstr &CopyMI) {
  SlotIndex Idx = LIS.getInstructionIndex(CopyMI);
#ifndef NDEBUG
  VNInfo *VNI = SpillLI.getVNInfoAt(Idx.getRegSlot());
  assert(VNI && VNI->def == Idx.getRegSlot() && "Not defined by copy");
#endif

  Register SrcReg = CopyMI.getOperand(1).getReg();
  LiveInterval &SrcLI = LIS.getInterval(SrcReg);
  VNInfo *SrcVNI = SrcLI.getVNInfoAt(Idx);
  LiveQueryResult SrcQ = SrcLI.Query(Idx);
  MachineBasicBlock *DefMBB = LIS.getMBBFromIndex(SrcVNI->def);
  if (DefMBB != CopyMI.getParent() || !SrcQ.isKill())
    return false;

  // Conservatively extend the stack slot range to the range of the original
  // value. We may be able to do better with stack slot coloring by being more
  // careful here.
  assert(StackInt && "No stack slot assigned yet.");
  LiveInterval &OrigLI = LIS.getInterval(Original);
  VNInfo *OrigVNI = OrigLI.getVNInfoAt(Idx);
  StackInt->MergeValueInAsValue(OrigLI, OrigVNI, StackInt->getValNumInfo(0));
  LLVM_DEBUG(dbgs() << "\tmerged orig valno " << OrigVNI->id << ": "
                    << *StackInt << '\n');

  // We are going to spill SrcVNI immediately after its def, so clear out
  // any later spills of the same value.
  eliminateRedundantSpills(SrcLI, SrcVNI);

  MachineBasicBlock *MBB = LIS.getMBBFromIndex(SrcVNI->def);
  MachineBasicBlock::iterator MII;
  if (SrcVNI->isPHIDef())
    MII = MBB->SkipPHIsLabelsAndDebug(MBB->begin());
  else {
    MachineInstr *DefMI = LIS.getInstructionFromIndex(SrcVNI->def);
    assert(DefMI && "Defining instruction disappeared");
    MII = DefMI;
    ++MII;
  }
  // Insert spill without kill flag immediately after def.
  TII.storeRegToStackSlot(*MBB, MII, SrcReg, false, StackSlot,
                          MRI.getRegClass(SrcReg), &TRI);
  --MII; // Point to store instruction.
  LIS.InsertMachineInstrInMaps(*MII);
  LLVM_DEBUG(dbgs() << "\thoisted: " << SrcVNI->def << '\t' << *MII);

  HSpiller.addToMergeableSpills(*MII, StackSlot, Original);
  ++NumSpills;
  return true;
}

/// eliminateRedundantSpills - SLI:VNI is known to be on the stack. Remove any
/// redundant spills of this value in SLI.reg and sibling copies.
void InlineSpiller::eliminateRedundantSpills(LiveInterval &SLI, VNInfo *VNI) {
  assert(VNI && "Missing value");
  SmallVector<std::pair<LiveInterval*, VNInfo*>, 8> WorkList;
  WorkList.push_back(std::make_pair(&SLI, VNI));
  assert(StackInt && "No stack slot assigned yet.");

  do {
    LiveInterval *LI;
    std::tie(LI, VNI) = WorkList.pop_back_val();
    unsigned Reg = LI->reg;
    LLVM_DEBUG(dbgs() << "Checking redundant spills for " << VNI->id << '@'
                      << VNI->def << " in " << *LI << '\n');

    // Regs to spill are taken care of.
    if (isRegToSpill(Reg))
      continue;

    // Add all of VNI's live range to StackInt.
    StackInt->MergeValueInAsValue(*LI, VNI, StackInt->getValNumInfo(0));
    LLVM_DEBUG(dbgs() << "Merged to stack int: " << *StackInt << '\n');

    // Find all spills and copies of VNI.
    for (MachineRegisterInfo::use_instr_nodbg_iterator
         UI = MRI.use_instr_nodbg_begin(Reg), E = MRI.use_instr_nodbg_end();
         UI != E; ) {
      MachineInstr &MI = *UI++;
      if (!MI.isCopy() && !MI.mayStore())
        continue;
      SlotIndex Idx = LIS.getInstructionIndex(MI);
      if (LI->getVNInfoAt(Idx) != VNI)
        continue;

      // Follow sibling copies down the dominator tree.
      if (unsigned DstReg = isFullCopyOf(MI, Reg)) {
        if (isSibling(DstReg)) {
          LiveInterval &DstLI = LIS.getInterval(DstReg);
          VNInfo *DstVNI = DstLI.getVNInfoAt(Idx.getRegSlot());
          assert(DstVNI && "Missing defined value");
          assert(DstVNI->def == Idx.getRegSlot() && "Wrong copy def slot");
          WorkList.push_back(std::make_pair(&DstLI, DstVNI));
        }
        continue;
      }

      // Erase spills.
      int FI;
      if (Reg == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot) {
        LLVM_DEBUG(dbgs() << "Redundant spill " << Idx << '\t' << MI);
        // eliminateDeadDefs won't normally remove stores, so switch opcode.
        MI.setDesc(TII.get(TargetOpcode::KILL));
        DeadDefs.push_back(&MI);
        ++NumSpillsRemoved;
        if (HSpiller.rmFromMergeableSpills(MI, StackSlot))
          --NumSpills;
      }
    }
  } while (!WorkList.empty());
}

//===----------------------------------------------------------------------===//
//                            Rematerialization
//===----------------------------------------------------------------------===//

/// markValueUsed - Remember that VNI failed to rematerialize, so its defining
/// instruction cannot be eliminated. See through snippet copies.
void InlineSpiller::markValueUsed(LiveInterval *LI, VNInfo *VNI) {
  SmallVector<std::pair<LiveInterval*, VNInfo*>, 8> WorkList;
  WorkList.push_back(std::make_pair(LI, VNI));
  do {
    std::tie(LI, VNI) = WorkList.pop_back_val();
    if (!UsedValues.insert(VNI).second)
      continue;

    if (VNI->isPHIDef()) {
      MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
      for (MachineBasicBlock *P : MBB->predecessors()) {
        VNInfo *PVNI = LI->getVNInfoBefore(LIS.getMBBEndIdx(P));
        if (PVNI)
          WorkList.push_back(std::make_pair(LI, PVNI));
      }
      continue;
    }

    // Follow snippet copies.
    MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
    if (!SnippetCopies.count(MI))
      continue;
    LiveInterval &SnipLI = LIS.getInterval(MI->getOperand(1).getReg());
    assert(isRegToSpill(SnipLI.reg) && "Unexpected register in copy");
    VNInfo *SnipVNI = SnipLI.getVNInfoAt(VNI->def.getRegSlot(true));
    assert(SnipVNI && "Snippet undefined before copy");
    WorkList.push_back(std::make_pair(&SnipLI, SnipVNI));
  } while (!WorkList.empty());
}

bool InlineSpiller::canGuaranteeAssignmentAfterRemat(unsigned VReg,
                                                     MachineInstr &MI) {
  if (!RestrictStatepointRemat)
    return true;
  // Here's a quick explanation of the problem we're trying to handle here:
  // * There are some pseudo instructions with more vreg uses than there are
  //   physical registers on the machine.
  // * This is normally handled by spilling the vreg, and folding the reload
  //   into the user instruction. (Thus decreasing the number of used vregs
  //   until the remainder can be assigned to physregs.)
  // * However, since we may try to spill vregs in any order, we can end up
  //   trying to spill each operand to the instruction, and then rematting it
  //   instead. When that happens, the new live intervals (for the remats) are
  //   expected to be trivially assignable (i.e. RS_Done). However, since we
  //   may have more remats than physregs, we're guaranteed to fail to assign
  //   one.
  // At the moment, we only handle this for STATEPOINTs since they're the only
  // pseudo op where we've seen this. If we start seeing other instructions
  // with the same problem, we need to revisit this.
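  // Note: a false result from this function only disables rematerialization
  // for the operand; the value can still be spilled, and (as noted above) the
  // reload is normally foldable into the STATEPOINT itself.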
  if (MI.getOpcode() != TargetOpcode::STATEPOINT)
    return true;
  // For STATEPOINTs we allow re-materialization for fixed arguments only,
  // hoping that the number of physical registers is enough to cover all fixed
  // arguments. If that turns out not to be true, we need to revisit this.
  for (unsigned Idx = StatepointOpers(&MI).getVarIdx(),
                EndIdx = MI.getNumOperands();
       Idx < EndIdx; ++Idx) {
    MachineOperand &MO = MI.getOperand(Idx);
    if (MO.isReg() && MO.getReg() == VReg)
      return false;
  }
  return true;
}

/// reMaterializeFor - Attempt to rematerialize before MI instead of reloading.
bool InlineSpiller::reMaterializeFor(LiveInterval &VirtReg, MachineInstr &MI) {
  // Analyze instruction
  SmallVector<std::pair<MachineInstr *, unsigned>, 8> Ops;
  VirtRegInfo RI = AnalyzeVirtRegInBundle(MI, VirtReg.reg, &Ops);

  if (!RI.Reads)
    return false;

  SlotIndex UseIdx = LIS.getInstructionIndex(MI).getRegSlot(true);
  VNInfo *ParentVNI = VirtReg.getVNInfoAt(UseIdx.getBaseIndex());

  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << "\tadding <undef> flags: ");
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg)
        MO.setIsUndef();
    }
    LLVM_DEBUG(dbgs() << UseIdx << '\t' << MI);
    return true;
  }

  if (SnippetCopies.count(&MI))
    return false;

  LiveInterval &OrigLI = LIS.getInterval(Original);
  VNInfo *OrigVNI = OrigLI.getVNInfoAt(UseIdx);
  LiveRangeEdit::Remat RM(ParentVNI);
  RM.OrigMI = LIS.getInstructionFromIndex(OrigVNI->def);

  if (!Edit->canRematerializeAt(RM, OrigVNI, UseIdx, false)) {
    markValueUsed(&VirtReg, ParentVNI);
    LLVM_DEBUG(dbgs() << "\tcannot remat for " << UseIdx << '\t' << MI);
    return false;
  }

  // If the instruction also writes VirtReg.reg, it had better not require the
  // same register for uses and defs.
  if (RI.Tied) {
    markValueUsed(&VirtReg, ParentVNI);
    LLVM_DEBUG(dbgs() << "\tcannot remat tied reg: " << UseIdx << '\t' << MI);
    return false;
  }

  // Before rematerializing into a register for a single instruction, try to
  // fold a load into the instruction. That avoids allocating a new register.
  if (RM.OrigMI->canFoldAsLoad() &&
      foldMemoryOperand(Ops, RM.OrigMI)) {
    Edit->markRematerialized(RM.ParentVNI);
    ++NumFoldedLoads;
    return true;
  }

  // If we can't guarantee that we'll be able to actually assign the new vreg,
  // we can't remat.
  if (!canGuaranteeAssignmentAfterRemat(VirtReg.reg, MI)) {
    markValueUsed(&VirtReg, ParentVNI);
    LLVM_DEBUG(dbgs() << "\tcannot remat for " << UseIdx << '\t' << MI);
    return false;
  }

  // Allocate a new register for the remat.
  unsigned NewVReg = Edit->createFrom(Original);

  // Finally we can rematerialize OrigMI before MI.
  SlotIndex DefIdx =
      Edit->rematerializeAt(*MI.getParent(), MI, NewVReg, RM, TRI);

  // We take the DebugLoc from MI, since OrigMI may be attributed to a
  // different source location.
  auto *NewMI = LIS.getInstructionFromIndex(DefIdx);
  NewMI->setDebugLoc(MI.getDebugLoc());

  (void)DefIdx;
  LLVM_DEBUG(dbgs() << "\tremat: " << DefIdx << '\t'
                    << *LIS.getInstructionFromIndex(DefIdx));

  // Replace operands
  for (const auto &OpPair : Ops) {
    MachineOperand &MO = OpPair.first->getOperand(OpPair.second);
    if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg) {
      MO.setReg(NewVReg);
      MO.setIsKill();
    }
  }
  LLVM_DEBUG(dbgs() << "\t " << UseIdx << '\t' << MI << '\n');

  ++NumRemats;
  return true;
}

/// reMaterializeAll - Try to rematerialize as many uses as possible,
/// and trim the live ranges after.
void InlineSpiller::reMaterializeAll() {
  if (!Edit->anyRematerializable(AA))
    return;

  UsedValues.clear();

  // Try to remat before all uses of snippets.
  bool anyRemat = false;
  for (unsigned Reg : RegsToSpill) {
    LiveInterval &LI = LIS.getInterval(Reg);
    for (MachineRegisterInfo::reg_bundle_iterator
         RegI = MRI.reg_bundle_begin(Reg), E = MRI.reg_bundle_end();
         RegI != E; ) {
      MachineInstr &MI = *RegI++;

      // Debug values are not allowed to affect codegen.
      if (MI.isDebugValue())
        continue;

      assert(!MI.isDebugInstr() && "Did not expect to find a use in debug "
             "instruction that isn't a DBG_VALUE");

      anyRemat |= reMaterializeFor(LI, MI);
    }
  }
  if (!anyRemat)
    return;

  // Remove any values that were completely rematted.
  for (unsigned Reg : RegsToSpill) {
    LiveInterval &LI = LIS.getInterval(Reg);
    for (LiveInterval::vni_iterator I = LI.vni_begin(), E = LI.vni_end();
         I != E; ++I) {
      VNInfo *VNI = *I;
      if (VNI->isUnused() || VNI->isPHIDef() || UsedValues.count(VNI))
        continue;
      MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
      MI->addRegisterDead(Reg, &TRI);
      if (!MI->allDefsAreDead())
        continue;
      LLVM_DEBUG(dbgs() << "All defs dead: " << *MI);
      DeadDefs.push_back(MI);
    }
  }

  // Eliminate dead code after remat. Note that some snippet copies may be
  // deleted here.
  if (DeadDefs.empty())
    return;
  LLVM_DEBUG(dbgs() << "Remat created " << DeadDefs.size() << " dead defs.\n");
  Edit->eliminateDeadDefs(DeadDefs, RegsToSpill, AA);

  // LiveRangeEdit::eliminateDeadDef is used to remove dead defining
  // instructions after rematerialization. To remove a VNI for a vreg from its
  // LiveInterval, LiveIntervals::removeVRegDefAt is used. However, after all
  // non-PHI VNIs are removed, PHI VNIs are still left in the LiveInterval.
  // So to get rid of an unused reg, we need to check whether it has a non-dbg
  // reference instead of whether it has a non-empty interval.
Benjamin Kramer | 391f5a6 | 2013-05-05 11:29:14 +0000 | [diff] [blame] | 707 | unsigned ResultPos = 0; |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 708 | for (unsigned Reg : RegsToSpill) { |
Wei Mi | a62f058 | 2016-02-05 18:14:24 +0000 | [diff] [blame] | 709 | if (MRI.reg_nodbg_empty(Reg)) { |
Benjamin Kramer | 391f5a6 | 2013-05-05 11:29:14 +0000 | [diff] [blame] | 710 | Edit->eraseVirtReg(Reg); |
Jakob Stoklund Olesen | add79c6 | 2011-03-29 17:47:00 +0000 | [diff] [blame] | 711 | continue; |
| 712 | } |
Matt Arsenault | c5d1e50 | 2017-07-22 00:24:01 +0000 | [diff] [blame] | 713 | |
Matt Arsenault | 5fbc870 | 2017-07-24 18:07:55 +0000 | [diff] [blame] | 714 | assert(LIS.hasInterval(Reg) && |
| 715 | (!LIS.getInterval(Reg).empty() || !MRI.reg_nodbg_empty(Reg)) && |
| 716 | "Empty and not used live-range?!"); |
| 717 | |
Benjamin Kramer | 391f5a6 | 2013-05-05 11:29:14 +0000 | [diff] [blame] | 718 | RegsToSpill[ResultPos++] = Reg; |
Jakob Stoklund Olesen | add79c6 | 2011-03-29 17:47:00 +0000 | [diff] [blame] | 719 | } |
Benjamin Kramer | 391f5a6 | 2013-05-05 11:29:14 +0000 | [diff] [blame] | 720 | RegsToSpill.erase(RegsToSpill.begin() + ResultPos, RegsToSpill.end()); |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 721 | LLVM_DEBUG(dbgs() << RegsToSpill.size() |
| 722 | << " registers to spill after remat.\n"); |
Jakob Stoklund Olesen | 9603718 | 2010-07-02 17:44:57 +0000 | [diff] [blame] | 723 | } |
| 724 | |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 725 | //===----------------------------------------------------------------------===// |
| 726 | // Spilling |
| 727 | //===----------------------------------------------------------------------===// |
| 728 | |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 729 | /// coalesceStackAccess - If MI is a load or store of StackSlot, it can be
| | /// removed.
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 730 | bool InlineSpiller::coalesceStackAccess(MachineInstr *MI, unsigned Reg) { |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 731 | int FI = 0; |
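| | // isLoadFromStackSlot / isStoreToStackSlot return the transferred register
| | // (or 0 if MI is not such an access) and set FI to the frame index used.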
Duncan P. N. Exon Smith | 9cfc75c | 2016-06-30 00:01:54 +0000 | [diff] [blame] | 732 | unsigned InstrReg = TII.isLoadFromStackSlot(*MI, FI); |
Jakob Stoklund Olesen | 37eb696 | 2011-09-15 17:54:28 +0000 | [diff] [blame] | 733 | bool IsLoad = InstrReg; |
| 734 | if (!IsLoad) |
Duncan P. N. Exon Smith | 9cfc75c | 2016-06-30 00:01:54 +0000 | [diff] [blame] | 735 | InstrReg = TII.isStoreToStackSlot(*MI, FI); |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 736 | |
| 737 | // We have a stack access. Is it the right register and slot? |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 738 | if (InstrReg != Reg || FI != StackSlot) |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 739 | return false; |
| 740 | |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 741 | if (!IsLoad) |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 742 | HSpiller.rmFromMergeableSpills(*MI, StackSlot); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 743 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 744 | LLVM_DEBUG(dbgs() << "Coalescing stack access: " << *MI); |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 745 | LIS.RemoveMachineInstrFromMaps(*MI); |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 746 | MI->eraseFromParent(); |
Jakob Stoklund Olesen | 37eb696 | 2011-09-15 17:54:28 +0000 | [diff] [blame] | 747 | |
| 748 | if (IsLoad) { |
| 749 | ++NumReloadsRemoved; |
| 750 | --NumReloads; |
| 751 | } else { |
| 752 | ++NumSpillsRemoved; |
| 753 | --NumSpills; |
| 754 | } |
| 755 | |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 756 | return true; |
| 757 | } |
| 758 | |
Aaron Ballman | 615eb47 | 2017-10-15 14:32:27 +0000 | [diff] [blame] | 759 | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
Junmo Park | c7479ba | 2017-03-28 04:14:25 +0000 | [diff] [blame] | 760 | LLVM_DUMP_METHOD |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 761 | // Dump the range of instructions from B to E with their slot indexes. |
| 762 | static void dumpMachineInstrRangeWithSlotIndex(MachineBasicBlock::iterator B, |
| 763 | MachineBasicBlock::iterator E, |
| 764 | LiveIntervals const &LIS, |
| 765 | const char *const header, |
| 766 | unsigned VReg = 0) {
| 767 | char NextLine = '\n'; |
| 768 | char SlotIndent = '\t'; |
| 769 | |
Benjamin Kramer | b6d0bd4 | 2014-03-02 12:27:27 +0000 | [diff] [blame] | 770 | if (std::next(B) == E) { |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 771 | NextLine = ' '; |
| 772 | SlotIndent = ' '; |
| 773 | } |
| 774 | |
| 775 | dbgs() << '\t' << header << ": " << NextLine; |
| 776 | |
| 777 | for (MachineBasicBlock::iterator I = B; I != E; ++I) { |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 778 | SlotIndex Idx = LIS.getInstructionIndex(*I).getRegSlot(); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 779 | |
| 780 | // If a register was passed in and this instruction has it as a |
| 781 | // destination that is marked as an early clobber, print the |
| 782 | // early-clobber slot index. |
| 783 | if (VReg) { |
| 784 | MachineOperand *MO = I->findRegisterDefOperand(VReg); |
| 785 | if (MO && MO->isEarlyClobber()) |
| 786 | Idx = Idx.getRegSlot(true); |
| 787 | } |
| 788 | |
| 789 | dbgs() << SlotIndent << Idx << '\t' << *I; |
| 790 | } |
| 791 | } |
| 792 | #endif |
| 793 | |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 794 | /// foldMemoryOperand - Try folding stack slot references in Ops into their |
| 795 | /// instructions. |
| 796 | /// |
Florian Hahn | 5d06256 | 2019-12-02 19:41:09 +0000 | [diff] [blame] | 797 | /// @param Ops Operand indices from AnalyzeVirtRegInBundle(). |
Jakob Stoklund Olesen | 3b2966d | 2010-12-18 03:04:14 +0000 | [diff] [blame] | 798 | /// @param LoadMI Load instruction to use instead of stack slot when non-null. |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 799 | /// @return True on success. |
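| | ///
| | /// Illustrative note (not target-specific): a COPY that defines the spilled
| | /// vreg can fold into a stack store, and a COPY that reads the spilled vreg
| | /// can fold into a stack reload, when the target's foldMemoryOperand hook
| | /// supports it; the spill/reload accounting at the end of this function
| | /// relies on that distinction.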
| 800 | bool InlineSpiller:: |
Eugene Zelenko | 900b633 | 2017-08-29 22:32:07 +0000 | [diff] [blame] | 801 | foldMemoryOperand(ArrayRef<std::pair<MachineInstr *, unsigned>> Ops, |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 802 | MachineInstr *LoadMI) { |
| 803 | if (Ops.empty()) |
| 804 | return false; |
| 805 | // Don't attempt folding in bundles. |
| 806 | MachineInstr *MI = Ops.front().first; |
| 807 | if (Ops.back().first != MI || MI->isBundled()) |
| 808 | return false; |
| 809 | |
Jakob Stoklund Olesen | c94c967 | 2011-09-15 18:22:52 +0000 | [diff] [blame] | 810 | bool WasCopy = MI->isCopy(); |
Jakob Stoklund Olesen | eef48b6 | 2011-11-10 00:17:03 +0000 | [diff] [blame] | 811 | unsigned ImpReg = 0; |
| 812 | |
Michael Kuperstein | 47eb85a | 2016-11-23 18:33:49 +0000 | [diff] [blame] | 813 | // Spill subregs if the target allows it. |
| 814 | // We always want to spill subregs for stackmap/patchpoint pseudos. |
| 815 | bool SpillSubRegs = TII.isSubregFoldable() || |
| 816 | MI->getOpcode() == TargetOpcode::STATEPOINT || |
| 817 | MI->getOpcode() == TargetOpcode::PATCHPOINT || |
| 818 | MI->getOpcode() == TargetOpcode::STACKMAP; |
Andrew Trick | 10d5be4 | 2013-11-17 01:36:23 +0000 | [diff] [blame] | 819 | |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 820 | // TargetInstrInfo::foldMemoryOperand only expects explicit, non-tied |
| 821 | // operands. |
| 822 | SmallVector<unsigned, 8> FoldOps; |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 823 | for (const auto &OpPair : Ops) { |
| 824 | unsigned Idx = OpPair.second; |
| 825 | assert(MI == OpPair.first && "Instruction conflict during operand folding"); |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 826 | MachineOperand &MO = MI->getOperand(Idx); |
Jakob Stoklund Olesen | eef48b6 | 2011-11-10 00:17:03 +0000 | [diff] [blame] | 827 | if (MO.isImplicit()) { |
| 828 | ImpReg = MO.getReg(); |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 829 | continue; |
Jakob Stoklund Olesen | eef48b6 | 2011-11-10 00:17:03 +0000 | [diff] [blame] | 830 | } |
Michael Kuperstein | 47eb85a | 2016-11-23 18:33:49 +0000 | [diff] [blame] | 831 | |
Andrew Trick | 10d5be4 | 2013-11-17 01:36:23 +0000 | [diff] [blame] | 832 | if (!SpillSubRegs && MO.getSubReg()) |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 833 | return false; |
Jakob Stoklund Olesen | c6a2041 | 2011-02-08 19:33:55 +0000 | [diff] [blame] | 834 | // We cannot fold a load instruction into a def. |
| 835 | if (LoadMI && MO.isDef()) |
| 836 | return false; |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 837 | // Tied use operands should not be passed to foldMemoryOperand. |
| 838 | if (!MI->isRegTiedToDefOperand(Idx)) |
| 839 | FoldOps.push_back(Idx); |
| 840 | } |
| 841 | |
Quentin Colombet | ae3168d | 2016-12-08 00:06:51 +0000 | [diff] [blame] | 842 | // If we only have implicit uses, we won't be able to fold that. |
| 843 | // Moreover, TargetInstrInfo::foldMemoryOperand will assert if we try! |
| 844 | if (FoldOps.empty()) |
| 845 | return false; |
| 846 | |
Michael Liao | 8d6ea2d | 2019-07-05 20:23:59 +0000 | [diff] [blame] | 847 | MachineInstrSpan MIS(MI, MI->getParent()); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 848 | |
Jakob Stoklund Olesen | 3b2966d | 2010-12-18 03:04:14 +0000 | [diff] [blame] | 849 | MachineInstr *FoldMI = |
Duncan P. N. Exon Smith | 9cfc75c | 2016-06-30 00:01:54 +0000 | [diff] [blame] | 850 | LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS) |
Jonas Paulsson | fdc4ea3 | 2019-06-08 06:19:15 +0000 | [diff] [blame] | 851 | : TII.foldMemoryOperand(*MI, FoldOps, StackSlot, &LIS, &VRM); |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 852 | if (!FoldMI) |
| 853 | return false; |
Andrew Trick | 5749b8b | 2013-06-21 18:33:26 +0000 | [diff] [blame] | 854 | |
| 855 | // Remove LIS entries for any dead defs in the original MI that are not defs in FoldMI.
Duncan P. N. Exon Smith | f9ab416 | 2016-02-27 17:05:33 +0000 | [diff] [blame] | 856 | for (MIBundleOperands MO(*MI); MO.isValid(); ++MO) { |
Andrew Trick | 5749b8b | 2013-06-21 18:33:26 +0000 | [diff] [blame] | 857 | if (!MO->isReg()) |
| 858 | continue; |
Daniel Sanders | 0c47611 | 2019-08-15 19:22:08 +0000 | [diff] [blame] | 859 | Register Reg = MO->getReg(); |
Daniel Sanders | 2bea69b | 2019-08-01 23:27:28 +0000 | [diff] [blame] | 860 | if (!Reg || Register::isVirtualRegister(Reg) || MRI.isReserved(Reg)) { |
Andrew Trick | 5749b8b | 2013-06-21 18:33:26 +0000 | [diff] [blame] | 861 | continue; |
| 862 | } |
Andrew Trick | dfacda3 | 2014-01-07 07:31:10 +0000 | [diff] [blame] | 863 | // Skip non-Defs, including undef uses and internal reads. |
| 864 | if (MO->isUse()) |
| 865 | continue; |
Florian Hahn | 5154b02 | 2019-12-02 20:00:56 +0000 | [diff] [blame] | 866 | PhysRegInfo RI = AnalyzePhysRegInBundle(*FoldMI, Reg, &TRI); |
Matthias Braun | 60d69e2 | 2015-12-11 19:42:09 +0000 | [diff] [blame] | 867 | if (RI.FullyDefined) |
Andrew Trick | 5749b8b | 2013-06-21 18:33:26 +0000 | [diff] [blame] | 868 | continue; |
| 869 | // FoldMI does not define this physreg. Remove the LI segment. |
| 870 | assert(MO->isDead() && "Cannot fold physreg def"); |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 871 | SlotIndex Idx = LIS.getInstructionIndex(*MI).getRegSlot(); |
Matthias Braun | cfb8ad2 | 2015-01-21 18:50:21 +0000 | [diff] [blame] | 872 | LIS.removePhysRegDefAt(Reg, Idx); |
Andrew Trick | 5749b8b | 2013-06-21 18:33:26 +0000 | [diff] [blame] | 873 | } |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 874 | |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 875 | int FI; |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 876 | if (TII.isStoreToStackSlot(*MI, FI) && |
| 877 | HSpiller.rmFromMergeableSpills(*MI, FI)) |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 878 | --NumSpills; |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 879 | LIS.ReplaceMachineInstrInMaps(*MI, *FoldMI); |
Djordje Todorovic | 6890899 | 2020-02-10 09:49:14 +0100 | [diff] [blame] | 880 | // Update the call site info. |
| 881 | if (MI->isCandidateForCallSiteEntry()) |
Nikola Prica | 98603a8 | 2019-10-08 15:43:12 +0000 | [diff] [blame] | 882 | MI->getMF()->moveCallSiteInfo(MI, FoldMI); |
Jakob Stoklund Olesen | bd953d1 | 2010-07-09 17:29:08 +0000 | [diff] [blame] | 883 | MI->eraseFromParent(); |
Jakob Stoklund Olesen | eef48b6 | 2011-11-10 00:17:03 +0000 | [diff] [blame] | 884 | |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 885 | // Insert any new instructions other than FoldMI into the LIS maps. |
| 886 | assert(!MIS.empty() && "Unexpected empty span of instructions!"); |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 887 | for (MachineInstr &MI : MIS) |
| 888 | if (&MI != FoldMI) |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 889 | LIS.InsertMachineInstrInMaps(MI); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 890 | |
Jakob Stoklund Olesen | eef48b6 | 2011-11-10 00:17:03 +0000 | [diff] [blame] | 891 | // TII.foldMemoryOperand may have left some implicit operands on the |
| 892 | // instruction. Strip them. |
| 893 | if (ImpReg) |
| 894 | for (unsigned i = FoldMI->getNumOperands(); i; --i) { |
| 895 | MachineOperand &MO = FoldMI->getOperand(i - 1); |
| 896 | if (!MO.isReg() || !MO.isImplicit()) |
| 897 | break; |
| 898 | if (MO.getReg() == ImpReg) |
| 899 | FoldMI->RemoveOperand(i - 1); |
| 900 | } |
| 901 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 902 | LLVM_DEBUG(dumpMachineInstrRangeWithSlotIndex(MIS.begin(), MIS.end(), LIS, |
| 903 | "folded")); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 904 | |
Jakob Stoklund Olesen | c94c967 | 2011-09-15 18:22:52 +0000 | [diff] [blame] | 905 | if (!WasCopy) |
| 906 | ++NumFolded; |
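| | // For a folded COPY, operand 0 is the def: if the spilled register was the
| | // def, the fold produced a stack store (a spill); otherwise it produced a
| | // reload.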
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 907 | else if (Ops.front().second == 0) { |
Jakob Stoklund Olesen | c94c967 | 2011-09-15 18:22:52 +0000 | [diff] [blame] | 908 | ++NumSpills; |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 909 | HSpiller.addToMergeableSpills(*FoldMI, StackSlot, Original); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 910 | } else |
Jakob Stoklund Olesen | c94c967 | 2011-09-15 18:22:52 +0000 | [diff] [blame] | 911 | ++NumReloads; |
Jakob Stoklund Olesen | 8656a45 | 2010-07-01 00:13:04 +0000 | [diff] [blame] | 912 | return true; |
| 913 | } |
| 914 | |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 915 | void InlineSpiller::insertReload(unsigned NewVReg, |
Jakob Stoklund Olesen | 9f294a9 | 2011-04-18 20:23:27 +0000 | [diff] [blame] | 916 | SlotIndex Idx, |
Jakob Stoklund Olesen | bde96ad | 2010-06-30 23:03:52 +0000 | [diff] [blame] | 917 | MachineBasicBlock::iterator MI) { |
| 918 | MachineBasicBlock &MBB = *MI->getParent(); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 919 | |
Michael Liao | 8d6ea2d | 2019-07-05 20:23:59 +0000 | [diff] [blame] | 920 | MachineInstrSpan MIS(MI, &MBB); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 921 | TII.loadRegFromStackSlot(MBB, MI, NewVReg, StackSlot, |
| 922 | MRI.getRegClass(NewVReg), &TRI); |
| 923 | |
| 924 | LIS.InsertMachineInstrRangeInMaps(MIS.begin(), MI); |
| 925 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 926 | LLVM_DEBUG(dumpMachineInstrRangeWithSlotIndex(MIS.begin(), MI, LIS, "reload", |
| 927 | NewVReg)); |
Jakob Stoklund Olesen | c5a8c08 | 2011-05-05 17:22:53 +0000 | [diff] [blame] | 928 | ++NumReloads; |
Jakob Stoklund Olesen | bde96ad | 2010-06-30 23:03:52 +0000 | [diff] [blame] | 929 | } |
| 930 | |
Quentin Colombet | c668935 | 2017-06-05 23:51:27 +0000 | [diff] [blame] | 931 | /// Check if \p Def fully defines a VReg with an undefined value. |
| 932 | /// If that's the case, that means the value of VReg is actually |
| 933 | /// not relevant. |
| 934 | static bool isFullUndefDef(const MachineInstr &Def) { |
| 935 | if (!Def.isImplicitDef()) |
| 936 | return false; |
| 937 | assert(Def.getNumOperands() == 1 && |
| 938 | "Implicit def with more than one definition"); |
| 939 | // We can say that the VReg defined by Def is undef, only if it is |
| 940 | // fully defined by Def. Otherwise, some of the lanes may not be |
| 941 | // undef and the value of the VReg matters. |
| 942 | return !Def.getOperand(0).getSubReg(); |
| 943 | } |
| 944 | |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 945 | /// insertSpill - Insert a spill of NewVReg after MI. |
| 946 | void InlineSpiller::insertSpill(unsigned NewVReg, bool isKill, |
| 947 | MachineBasicBlock::iterator MI) { |
Jakob Stoklund Olesen | bde96ad | 2010-06-30 23:03:52 +0000 | [diff] [blame] | 948 | MachineBasicBlock &MBB = *MI->getParent(); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 949 | |
Michael Liao | 8d6ea2d | 2019-07-05 20:23:59 +0000 | [diff] [blame] | 950 | MachineInstrSpan MIS(MI, &MBB); |
Quentin Colombet | 9e9d638 | 2017-06-07 00:22:07 +0000 | [diff] [blame] | 951 | bool IsRealSpill = true; |
| 952 | if (isFullUndefDef(*MI)) { |
Quentin Colombet | c668935 | 2017-06-05 23:51:27 +0000 | [diff] [blame] | 953 | // Don't spill undef value. |
| 954 | // Anything works for undef, in particular keeping the memory |
| 955 | // uninitialized is a viable option and it saves code size and |
| 956 | // run time. |
| 957 | BuildMI(MBB, std::next(MI), MI->getDebugLoc(), TII.get(TargetOpcode::KILL)) |
| 958 | .addReg(NewVReg, getKillRegState(isKill)); |
Quentin Colombet | 9e9d638 | 2017-06-07 00:22:07 +0000 | [diff] [blame] | 959 | IsRealSpill = false; |
| 960 | } else |
Quentin Colombet | c668935 | 2017-06-05 23:51:27 +0000 | [diff] [blame] | 961 | TII.storeRegToStackSlot(MBB, std::next(MI), NewVReg, isKill, StackSlot, |
| 962 | MRI.getRegClass(NewVReg), &TRI); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 963 | |
Benjamin Kramer | b6d0bd4 | 2014-03-02 12:27:27 +0000 | [diff] [blame] | 964 | LIS.InsertMachineInstrRangeInMaps(std::next(MI), MIS.end()); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 965 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 966 | LLVM_DEBUG(dumpMachineInstrRangeWithSlotIndex(std::next(MI), MIS.end(), LIS, |
| 967 | "spill")); |
Jakob Stoklund Olesen | c5a8c08 | 2011-05-05 17:22:53 +0000 | [diff] [blame] | 968 | ++NumSpills; |
Quentin Colombet | 9e9d638 | 2017-06-07 00:22:07 +0000 | [diff] [blame] | 969 | if (IsRealSpill) |
| 970 | HSpiller.addToMergeableSpills(*std::next(MI), StackSlot, Original); |
Jakob Stoklund Olesen | bde96ad | 2010-06-30 23:03:52 +0000 | [diff] [blame] | 971 | } |
| 972 | |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 973 | /// spillAroundUses - insert spill code around each use of Reg. |
| 974 | void InlineSpiller::spillAroundUses(unsigned Reg) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 975 | LLVM_DEBUG(dbgs() << "spillAroundUses " << printReg(Reg) << '\n'); |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 976 | LiveInterval &OldLI = LIS.getInterval(Reg); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 977 | |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 978 | // Iterate over instructions using Reg. |
Owen Anderson | abb90c9 | 2014-03-13 06:02:25 +0000 | [diff] [blame] | 979 | for (MachineRegisterInfo::reg_bundle_iterator |
| 980 | RegI = MRI.reg_bundle_begin(Reg), E = MRI.reg_bundle_end(); |
| 981 | RegI != E; ) { |
Owen Anderson | ec5d480 | 2014-03-14 05:02:18 +0000 | [diff] [blame] | 982 | MachineInstr *MI = &*(RegI++); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 983 | |
Jakob Stoklund Olesen | cf6c5c9 | 2010-07-02 19:54:40 +0000 | [diff] [blame] | 984 | // Debug values are not allowed to affect codegen. |
Shiva Chen | 21eab93 | 2018-05-16 02:57:26 +0000 | [diff] [blame] | 985 | if (MI->isDebugValue()) { |
Jakob Stoklund Olesen | cf6c5c9 | 2010-07-02 19:54:40 +0000 | [diff] [blame] | 986 | // Modify DBG_VALUE now that the value is in a spill slot. |
David Blaikie | 0252265b | 2013-06-16 20:34:15 +0000 | [diff] [blame] | 987 | MachineBasicBlock *MBB = MI->getParent(); |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 988 | LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:\t" << *MI); |
Adrian Prantl | 6825fb6 | 2017-04-18 01:21:53 +0000 | [diff] [blame] | 989 | buildDbgValueForSpill(*MBB, MI, *MI, StackSlot); |
| 990 | MBB->erase(MI); |
Jakob Stoklund Olesen | cf6c5c9 | 2010-07-02 19:54:40 +0000 | [diff] [blame] | 991 | continue; |
| 992 | } |
| 993 | |
Shiva Chen | 21eab93 | 2018-05-16 02:57:26 +0000 | [diff] [blame] | 994 | assert(!MI->isDebugInstr() && "Did not expect to find a use in debug " |
| 995 | "instruction that isn't a DBG_VALUE"); |
| 996 | |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 997 | // Ignore copies to/from snippets. We'll delete them. |
| 998 | if (SnippetCopies.count(MI)) |
| 999 | continue; |
| 1000 | |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 1001 | // Stack slot accesses may coalesce away. |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1002 | if (coalesceStackAccess(MI, Reg)) |
Jakob Stoklund Olesen | 7fd4905 | 2010-08-04 22:35:11 +0000 | [diff] [blame] | 1003 | continue; |
| 1004 | |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1005 | // Analyze instruction. |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 1006 | SmallVector<std::pair<MachineInstr*, unsigned>, 8> Ops; |
Florian Hahn | 5d06256 | 2019-12-02 19:41:09 +0000 | [diff] [blame] | 1007 | VirtRegInfo RI = AnalyzeVirtRegInBundle(*MI, Reg, &Ops); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1008 | |
Jakob Stoklund Olesen | 9f294a9 | 2011-04-18 20:23:27 +0000 | [diff] [blame] | 1009 | // Find the slot index where this instruction reads and writes OldLI. |
| 1010 | // This is usually the def slot, except for tied early clobbers. |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 1011 | SlotIndex Idx = LIS.getInstructionIndex(*MI).getRegSlot(); |
Jakob Stoklund Olesen | 90b5e56 | 2011-11-13 20:45:27 +0000 | [diff] [blame] | 1012 | if (VNInfo *VNI = OldLI.getVNInfoAt(Idx.getRegSlot(true))) |
Jakob Stoklund Olesen | 9f294a9 | 2011-04-18 20:23:27 +0000 | [diff] [blame] | 1013 | if (SlotIndex::isSameInstr(Idx, VNI->def)) |
| 1014 | Idx = VNI->def; |
| 1015 | |
Jakob Stoklund Olesen | 27320cb | 2011-03-18 04:23:06 +0000 | [diff] [blame] | 1016 | // Check for a sibling copy. |
Duncan P. N. Exon Smith | 9cfc75c | 2016-06-30 00:01:54 +0000 | [diff] [blame] | 1017 | unsigned SibReg = isFullCopyOf(*MI, Reg); |
Jakob Stoklund Olesen | e55003f | 2011-03-20 05:44:58 +0000 | [diff] [blame] | 1018 | if (SibReg && isSibling(SibReg)) { |
Jakob Stoklund Olesen | 31a0b5e | 2011-05-11 18:25:10 +0000 | [diff] [blame] | 1019 | // This may actually be a copy between snippets. |
| 1020 | if (isRegToSpill(SibReg)) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1021 | LLVM_DEBUG(dbgs() << "Found new snippet copy: " << *MI); |
Jakob Stoklund Olesen | 31a0b5e | 2011-05-11 18:25:10 +0000 | [diff] [blame] | 1022 | SnippetCopies.insert(MI); |
| 1023 | continue; |
| 1024 | } |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 1025 | if (RI.Writes) { |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1026 | if (hoistSpillInsideBB(OldLI, *MI)) { |
Jakob Stoklund Olesen | e55003f | 2011-03-20 05:44:58 +0000 | [diff] [blame] | 1027 | // This COPY is now dead, the value is already in the stack slot. |
| 1028 | MI->getOperand(0).setIsDead(); |
| 1029 | DeadDefs.push_back(MI); |
| 1030 | continue; |
| 1031 | } |
| 1032 | } else { |
| 1033 | // This is a reload for a sib-reg copy. Drop spills downstream. |
Jakob Stoklund Olesen | e55003f | 2011-03-20 05:44:58 +0000 | [diff] [blame] | 1034 | LiveInterval &SibLI = LIS.getInterval(SibReg); |
| 1035 | eliminateRedundantSpills(SibLI, SibLI.getVNInfoAt(Idx)); |
| 1036 | // The COPY will fold to a reload below. |
| 1037 | } |
Jakob Stoklund Olesen | 27320cb | 2011-03-18 04:23:06 +0000 | [diff] [blame] | 1038 | } |
| 1039 | |
Jakob Stoklund Olesen | 9603718 | 2010-07-02 17:44:57 +0000 | [diff] [blame] | 1040 | // Attempt to fold memory ops. |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 1041 | if (foldMemoryOperand(Ops)) |
Jakob Stoklund Olesen | 9603718 | 2010-07-02 17:44:57 +0000 | [diff] [blame] | 1042 | continue; |
| 1043 | |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 1044 | // Create a new virtual register for spill/fill. |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1045 | // FIXME: Infer regclass from instruction alone. |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 1046 | unsigned NewVReg = Edit->createFrom(Reg); |
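| | // NewVReg carries the value only across this instruction; the reload and
| | // spill inserted around it keep its live range as short as possible.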
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1047 | |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 1048 | if (RI.Reads) |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 1049 | insertReload(NewVReg, Idx, MI); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1050 | |
| 1051 | // Rewrite instruction operands. |
| 1052 | bool hasLiveDef = false; |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1053 | for (const auto &OpPair : Ops) { |
| 1054 | MachineOperand &MO = OpPair.first->getOperand(OpPair.second); |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 1055 | MO.setReg(NewVReg); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1056 | if (MO.isUse()) { |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1057 | if (!OpPair.first->isRegTiedToDefOperand(OpPair.second)) |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1058 | MO.setIsKill(); |
| 1059 | } else { |
| 1060 | if (!MO.isDead()) |
| 1061 | hasLiveDef = true; |
| 1062 | } |
| 1063 | } |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1064 | LLVM_DEBUG(dbgs() << "\trewrite: " << Idx << '\t' << *MI << '\n'); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1065 | |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1066 | // FIXME: Use a second vreg if instruction has no tied ops. |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 1067 | if (RI.Writes) |
Jakob Stoklund Olesen | abe8c09 | 2012-03-01 01:43:25 +0000 | [diff] [blame] | 1068 | if (hasLiveDef) |
Mark Lacey | 9d8103d | 2013-08-14 23:50:16 +0000 | [diff] [blame] | 1069 | insertSpill(NewVReg, true, MI); |
Jakob Stoklund Olesen | f888911 | 2010-06-29 23:58:39 +0000 | [diff] [blame] | 1070 | } |
| 1071 | } |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1072 | |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1073 | /// spillAll - Spill all registers remaining after rematerialization. |
| 1074 | void InlineSpiller::spillAll() { |
| 1075 | // Update LiveStacks now that we are committed to spilling. |
| 1076 | if (StackSlot == VirtRegMap::NO_STACK_SLOT) { |
| 1077 | StackSlot = VRM.assignVirt2StackSlot(Original); |
| 1078 | StackInt = &LSS.getOrCreateInterval(StackSlot, MRI.getRegClass(Original)); |
Jakob Stoklund Olesen | ad6b22e | 2012-02-04 05:20:49 +0000 | [diff] [blame] | 1079 | StackInt->getNextValue(SlotIndex(), LSS.getVNInfoAllocator()); |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1080 | } else |
| 1081 | StackInt = &LSS.getInterval(StackSlot); |
| 1082 | |
| 1083 | if (Original != Edit->getReg()) |
| 1084 | VRM.assignVirt2StackSlot(Edit->getReg(), StackSlot); |
| 1085 | |
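| | // The stack interval carries a single value number; the live ranges of all
| | // spilled registers are merged into it below.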
| 1086 | assert(StackInt->getNumValNums() == 1 && "Bad stack interval values"); |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1087 | for (unsigned Reg : RegsToSpill) |
| 1088 | StackInt->MergeSegmentsInAsValue(LIS.getInterval(Reg), |
Matthias Braun | 13ddb7c | 2013-10-10 21:28:43 +0000 | [diff] [blame] | 1089 | StackInt->getValNumInfo(0)); |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1090 | LLVM_DEBUG(dbgs() << "Merged spilled regs: " << *StackInt << '\n'); |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1091 | |
| 1092 | // Spill around uses of all RegsToSpill. |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1093 | for (unsigned Reg : RegsToSpill) |
| 1094 | spillAroundUses(Reg); |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1095 | |
| 1096 | // Hoisted spills may cause dead code. |
| 1097 | if (!DeadDefs.empty()) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1098 | LLVM_DEBUG(dbgs() << "Eliminating " << DeadDefs.size() << " dead defs\n"); |
Wei Mi | c022370 | 2016-07-08 21:08:09 +0000 | [diff] [blame] | 1099 | Edit->eliminateDeadDefs(DeadDefs, RegsToSpill, AA); |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1100 | } |
| 1101 | |
| 1102 | // Finally delete the SnippetCopies. |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1103 | for (unsigned Reg : RegsToSpill) { |
Owen Anderson | abb90c9 | 2014-03-13 06:02:25 +0000 | [diff] [blame] | 1104 | for (MachineRegisterInfo::reg_instr_iterator |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1105 | RI = MRI.reg_instr_begin(Reg), E = MRI.reg_instr_end(); |
Owen Anderson | abb90c9 | 2014-03-13 06:02:25 +0000 | [diff] [blame] | 1106 | RI != E; ) { |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 1107 | MachineInstr &MI = *(RI++); |
| 1108 | assert(SnippetCopies.count(&MI) && "Remaining use wasn't a snippet copy"); |
Jakob Stoklund Olesen | 31a0b5e | 2011-05-11 18:25:10 +0000 | [diff] [blame] | 1109 | // FIXME: Do this with a LiveRangeEdit callback. |
Jakob Stoklund Olesen | 31a0b5e | 2011-05-11 18:25:10 +0000 | [diff] [blame] | 1110 | LIS.RemoveMachineInstrFromMaps(MI); |
Duncan P. N. Exon Smith | 3ac9cc6 | 2016-02-27 06:40:41 +0000 | [diff] [blame] | 1111 | MI.eraseFromParent(); |
Jakob Stoklund Olesen | 31a0b5e | 2011-05-11 18:25:10 +0000 | [diff] [blame] | 1112 | } |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1113 | } |
| 1114 | |
| 1115 | // Delete all spilled registers. |
Craig Topper | 73275a2 | 2015-12-24 05:20:40 +0000 | [diff] [blame] | 1116 | for (unsigned Reg : RegsToSpill) |
| 1117 | Edit->eraseVirtReg(Reg); |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1118 | } |
| 1119 | |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1120 | void InlineSpiller::spill(LiveRangeEdit &edit) { |
Jakob Stoklund Olesen | c5a8c08 | 2011-05-05 17:22:53 +0000 | [diff] [blame] | 1121 | ++NumSpilledRanges; |
Jakob Stoklund Olesen | a00bab2 | 2011-03-14 19:56:43 +0000 | [diff] [blame] | 1122 | Edit = &edit; |
Daniel Sanders | 2bea69b | 2019-08-01 23:27:28 +0000 | [diff] [blame] | 1123 | assert(!Register::isStackSlot(edit.getReg()) && |
| 1124 | "Trying to spill a stack slot."); |
Jakob Stoklund Olesen | a0d5ec1 | 2011-03-15 21:13:25 +0000 | [diff] [blame] | 1125 | // Share a stack slot among all descendants of Original. |
| 1126 | Original = VRM.getOriginal(edit.getReg()); |
| 1127 | StackSlot = VRM.getStackSlot(Original); |
Craig Topper | c0196b1 | 2014-04-14 00:51:57 +0000 | [diff] [blame] | 1128 | StackInt = nullptr; |
Jakob Stoklund Olesen | a0d5ec1 | 2011-03-15 21:13:25 +0000 | [diff] [blame] | 1129 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1130 | LLVM_DEBUG(dbgs() << "Inline spilling " |
| 1131 | << TRI.getRegClassName(MRI.getRegClass(edit.getReg())) |
| 1132 | << ':' << edit.getParent() << "\nFrom original " |
| 1133 | << printReg(Original) << '\n'); |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1134 | assert(edit.getParent().isSpillable() && |
| 1135 | "Attempting to spill already spilled value."); |
Jakob Stoklund Olesen | 27320cb | 2011-03-18 04:23:06 +0000 | [diff] [blame] | 1136 | assert(DeadDefs.empty() && "Previous spill didn't remove dead defs"); |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1137 | |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1138 | collectRegsToSpill(); |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1139 | reMaterializeAll(); |
| 1140 | |
| 1141 | // Remat may handle everything. |
Jakob Stoklund Olesen | e991f72 | 2011-03-29 21:20:19 +0000 | [diff] [blame] | 1142 | if (!RegsToSpill.empty()) |
| 1143 | spillAll(); |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1144 | |
Benjamin Kramer | e2a1d89 | 2013-06-17 19:00:36 +0000 | [diff] [blame] | 1145 | Edit->calculateRegClassAndHint(MF, Loops, MBFI); |
Jakob Stoklund Olesen | a86595e | 2011-03-12 04:17:20 +0000 | [diff] [blame] | 1146 | } |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1147 | |
| 1148 | /// Optimizations after all the reg selections and spills are done. |
Wei Mi | 963f2df | 2016-04-15 23:16:44 +0000 | [diff] [blame] | 1149 | void InlineSpiller::postOptimization() { HSpiller.hoistAllSpills(); } |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1150 | |
| 1151 | /// When a spill is inserted, add the spill to MergeableSpills map. |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 1152 | void HoistSpillHelper::addToMergeableSpills(MachineInstr &Spill, int StackSlot, |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1153 | unsigned Original) { |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1154 | BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator(); |
| 1155 | LiveInterval &OrigLI = LIS.getInterval(Original); |
| 1156 | // Save a copy of the LiveInterval in StackSlotToOrigLI because the original
| 1157 | // LiveInterval may be cleared after all its references are spilled. |
| 1158 | if (StackSlotToOrigLI.find(StackSlot) == StackSlotToOrigLI.end()) { |
Jonas Devlieghere | 0eaee54 | 2019-08-15 15:54:37 +0000 | [diff] [blame] | 1159 | auto LI = std::make_unique<LiveInterval>(OrigLI.reg, OrigLI.weight); |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1160 | LI->assign(OrigLI, Allocator); |
| 1161 | StackSlotToOrigLI[StackSlot] = std::move(LI); |
| 1162 | } |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 1163 | SlotIndex Idx = LIS.getInstructionIndex(Spill); |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1164 | VNInfo *OrigVNI = StackSlotToOrigLI[StackSlot]->getVNInfoAt(Idx.getRegSlot()); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1165 | std::pair<int, VNInfo *> MIdx = std::make_pair(StackSlot, OrigVNI); |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 1166 | MergeableSpills[MIdx].insert(&Spill); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1167 | } |
| 1168 | |
| 1169 | /// When a spill is removed, remove the spill from MergeableSpills map. |
| 1170 | /// Return true if the spill is removed successfully. |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 1171 | bool HoistSpillHelper::rmFromMergeableSpills(MachineInstr &Spill, |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1172 | int StackSlot) { |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1173 | auto It = StackSlotToOrigLI.find(StackSlot); |
| 1174 | if (It == StackSlotToOrigLI.end()) |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1175 | return false; |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 1176 | SlotIndex Idx = LIS.getInstructionIndex(Spill); |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1177 | VNInfo *OrigVNI = It->second->getVNInfoAt(Idx.getRegSlot()); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1178 | std::pair<int, VNInfo *> MIdx = std::make_pair(StackSlot, OrigVNI); |
Duncan P. N. Exon Smith | 9129873 | 2016-06-30 23:28:15 +0000 | [diff] [blame] | 1179 | return MergeableSpills[MIdx].erase(&Spill); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1180 | } |
| 1181 | |
| 1182 | /// Check BB to see if it is a possible target BB to place a hoisted spill, |
| 1183 | /// i.e., there should be a live sibling of OrigReg at the insert point.
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1184 | bool HoistSpillHelper::isSpillCandBB(LiveInterval &OrigLI, VNInfo &OrigVNI, |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1185 | MachineBasicBlock &BB, unsigned &LiveReg) { |
| 1186 | SlotIndex Idx; |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1187 | unsigned OrigReg = OrigLI.reg; |
Wei Mi | f3c8f53 | 2016-05-23 19:39:19 +0000 | [diff] [blame] | 1188 | MachineBasicBlock::iterator MI = IPA.getLastInsertPointIter(OrigLI, BB); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1189 | if (MI != BB.end()) |
| 1190 | Idx = LIS.getInstructionIndex(*MI); |
| 1191 | else |
| 1192 | Idx = LIS.getMBBEndIdx(&BB).getPrevSlot(); |
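| | // Idx is the last point in BB where a spill could be inserted; a sibling
| | // of OrigReg must be live there to serve as the source of the new spill.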
| 1193 | SmallSetVector<unsigned, 16> &Siblings = Virt2SiblingsMap[OrigReg]; |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1194 | assert(OrigLI.getVNInfoAt(Idx) == &OrigVNI && "Unexpected VNI"); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1195 | |
| 1196 | for (auto const SibReg : Siblings) { |
| 1197 | LiveInterval &LI = LIS.getInterval(SibReg); |
| 1198 | VNInfo *VNI = LI.getVNInfoAt(Idx); |
| 1199 | if (VNI) { |
| 1200 | LiveReg = SibReg; |
| 1201 | return true; |
| 1202 | } |
| 1203 | } |
| 1204 | return false; |
| 1205 | } |
| 1206 | |
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1207 | /// Remove redundant spills in the same BB. Save those redundant spills in |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1208 | /// SpillsToRm, and save the spill to keep and its BB in SpillBBToSpill map. |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1209 | void HoistSpillHelper::rmRedundantSpills( |
| 1210 | SmallPtrSet<MachineInstr *, 16> &Spills, |
| 1211 | SmallVectorImpl<MachineInstr *> &SpillsToRm, |
| 1212 | DenseMap<MachineDomTreeNode *, MachineInstr *> &SpillBBToSpill) { |
| 1213 | // For each spill seen, check SpillBBToSpill[] and see if its BB already has
| 1214 | // another spill inside. If a BB contains more than one spill, only keep the
| 1215 | // earlier spill, i.e. the one with the smaller SlotIndex.
| 1216 | for (const auto CurrentSpill : Spills) { |
| 1217 | MachineBasicBlock *Block = CurrentSpill->getParent(); |
Bjorn Pettersson | 3c6ce73 | 2017-01-04 09:41:56 +0000 | [diff] [blame] | 1218 | MachineDomTreeNode *Node = MDT.getBase().getNode(Block); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1219 | MachineInstr *PrevSpill = SpillBBToSpill[Node]; |
| 1220 | if (PrevSpill) { |
| 1221 | SlotIndex PIdx = LIS.getInstructionIndex(*PrevSpill); |
| 1222 | SlotIndex CIdx = LIS.getInstructionIndex(*CurrentSpill); |
| 1223 | MachineInstr *SpillToRm = (CIdx > PIdx) ? CurrentSpill : PrevSpill; |
| 1224 | MachineInstr *SpillToKeep = (CIdx > PIdx) ? PrevSpill : CurrentSpill; |
| 1225 | SpillsToRm.push_back(SpillToRm); |
Bjorn Pettersson | 3c6ce73 | 2017-01-04 09:41:56 +0000 | [diff] [blame] | 1226 | SpillBBToSpill[MDT.getBase().getNode(Block)] = SpillToKeep; |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1227 | } else { |
Bjorn Pettersson | 3c6ce73 | 2017-01-04 09:41:56 +0000 | [diff] [blame] | 1228 | SpillBBToSpill[MDT.getBase().getNode(Block)] = CurrentSpill; |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1229 | } |
| 1230 | } |
| 1231 | for (const auto SpillToRm : SpillsToRm) |
| 1232 | Spills.erase(SpillToRm); |
| 1233 | } |
| 1234 | |
| 1235 | /// Starting from \p Root find a top-down traversal order of the dominator |
| 1236 | /// tree to visit all basic blocks containing the elements of \p Spills. |
| 1237 | /// Redundant spills will be found and put into \p SpillsToRm at the same |
| 1238 | /// time. \p SpillBBToSpill will be populated as part of the process and |
| 1239 | /// maps a basic block to the first store occurring in the basic block. |
| 1240 | /// \post SpillsToRm.union(Spills\@post) == Spills\@pre |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1241 | void HoistSpillHelper::getVisitOrders( |
| 1242 | MachineBasicBlock *Root, SmallPtrSet<MachineInstr *, 16> &Spills, |
| 1243 | SmallVectorImpl<MachineDomTreeNode *> &Orders, |
| 1244 | SmallVectorImpl<MachineInstr *> &SpillsToRm, |
| 1245 | DenseMap<MachineDomTreeNode *, unsigned> &SpillsToKeep, |
| 1246 | DenseMap<MachineDomTreeNode *, MachineInstr *> &SpillBBToSpill) { |
| 1247 | // The set contains all the possible BB nodes to which we may hoist |
| 1248 | // original spills. |
| 1249 | SmallPtrSet<MachineDomTreeNode *, 8> WorkSet; |
| 1250 | // Save the BB nodes on the path from the first BB node containing |
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1251 | // non-redundant spill to the Root node. |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1252 | SmallPtrSet<MachineDomTreeNode *, 8> NodesOnPath; |
| 1253 | // All the spills to be hoisted must originate from a single def instruction |
| 1254 | // to the OrigReg. It means the def instruction should dominate all the spills |
| 1255 | // to be hoisted. We choose the BB where the def instruction is located as |
| 1256 | // the Root. |
| 1257 | MachineDomTreeNode *RootIDomNode = MDT[Root]->getIDom(); |
| 1258 | // For every node on the dominator tree with spill, walk up on the dominator |
| 1259 | // tree towards the Root node until it is reached. If there is other node |
| 1260 | // containing spill in the middle of the path, the previous spill saw will |
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1261 | // be redundant and the node containing it will be removed. All the nodes on |
| 1262 | // the path starting from the first node with non-redundant spill to the Root |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1263 | // node will be added to the WorkSet, which will contain all the possible |
| 1264 | // locations where spills may be hoisted to after the loop below is done. |
| 1265 | for (const auto Spill : Spills) { |
| 1266 | MachineBasicBlock *Block = Spill->getParent(); |
| 1267 | MachineDomTreeNode *Node = MDT[Block]; |
| 1268 | MachineInstr *SpillToRm = nullptr; |
| 1269 | while (Node != RootIDomNode) { |
| 1270 | // If Node dominates Block, and it already contains a spill, the spill in |
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1271 | // Block will be redundant. |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1272 | if (Node != MDT[Block] && SpillBBToSpill[Node]) { |
| 1273 | SpillToRm = SpillBBToSpill[MDT[Block]]; |
| 1274 | break; |
| 1275 | // If we see the Node already in WorkSet, the path from the Node to
| 1276 | // the Root node must already have been traversed by another spill.
| 1277 | // There is no need to repeat it.
| 1278 | } else if (WorkSet.count(Node)) { |
| 1279 | break; |
| 1280 | } else { |
| 1281 | NodesOnPath.insert(Node); |
| 1282 | } |
| 1283 | Node = Node->getIDom(); |
| 1284 | } |
| 1285 | if (SpillToRm) { |
| 1286 | SpillsToRm.push_back(SpillToRm); |
| 1287 | } else { |
| 1288 | // Add a BB containing the original spills to SpillsToKeep -- i.e., |
| 1289 | // set the initial status before hoisting starts. The value for BBs
| 1290 | // containing original spills is set to 0, in order to distinguish them
| 1291 | // from BBs containing hoisted spills, which will be inserted into
| 1292 | // SpillsToKeep later during hoisting.
| 1293 | SpillsToKeep[MDT[Block]] = 0; |
| 1294 | WorkSet.insert(NodesOnPath.begin(), NodesOnPath.end()); |
| 1295 | } |
| 1296 | NodesOnPath.clear(); |
| 1297 | } |
| 1298 | |
| 1299 | // Sort the nodes in WorkSet in top-down order and save the nodes |
| 1300 | // in Orders. Orders will be used for hoisting in runHoistSpills. |
| 1301 | unsigned idx = 0; |
Bjorn Pettersson | 3c6ce73 | 2017-01-04 09:41:56 +0000 | [diff] [blame] | 1302 | Orders.push_back(MDT.getBase().getNode(Root)); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1303 | do { |
| 1304 | MachineDomTreeNode *Node = Orders[idx++]; |
| 1305 | const std::vector<MachineDomTreeNode *> &Children = Node->getChildren(); |
| 1306 | unsigned NumChildren = Children.size(); |
| 1307 | for (unsigned i = 0; i != NumChildren; ++i) { |
| 1308 | MachineDomTreeNode *Child = Children[i]; |
| 1309 | if (WorkSet.count(Child)) |
| 1310 | Orders.push_back(Child); |
| 1311 | } |
| 1312 | } while (idx != Orders.size()); |
| 1313 | assert(Orders.size() == WorkSet.size() && |
| 1314 | "Orders and WorkSet have different sizes");
| 1315 | |
| 1316 | #ifndef NDEBUG |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1317 | LLVM_DEBUG(dbgs() << "Orders size is " << Orders.size() << "\n"); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1318 | SmallVector<MachineDomTreeNode *, 32>::reverse_iterator RIt = Orders.rbegin(); |
| 1319 | for (; RIt != Orders.rend(); RIt++) |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1320 | LLVM_DEBUG(dbgs() << "BB" << (*RIt)->getBlock()->getNumber() << ","); |
| 1321 | LLVM_DEBUG(dbgs() << "\n"); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1322 | #endif |
| 1323 | } |
| 1324 | |
| 1325 | /// Try to hoist spills according to BB hotness. The spills to be removed will
| 1326 | /// be saved in \p SpillsToRm. The spills to be inserted will be saved in |
| 1327 | /// \p SpillsToIns. |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1328 | void HoistSpillHelper::runHoistSpills( |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1329 | LiveInterval &OrigLI, VNInfo &OrigVNI, |
| 1330 | SmallPtrSet<MachineInstr *, 16> &Spills, |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1331 | SmallVectorImpl<MachineInstr *> &SpillsToRm, |
| 1332 | DenseMap<MachineBasicBlock *, unsigned> &SpillsToIns) { |
| 1333 | // Visit order of dominator tree nodes. |
| 1334 | SmallVector<MachineDomTreeNode *, 32> Orders; |
| 1335 | // SpillsToKeep contains all the nodes where spills are to be inserted |
| 1336 | // during hoisting. If the spill to be inserted is an original spill |
| 1337 | // (not a hoisted one), the value of the map entry is 0. If the spill |
| 1338 | // is a hoisted spill, the value of the map entry is the VReg to be used |
| 1339 | // as the source of the spill. |
| 1340 | DenseMap<MachineDomTreeNode *, unsigned> SpillsToKeep; |
| 1341 | // Map from BB to the first spill inside of it. |
| 1342 | DenseMap<MachineDomTreeNode *, MachineInstr *> SpillBBToSpill; |
| 1343 | |
| 1344 | rmRedundantSpills(Spills, SpillsToRm, SpillBBToSpill); |
| 1345 | |
| 1346 | MachineBasicBlock *Root = LIS.getMBBFromIndex(OrigVNI.def); |
| 1347 | getVisitOrders(Root, Spills, Orders, SpillsToRm, SpillsToKeep, |
| 1348 | SpillBBToSpill); |
| 1349 | |
| 1350 | // SpillsInSubTreeMap keeps the map from a dom tree node to a pair of |
| 1351 | // nodes set and the cost of all the spills inside those nodes. |
| 1352 | // The nodes set are the locations where spills are to be inserted |
| 1353 | // in the subtree of current node. |
Eugene Zelenko | 900b633 | 2017-08-29 22:32:07 +0000 | [diff] [blame] | 1354 | using NodesCostPair = |
| 1355 | std::pair<SmallPtrSet<MachineDomTreeNode *, 16>, BlockFrequency>; |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1356 | DenseMap<MachineDomTreeNode *, NodesCostPair> SpillsInSubTreeMap; |
Eugene Zelenko | 900b633 | 2017-08-29 22:32:07 +0000 | [diff] [blame] | 1357 | |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1358 | // Iterate Orders set in reverse order, which will be a bottom-up order |
| 1359 | // in the dominator tree. Once we visit a dom tree node, we know its |
| 1360 | // children have already been visited and the spill locations in the |
| 1361 | // subtrees of all the children have been determined. |
| 1362 | SmallVector<MachineDomTreeNode *, 32>::reverse_iterator RIt = Orders.rbegin(); |
| 1363 | for (; RIt != Orders.rend(); RIt++) { |
| 1364 | MachineBasicBlock *Block = (*RIt)->getBlock(); |
| 1365 | |
| 1366 | // If Block contains an original spill, simply continue. |
| 1367 | if (SpillsToKeep.find(*RIt) != SpillsToKeep.end() && !SpillsToKeep[*RIt]) { |
| 1368 | SpillsInSubTreeMap[*RIt].first.insert(*RIt); |
| 1369 | // SpillsInSubTreeMap[*RIt].second contains the cost of spill. |
| 1370 | SpillsInSubTreeMap[*RIt].second = MBFI.getBlockFreq(Block); |
| 1371 | continue; |
| 1372 | } |
| 1373 | |
| 1374 | // Collect spills in subtree of current node (*RIt) to |
| 1375 | // SpillsInSubTreeMap[*RIt].first. |
| 1376 | const std::vector<MachineDomTreeNode *> &Children = (*RIt)->getChildren(); |
| 1377 | unsigned NumChildren = Children.size(); |
| 1378 | for (unsigned i = 0; i != NumChildren; ++i) { |
| 1379 | MachineDomTreeNode *Child = Children[i]; |
| 1380 | if (SpillsInSubTreeMap.find(Child) == SpillsInSubTreeMap.end()) |
| 1381 | continue; |
| 1382 | // The statement "SpillsInSubTree = SpillsInSubTreeMap[*RIt].first" below
| 1383 | // must be placed before taking the begin and end iterators of
| 1384 | // SpillsInSubTreeMap[Child].first. Otherwise, the iterators may be
| 1385 | // invalidated when SpillsInSubTreeMap[*RIt] is created for the first time,
| 1386 | // because the map may then grow and move its original buckets.
| 1387 | SmallPtrSet<MachineDomTreeNode *, 16> &SpillsInSubTree = |
| 1388 | SpillsInSubTreeMap[*RIt].first; |
| 1389 | BlockFrequency &SubTreeCost = SpillsInSubTreeMap[*RIt].second; |
| 1390 | SubTreeCost += SpillsInSubTreeMap[Child].second; |
| 1391 | auto BI = SpillsInSubTreeMap[Child].first.begin(); |
| 1392 | auto EI = SpillsInSubTreeMap[Child].first.end(); |
| 1393 | SpillsInSubTree.insert(BI, EI); |
| 1394 | SpillsInSubTreeMap.erase(Child); |
| 1395 | } |
| 1396 | |
| 1397 | SmallPtrSet<MachineDomTreeNode *, 16> &SpillsInSubTree = |
| 1398 | SpillsInSubTreeMap[*RIt].first; |
| 1399 | BlockFrequency &SubTreeCost = SpillsInSubTreeMap[*RIt].second; |
| 1400 | // No spills in subtree, simply continue. |
| 1401 | if (SpillsInSubTree.empty()) |
| 1402 | continue; |
| 1403 | |
| 1404 | // Check whether Block is a possible candidate to insert spill. |
| 1405 | unsigned LiveReg = 0; |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1406 | if (!isSpillCandBB(OrigLI, OrigVNI, *Block, LiveReg)) |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1407 | continue; |
| 1408 | |
| 1409 | // If there are multiple spills that could be merged, bias a little |
| 1410 | // toward hoisting the spill.
| 1411 | BranchProbability MarginProb = (SpillsInSubTree.size() > 1) |
| 1412 | ? BranchProbability(9, 10) |
| 1413 | : BranchProbability(1, 1); |
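| | // That is, with several mergeable spills, hoist once the combined frequency
| | // of the subtree spills exceeds 90% of this block's frequency; with a single
| | // spill, hoist only when this block is strictly colder.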
| 1414 | if (SubTreeCost > MBFI.getBlockFreq(Block) * MarginProb) { |
| 1415 | // Hoist: Move spills to current Block. |
| 1416 | for (const auto SpillBB : SpillsInSubTree) { |
| 1417 | // When SpillBB is a BB that contains an original spill, add that spill
| 1418 | // to SpillsToRm.
| 1419 | if (SpillsToKeep.find(SpillBB) != SpillsToKeep.end() && |
| 1420 | !SpillsToKeep[SpillBB]) { |
| 1421 | MachineInstr *SpillToRm = SpillBBToSpill[SpillBB]; |
| 1422 | SpillsToRm.push_back(SpillToRm); |
| 1423 | } |
| 1424 | // SpillBB will not contain a spill anymore, so remove it from SpillsToKeep.
| 1425 | SpillsToKeep.erase(SpillBB); |
| 1426 | } |
| 1427 | // Current Block is the BB containing the new hoisted spill. Add it to |
| 1428 | // SpillsToKeep. LiveReg is the source of the new spill. |
| 1429 | SpillsToKeep[*RIt] = LiveReg; |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1430 | LLVM_DEBUG({ |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1431 | dbgs() << "spills in BB: "; |
| 1432 | for (const auto Rspill : SpillsInSubTree) |
| 1433 | dbgs() << Rspill->getBlock()->getNumber() << " "; |
| 1434 | dbgs() << "were promoted to BB" << (*RIt)->getBlock()->getNumber() |
| 1435 | << "\n"; |
| 1436 | }); |
| 1437 | SpillsInSubTree.clear(); |
| 1438 | SpillsInSubTree.insert(*RIt); |
| 1439 | SubTreeCost = MBFI.getBlockFreq(Block); |
| 1440 | } |
| 1441 | } |
| 1442 | // For spills in SpillsToKeep with LiveReg set (i.e., not an original spill),
| 1443 | // save them in SpillsToIns.
Mark de Wever | 8dc7b98 | 2020-01-01 17:23:21 +0100 | [diff] [blame] | 1444 | for (const auto &Ent : SpillsToKeep) { |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1445 | if (Ent.second) |
| 1446 | SpillsToIns[Ent.first->getBlock()] = Ent.second; |
| 1447 | } |
| 1448 | } |
| 1449 | |
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1450 | /// For spills with equal values, remove redundant spills and hoist those left |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1451 | /// to less hot spots. |
| 1452 | /// |
| 1453 | /// Spills with equal values will be collected into the same set in
| 1454 | /// MergeableSpills when a spill is inserted. These equal spills originate
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1455 | /// from the same defining instruction and are dominated by that instruction.
| 1456 | /// Before hoisting all the equal spills, redundant spills inside the same
| 1457 | /// BB are first marked to be deleted. Then, starting from the spills left, walk
| 1458 | /// up the dominator tree towards the Root node where the defining instruction
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1459 | /// is located, mark the dominated spills to be deleted along the way, and
| 1460 | /// collect the BB nodes on the path from non-dominated spills to the defining
| 1461 | /// instruction into a WorkSet. The nodes in WorkSet are the candidate places
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1462 | /// where we consider hoisting the spills. We iterate the WorkSet in
| 1463 | /// bottom-up order, and for each node, we decide whether to hoist the spills
| 1464 | /// inside its subtree to that node. In this way, we can get a local benefit
| 1465 | /// even if hoisting all the equal spills to one cold place is impossible.
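| | ///
| | /// For example (an illustrative sketch, not tied to any particular CFG): if
| | /// equal spills sit in blocks B2 and B3, both dominated by B1, and B1 is
| | /// colder than B2 and B3 combined, the two spills are removed and a single
| | /// spill of a live sibling register is inserted in B1.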
Wei Mi | 963f2df | 2016-04-15 23:16:44 +0000 | [diff] [blame] | 1466 | void HoistSpillHelper::hoistAllSpills() { |
| 1467 | SmallVector<unsigned, 4> NewVRegs; |
| 1468 | LiveRangeEdit Edit(nullptr, NewVRegs, MF, LIS, &VRM, this); |
| 1469 | |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1470 | for (unsigned i = 0, e = MRI.getNumVirtRegs(); i != e; ++i) { |
Daniel Sanders | 2bea69b | 2019-08-01 23:27:28 +0000 | [diff] [blame] | 1471 | unsigned Reg = Register::index2VirtReg(i); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1472 | unsigned Original = VRM.getPreSplitReg(Reg); |
| 1473 | if (!MRI.def_empty(Reg)) |
| 1474 | Virt2SiblingsMap[Original].insert(Reg); |
| 1475 | } |
| 1476 | |
| 1477 | // Each entry in MergeableSpills contains a spill set with equal values. |
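| | // The key is the (stack slot, original value number) pair that identifies
| | // the value being spilled.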
| 1478 | for (auto &Ent : MergeableSpills) { |
| 1479 | int Slot = Ent.first.first; |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1480 | LiveInterval &OrigLI = *StackSlotToOrigLI[Slot]; |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1481 | VNInfo *OrigVNI = Ent.first.second; |
| 1482 | SmallPtrSet<MachineInstr *, 16> &EqValSpills = Ent.second; |
| 1483 | if (EqValSpills.empty())
| 1484 | continue; |
| 1485 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1486 | LLVM_DEBUG({ |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1487 | dbgs() << "\nFor Slot" << Slot << " and VN" << OrigVNI->id << ":\n" |
| 1488 | << "Equal spills in BB: "; |
| 1489 | for (const auto *Spill : EqValSpills)
| 1490 | dbgs() << Spill->getParent()->getNumber() << " ";
| 1491 | dbgs() << "\n"; |
| 1492 | }); |
| 1493 | |
| 1494 | // SpillsToRm is the spill set to be removed from EqValSpills. |
| 1495 | SmallVector<MachineInstr *, 16> SpillsToRm; |
| 1496 | // SpillsToIns is the spill set to be newly inserted after hoisting. |
| 1497 | DenseMap<MachineBasicBlock *, unsigned> SpillsToIns; |
| 1498 | |
Wei Mi | c0d0664 | 2017-09-13 21:41:30 +0000 | [diff] [blame] | 1499 | runHoistSpills(OrigLI, *OrigVNI, EqValSpills, SpillsToRm, SpillsToIns); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1500 | |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1501 | LLVM_DEBUG({ |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1502 | dbgs() << "Finally inserted spills in BB: "; |
Mark de Wever | 8dc7b98 | 2020-01-01 17:23:21 +0100 | [diff] [blame] | 1503 | for (const auto &Ispill : SpillsToIns) |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1504 | dbgs() << Ispill.first->getNumber() << " "; |
| 1505 | dbgs() << "\nFinally removed spills in BB: "; |
| 1506 | for (const auto *Rspill : SpillsToRm)
| 1507 | dbgs() << Rspill->getParent()->getNumber() << " "; |
| 1508 | dbgs() << "\n"; |
| 1509 | }); |
| 1510 | |
| 1511 | // Stack live range update. |
| 1512 | LiveInterval &StackIntvl = LSS.getInterval(Slot); |
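| | // When spills were added or removed, extend the stack slot's live interval
| | // to cover the full live range of the original value, merging it in as the
| | // slot's single value number.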
Wei Mi | 8c4136b | 2016-05-11 22:37:43 +0000 | [diff] [blame] | 1513 | if (!SpillsToIns.empty() || !SpillsToRm.empty()) |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1514 | StackIntvl.MergeValueInAsValue(OrigLI, OrigVNI, |
| 1515 | StackIntvl.getValNumInfo(0)); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1516 | |
| 1517 | // Insert hoisted spills. |
Mark de Wever | 8dc7b98 | 2020-01-01 17:23:21 +0100 | [diff] [blame] | 1518 | for (auto const &Insert : SpillsToIns) { |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1519 | MachineBasicBlock *BB = Insert.first; |
| 1520 | unsigned LiveReg = Insert.second; |
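| | // Place the new spill at the last insert point that the insert point
| | // analysis reports for this block (typically just before the terminators),
| | // so the value is stored as late as possible within BB.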
Wei Mi | f3c8f53 | 2016-05-23 19:39:19 +0000 | [diff] [blame] | 1521 | MachineBasicBlock::iterator MI = IPA.getLastInsertPointIter(OrigLI, *BB); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1522 | TII.storeRegToStackSlot(*BB, MI, LiveReg, false, Slot, |
| 1523 | MRI.getRegClass(LiveReg), &TRI); |
| 1524 | LIS.InsertMachineInstrRangeInMaps(std::prev(MI), MI); |
| 1525 | ++NumSpills; |
| 1526 | } |
| 1527 | |
Eric Christopher | 75d661a | 2016-05-04 21:45:36 +0000 | [diff] [blame] | 1528 | // Remove redundant spills or change them to dead instructions. |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1529 | NumSpills -= SpillsToRm.size(); |
| 1530 | for (auto *const RMEnt : SpillsToRm) {
| 1531 | RMEnt->setDesc(TII.get(TargetOpcode::KILL)); |
| 1532 | for (unsigned i = RMEnt->getNumOperands(); i; --i) { |
| 1533 | MachineOperand &MO = RMEnt->getOperand(i - 1); |
| 1534 | if (MO.isReg() && MO.isImplicit() && MO.isDef() && !MO.isDead()) |
| 1535 | RMEnt->RemoveOperand(i - 1); |
| 1536 | } |
| 1537 | } |
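| | // Let LiveRangeEdit delete the now-dead instructions and shrink the live
| | // ranges of the registers they were reading.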
Wei Mi | c022370 | 2016-07-08 21:08:09 +0000 | [diff] [blame] | 1538 | Edit.eliminateDeadDefs(SpillsToRm, None, AA); |
Wei Mi | 9a16d65 | 2016-04-13 03:08:27 +0000 | [diff] [blame] | 1539 | } |
| 1540 | } |
Wei Mi | 963f2df | 2016-04-15 23:16:44 +0000 | [diff] [blame] | 1541 | |
| 1542 | /// For VirtReg clone, the \p New register should have the same physreg or |
| 1543 | /// stackslot as the \p old register. |
| 1544 | void HoistSpillHelper::LRE_DidCloneVirtReg(unsigned New, unsigned Old) { |
| 1545 | if (VRM.hasPhys(Old)) |
| 1546 | VRM.assignVirt2Phys(New, VRM.getPhys(Old)); |
| 1547 | else if (VRM.getStackSlot(Old) != VirtRegMap::NO_STACK_SLOT) |
| 1548 | VRM.assignVirt2StackSlot(New, VRM.getStackSlot(Old)); |
| 1549 | else |
| 1550 | llvm_unreachable("VReg should be assigned either physreg or stackslot"); |
| 1551 | } |