blob: 6018c59da77b26a3cf9b39a31e7c663e9b56d239 [file] [log] [blame]
//===-- lib/CodeGen/GlobalISel/CombinerHelper.cpp -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
9#include "llvm/CodeGen/GlobalISel/CombinerHelper.h"
Aditya Nandakumarf75d4f32018-12-05 20:14:52 +000010#include "llvm/CodeGen/GlobalISel/Combiner.h"
11#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
Aditya Nandakumar81c81b62018-01-25 00:41:58 +000012#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
13#include "llvm/CodeGen/GlobalISel/Utils.h"
14#include "llvm/CodeGen/MachineInstr.h"
15#include "llvm/CodeGen/MachineRegisterInfo.h"
Daniel Sandersc973ad12018-10-03 02:12:17 +000016#include "llvm/CodeGen/TargetInstrInfo.h"
Aditya Nandakumar81c81b62018-01-25 00:41:58 +000017
18#define DEBUG_TYPE "gi-combine"
19
20using namespace llvm;
21
// Construct a helper bound to the function the MachineIRBuilder is currently
// attached to. MRI is cached from that function, and Observer is the change
// observer that combines must notify about created/erased instructions.
CombinerHelper::CombinerHelper(GISelChangeObserver &Observer,
                               MachineIRBuilder &B)
    : Builder(B), MRI(Builder.getMF().getRegInfo()), Observer(Observer) {}
25
// Request that \p MI be (re)processed by reporting it to the observer as a
// created instruction; presumably the observing combiner re-adds it to its
// worklist in response (behavior lives in the observer, not here).
void CombinerHelper::scheduleForVisit(MachineInstr &MI) {
  Observer.createdInstr(MI);
}
Aditya Nandakumar81c81b62018-01-25 00:41:58 +000029
30bool CombinerHelper::tryCombineCopy(MachineInstr &MI) {
31 if (MI.getOpcode() != TargetOpcode::COPY)
32 return false;
33 unsigned DstReg = MI.getOperand(0).getReg();
34 unsigned SrcReg = MI.getOperand(1).getReg();
35 LLT DstTy = MRI.getType(DstReg);
36 LLT SrcTy = MRI.getType(SrcReg);
37 // Simple Copy Propagation.
38 // a(sx) = COPY b(sx) -> Replace all uses of a with b.
39 if (DstTy.isValid() && SrcTy.isValid() && DstTy == SrcTy) {
40 MI.eraseFromParent();
41 MRI.replaceRegWith(DstReg, SrcReg);
42 return true;
43 }
44 return false;
45}
46
namespace {
// Bookkeeping for the extend-use that tryCombineExtendingLoads has chosen to
// fold into the load.
struct PreferredTuple {
  LLT Ty;                // The result type of the extend.
  unsigned ExtendOpcode; // G_ANYEXT/G_SEXT/G_ZEXT
  MachineInstr *MI;
};

/// Select a preference between two uses. CurrentUse is the current preference
/// while *ForCandidate is attributes of the candidate under consideration.
/// Returns the tuple that should become the new preference.
PreferredTuple ChoosePreferredUse(PreferredTuple &CurrentUse,
                                  const LLT &TyForCandidate,
                                  unsigned OpcodeForCandidate,
                                  MachineInstr *MIForCandidate) {
  // No use selected yet: accept the candidate only if it matches the extend
  // implied by the load (or if anything is acceptable, i.e. G_ANYEXT).
  if (!CurrentUse.Ty.isValid()) {
    if (CurrentUse.ExtendOpcode == OpcodeForCandidate ||
        CurrentUse.ExtendOpcode == TargetOpcode::G_ANYEXT)
      return {TyForCandidate, OpcodeForCandidate, MIForCandidate};
    return CurrentUse;
  }

  // We permit the extend to hoist through basic blocks but this is only
  // sensible if the target has extending loads. If you end up lowering back
  // into a load and extend during the legalizer then the end result is
  // hoisting the extend up to the load.

  // Prefer defined extensions to undefined extensions as these are more
  // likely to reduce the number of instructions.
  if (OpcodeForCandidate == TargetOpcode::G_ANYEXT &&
      CurrentUse.ExtendOpcode != TargetOpcode::G_ANYEXT)
    return CurrentUse;
  else if (CurrentUse.ExtendOpcode == TargetOpcode::G_ANYEXT &&
           OpcodeForCandidate != TargetOpcode::G_ANYEXT)
    return {TyForCandidate, OpcodeForCandidate, MIForCandidate};

  // At equal type, prefer folding the sign extension into the load: as a
  // standalone instruction a sign extend tends to be more expensive than a
  // zero extend, so it is the better one to eliminate.
  if (CurrentUse.Ty == TyForCandidate) {
    if (CurrentUse.ExtendOpcode == TargetOpcode::G_SEXT &&
        OpcodeForCandidate == TargetOpcode::G_ZEXT)
      return CurrentUse;
    else if (CurrentUse.ExtendOpcode == TargetOpcode::G_ZEXT &&
             OpcodeForCandidate == TargetOpcode::G_SEXT)
      return {TyForCandidate, OpcodeForCandidate, MIForCandidate};
  }

  // This is potentially target specific. We've chosen the largest type
  // because G_TRUNC is usually free. One potential catch with this is that
  // some targets have a reduced number of larger registers than smaller
  // registers and this choice potentially increases the live-range for the
  // larger value.
  if (TyForCandidate.getSizeInBits() > CurrentUse.Ty.getSizeInBits()) {
    return {TyForCandidate, OpcodeForCandidate, MIForCandidate};
  }
  return CurrentUse;
}

/// Find a suitable place to insert some instructions and insert them. This
/// function accounts for special cases like inserting before a PHI node.
/// The current strategy for inserting before PHI's is to duplicate the
/// instructions for each predecessor. However, while that's ok for G_TRUNC
/// on most targets since it generally requires no code, other targets/cases may
/// want to try harder to find a dominating block.
static void InsertInsnsWithoutSideEffectsBeforeUse(
    MachineIRBuilder &Builder, MachineInstr &DefMI, MachineOperand &UseMO,
    std::function<void(MachineBasicBlock *, MachineBasicBlock::iterator)>
        Inserter) {
  MachineInstr &UseMI = *UseMO.getParent();

  MachineBasicBlock *InsertBB = UseMI.getParent();

  // If the use is a PHI then we want the predecessor block instead. The MBB
  // operand paired with a PHI's register operand is the next operand over.
  if (UseMI.isPHI()) {
    MachineOperand *PredBB = std::next(&UseMO);
    InsertBB = PredBB->getMBB();
  }

  // If the block is the same block as the def then we want to insert just after
  // the def instead of at the start of the block.
  if (InsertBB == DefMI.getParent()) {
    MachineBasicBlock::iterator InsertPt = &DefMI;
    Inserter(InsertBB, std::next(InsertPt));
    return;
  }

  // Otherwise we want the start of the BB (after any PHIs).
  Inserter(InsertBB, InsertBB->getFirstNonPHI());
}
} // end anonymous namespace
135
// Fold an extend of a load into an extending load, when a suitable extend use
// exists. Returns true if the load was rewritten.
bool CombinerHelper::tryCombineExtendingLoads(MachineInstr &MI) {
  // Deferred record of where a G_TRUNC must be inserted for one use operand.
  struct InsertionPoint {
    MachineOperand *UseMO;
    MachineBasicBlock *InsertIntoBB;
    MachineBasicBlock::iterator InsertBefore;
    InsertionPoint(MachineOperand *UseMO, MachineBasicBlock *InsertIntoBB,
                   MachineBasicBlock::iterator InsertBefore)
        : UseMO(UseMO), InsertIntoBB(InsertIntoBB), InsertBefore(InsertBefore) {
    }
  };

  // We match the loads and follow the uses to the extend instead of matching
  // the extends and following the def to the load. This is because the load
  // must remain in the same position for correctness (unless we also add code
  // to find a safe place to sink it) whereas the extend is freely movable.
  // It also prevents us from duplicating the load for the volatile case or just
  // for performance.

  if (MI.getOpcode() != TargetOpcode::G_LOAD &&
      MI.getOpcode() != TargetOpcode::G_SEXTLOAD &&
      MI.getOpcode() != TargetOpcode::G_ZEXTLOAD)
    return false;

  auto &LoadValue = MI.getOperand(0);
  assert(LoadValue.isReg() && "Result wasn't a register?");

  LLT LoadValueTy = MRI.getType(LoadValue.getReg());
  if (!LoadValueTy.isScalar())
    return false;

  // Find the preferred type aside from the any-extends (unless it's the only
  // one) and non-extending ops. We'll emit an extending load to that type and
  // emit a variant of (extend (trunc X)) for the others according to the
  // relative type sizes. At the same time, pick an extend to use based on the
  // extend involved in the chosen type.
  unsigned PreferredOpcode = MI.getOpcode() == TargetOpcode::G_LOAD
                                 ? TargetOpcode::G_ANYEXT
                                 : MI.getOpcode() == TargetOpcode::G_SEXTLOAD
                                       ? TargetOpcode::G_SEXT
                                       : TargetOpcode::G_ZEXT;
  PreferredTuple Preferred = {LLT(), PreferredOpcode, nullptr};
  for (auto &UseMI : MRI.use_instructions(LoadValue.getReg())) {
    if (UseMI.getOpcode() == TargetOpcode::G_SEXT ||
        UseMI.getOpcode() == TargetOpcode::G_ZEXT ||
        UseMI.getOpcode() == TargetOpcode::G_ANYEXT) {
      Preferred = ChoosePreferredUse(Preferred,
                                     MRI.getType(UseMI.getOperand(0).getReg()),
                                     UseMI.getOpcode(), &UseMI);
    }
  }

  // There were no extends to fold.
  if (!Preferred.MI)
    return false;
  // It should be impossible to chose an extend without selecting a different
  // type since by definition the result of an extend is larger.
  assert(Preferred.Ty != LoadValueTy && "Extending to same type?");

  // Rewrite the load to the chosen extending load.
  unsigned ChosenDstReg = Preferred.MI->getOperand(0).getReg();
  MI.setDesc(
      Builder.getTII().get(Preferred.ExtendOpcode == TargetOpcode::G_SEXT
                               ? TargetOpcode::G_SEXTLOAD
                               : Preferred.ExtendOpcode == TargetOpcode::G_ZEXT
                                     ? TargetOpcode::G_ZEXTLOAD
                                     : TargetOpcode::G_LOAD));

  // Rewrite all the uses to fix up the types. Erasures and truncate
  // insertions are deferred so the use_operands range isn't invalidated
  // while we iterate it.
  SmallVector<MachineInstr *, 1> ScheduleForErase;
  SmallVector<InsertionPoint, 4> ScheduleForInsert;
  for (auto &UseMO : MRI.use_operands(LoadValue.getReg())) {
    MachineInstr *UseMI = UseMO.getParent();

    // If the extend is compatible with the preferred extend then we should fix
    // up the type and extend so that it uses the preferred use.
    if (UseMI->getOpcode() == Preferred.ExtendOpcode ||
        UseMI->getOpcode() == TargetOpcode::G_ANYEXT) {
      unsigned UseDstReg = UseMI->getOperand(0).getReg();
      unsigned UseSrcReg = UseMI->getOperand(1).getReg();
      const LLT &UseDstTy = MRI.getType(UseDstReg);
      if (UseDstReg != ChosenDstReg) {
        if (Preferred.Ty == UseDstTy) {
          // If the use has the same type as the preferred use, then merge
          // the vregs and erase the extend. For example:
          //    %1:_(s8) = G_LOAD ...
          //    %2:_(s32) = G_SEXT %1(s8)
          //    %3:_(s32) = G_ANYEXT %1(s8)
          //    ... = ... %3(s32)
          // rewrites to:
          //    %2:_(s32) = G_SEXTLOAD ...
          //    ... = ... %2(s32)
          MRI.replaceRegWith(UseDstReg, ChosenDstReg);
          ScheduleForErase.push_back(UseMO.getParent());
        } else if (Preferred.Ty.getSizeInBits() < UseDstTy.getSizeInBits()) {
          // If the preferred size is smaller, then keep the extend but extend
          // from the result of the extending load. For example:
          //    %1:_(s8) = G_LOAD ...
          //    %2:_(s32) = G_SEXT %1(s8)
          //    %3:_(s64) = G_ANYEXT %1(s8)
          //    ... = ... %3(s64)
          // rewrites to:
          //    %2:_(s32) = G_SEXTLOAD ...
          //    %3:_(s64) = G_ANYEXT %2:_(s32)
          //    ... = ... %3(s64)
          MRI.replaceRegWith(UseSrcReg, ChosenDstReg);
        } else {
          // If the preferred size is larger, then insert a truncate. For
          // example:
          //    %1:_(s8) = G_LOAD ...
          //    %2:_(s64) = G_SEXT %1(s8)
          //    %3:_(s32) = G_ZEXT %1(s8)
          //    ... = ... %3(s32)
          // rewrites to:
          //    %2:_(s64) = G_SEXTLOAD ...
          //    %4:_(s8) = G_TRUNC %2:_(s64)
          //    %3:_(s32) = G_ZEXT %4:_(s8)
          //    ... = ... %3(s32)
          InsertInsnsWithoutSideEffectsBeforeUse(
              Builder, MI, UseMO,
              [&](MachineBasicBlock *InsertIntoBB,
                  MachineBasicBlock::iterator InsertBefore) {
                ScheduleForInsert.emplace_back(&UseMO, InsertIntoBB, InsertBefore);
              });
        }
        continue;
      }
      // The use is (one of) the uses of the preferred use we chose earlier.
      // We're going to update the load to def this value later so just erase
      // the old extend.
      ScheduleForErase.push_back(UseMO.getParent());
      continue;
    }

    // The use isn't an extend. Truncate back to the type we originally loaded.
    // This is free on many targets.
    InsertInsnsWithoutSideEffectsBeforeUse(
        Builder, MI, UseMO,
        [&](MachineBasicBlock *InsertIntoBB,
            MachineBasicBlock::iterator InsertBefore) {
          ScheduleForInsert.emplace_back(&UseMO, InsertIntoBB, InsertBefore);
        });
  }

  // Materialize the deferred truncates, emitting at most one G_TRUNC per
  // block and re-pointing later uses in that block at it.
  DenseMap<MachineBasicBlock *, MachineInstr *> EmittedInsns;
  for (auto &InsertionInfo : ScheduleForInsert) {
    MachineOperand *UseMO = InsertionInfo.UseMO;
    MachineBasicBlock *InsertIntoBB = InsertionInfo.InsertIntoBB;
    MachineBasicBlock::iterator InsertBefore = InsertionInfo.InsertBefore;

    MachineInstr *PreviouslyEmitted = EmittedInsns.lookup(InsertIntoBB);
    if (PreviouslyEmitted) {
      UseMO->setReg(PreviouslyEmitted->getOperand(0).getReg());
      continue;
    }

    Builder.setInsertPt(*InsertIntoBB, InsertBefore);
    unsigned NewDstReg = MRI.cloneVirtualRegister(MI.getOperand(0).getReg());
    MachineInstr *NewMI = Builder.buildTrunc(NewDstReg, ChosenDstReg);
    EmittedInsns[InsertIntoBB] = NewMI;
    UseMO->setReg(NewDstReg);
    Observer.createdInstr(*NewMI);
  }
  // Perform the deferred erasures, notifying the observer before each one.
  for (auto &EraseMI : ScheduleForErase) {
    Observer.erasingInstr(*EraseMI);
    EraseMI->eraseFromParent();
  }
  // Finally redirect the (now extending) load to define the chosen vreg.
  MI.getOperand(0).setReg(ChosenDstReg);

  return true;
}
306
Aditya Nandakumar81c81b62018-01-25 00:41:58 +0000307bool CombinerHelper::tryCombine(MachineInstr &MI) {
Daniel Sandersc973ad12018-10-03 02:12:17 +0000308 if (tryCombineCopy(MI))
309 return true;
310 return tryCombineExtendingLoads(MI);
Aditya Nandakumar81c81b62018-01-25 00:41:58 +0000311}