//===-- lib/CodeGen/GlobalISel/CombinerHelper.cpp --------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/CombinerHelper.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/GlobalISel/Combiner.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"

#define DEBUG_TYPE "gi-combiner"

using namespace llvm;

CombinerHelper::CombinerHelper(GISelChangeObserver &Observer,
                               MachineIRBuilder &B)
    : Builder(B), MRI(Builder.getMF().getRegInfo()), Observer(Observer) {}

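/// Replace all uses of FromReg with ToReg. If the register attributes
/// (class/bank/type) of the two registers cannot be merged, fall back to
/// copying FromReg into ToReg instead. The observer is notified before and
/// after the change.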
void CombinerHelper::replaceRegWith(MachineRegisterInfo &MRI, Register FromReg,
                                    Register ToReg) const {
  Observer.changingAllUsesOfReg(MRI, FromReg);

  if (MRI.constrainRegAttrs(ToReg, FromReg))
    MRI.replaceRegWith(FromReg, ToReg);
  else
    Builder.buildCopy(ToReg, FromReg);

  Observer.finishedChangingAllUsesOfReg();
}

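/// Point the given register operand at ToReg, notifying the observer that the
/// operand's parent instruction is changing.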
void CombinerHelper::replaceRegOpWith(MachineRegisterInfo &MRI,
                                      MachineOperand &FromRegOp,
                                      Register ToReg) const {
  assert(FromRegOp.getParent() && "Expected an operand in an MI");
  Observer.changingInstr(*FromRegOp.getParent());

  FromRegOp.setReg(ToReg);

  Observer.changedInstr(*FromRegOp.getParent());
}

bool CombinerHelper::tryCombineCopy(MachineInstr &MI) {
  if (matchCombineCopy(MI)) {
    applyCombineCopy(MI);
    return true;
  }
  return false;
}

bool CombinerHelper::matchCombineCopy(MachineInstr &MI) {
  if (MI.getOpcode() != TargetOpcode::COPY)
    return false;
  unsigned DstReg = MI.getOperand(0).getReg();
  unsigned SrcReg = MI.getOperand(1).getReg();
  LLT DstTy = MRI.getType(DstReg);
  LLT SrcTy = MRI.getType(SrcReg);
  // Simple copy propagation:
  //   a(sx) = COPY b(sx) -> replace all uses of a with b.
  if (DstTy.isValid() && SrcTy.isValid() && DstTy == SrcTy)
    return true;
  return false;
}

void CombinerHelper::applyCombineCopy(MachineInstr &MI) {
  unsigned DstReg = MI.getOperand(0).getReg();
  unsigned SrcReg = MI.getOperand(1).getReg();
  MI.eraseFromParent();
  replaceRegWith(MRI, DstReg, SrcReg);
}

namespace {

/// Select a preference between two uses. CurrentUse is the current preference
/// while *ForCandidate are the attributes of the candidate under
/// consideration.
PreferredTuple ChoosePreferredUse(PreferredTuple &CurrentUse,
                                  const LLT &TyForCandidate,
                                  unsigned OpcodeForCandidate,
                                  MachineInstr *MIForCandidate) {
  if (!CurrentUse.Ty.isValid()) {
    if (CurrentUse.ExtendOpcode == OpcodeForCandidate ||
        CurrentUse.ExtendOpcode == TargetOpcode::G_ANYEXT)
      return {TyForCandidate, OpcodeForCandidate, MIForCandidate};
    return CurrentUse;
  }

  // We permit the extend to hoist through basic blocks, but this is only
  // sensible if the target has extending loads. If the result is lowered back
  // into a load and extend during legalization, the net effect is merely to
  // hoist the extend up to the load.

  // Prefer defined extensions to undefined extensions, as these are more
  // likely to reduce the number of instructions.
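  // For example, given a current preference of G_ANYEXT and a new G_SEXT
  // candidate, the G_SEXT wins: a G_ANYEXT user is satisfied by any value in
  // the high bits and can therefore reuse the sign-extended result, whereas
  // the reverse replacement would leave the G_SEXT user with undefined high
  // bits.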
  if (OpcodeForCandidate == TargetOpcode::G_ANYEXT &&
      CurrentUse.ExtendOpcode != TargetOpcode::G_ANYEXT)
    return CurrentUse;
  else if (CurrentUse.ExtendOpcode == TargetOpcode::G_ANYEXT &&
           OpcodeForCandidate != TargetOpcode::G_ANYEXT)
    return {TyForCandidate, OpcodeForCandidate, MIForCandidate};

  // Prefer sign extensions to zero extensions, as sign extensions tend to be
  // more expensive as standalone instructions; folding the costlier extension
  // into the load is the bigger saving.
  if (CurrentUse.Ty == TyForCandidate) {
    if (CurrentUse.ExtendOpcode == TargetOpcode::G_SEXT &&
        OpcodeForCandidate == TargetOpcode::G_ZEXT)
      return CurrentUse;
    else if (CurrentUse.ExtendOpcode == TargetOpcode::G_ZEXT &&
             OpcodeForCandidate == TargetOpcode::G_SEXT)
      return {TyForCandidate, OpcodeForCandidate, MIForCandidate};
  }

  // This is potentially target specific. We've chosen the largest type
  // because G_TRUNC is usually free. One potential catch is that some targets
  // have fewer large registers than small ones, and this choice potentially
  // increases the live-range of the larger value.
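  // For example, if a loaded value feeds both a G_SEXT to s32 and a G_SEXT to
  // s64, the s64 use is preferred; the s32 user is later rewritten as an
  // extend of a (usually free) G_TRUNC of the wider result.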
  if (TyForCandidate.getSizeInBits() > CurrentUse.Ty.getSizeInBits()) {
    return {TyForCandidate, OpcodeForCandidate, MIForCandidate};
  }
  return CurrentUse;
}

/// Find a suitable place to insert some instructions and insert them. This
/// function accounts for special cases like inserting before a PHI node.
/// The current strategy for inserting before PHIs is to duplicate the
/// instructions for each predecessor. That is acceptable for G_TRUNC on most
/// targets since it generally requires no code, but other targets/cases may
/// want to try harder to find a dominating block.
static void InsertInsnsWithoutSideEffectsBeforeUse(
    MachineIRBuilder &Builder, MachineInstr &DefMI, MachineOperand &UseMO,
    std::function<void(MachineBasicBlock *, MachineBasicBlock::iterator,
                       MachineOperand &UseMO)>
        Inserter) {
  MachineInstr &UseMI = *UseMO.getParent();

  MachineBasicBlock *InsertBB = UseMI.getParent();

  // If the use is a PHI then we want the predecessor block instead. PHI
  // operands come in (value, predecessor) pairs, so the operand immediately
  // after UseMO names the corresponding predecessor block.
  if (UseMI.isPHI()) {
    MachineOperand *PredBB = std::next(&UseMO);
    InsertBB = PredBB->getMBB();
  }

  // If the block is the same block as the def then we want to insert just
  // after the def instead of at the start of the block.
  if (InsertBB == DefMI.getParent()) {
    MachineBasicBlock::iterator InsertPt = &DefMI;
    Inserter(InsertBB, std::next(InsertPt), UseMO);
    return;
  }

  // Otherwise we want the start of the BB.
  Inserter(InsertBB, InsertBB->getFirstNonPHI(), UseMO);
}
} // end anonymous namespace

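/// Fold extends of a loaded value into the load itself, producing a
/// G_SEXTLOAD/G_ZEXTLOAD (or an extending G_LOAD) where possible.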
bool CombinerHelper::tryCombineExtendingLoads(MachineInstr &MI) {
  PreferredTuple Preferred;
  if (matchCombineExtendingLoads(MI, Preferred)) {
    applyCombineExtendingLoads(MI, Preferred);
    return true;
  }
  return false;
}

bool CombinerHelper::matchCombineExtendingLoads(MachineInstr &MI,
                                                PreferredTuple &Preferred) {
  // We match the loads and follow the uses to the extend instead of matching
  // the extends and following the def to the load. This is because the load
  // must remain in the same position for correctness (unless we also add code
  // to find a safe place to sink it) whereas the extend is freely movable.
  // It also prevents us from duplicating the load, which would be incorrect
  // for a volatile load and bad for performance in any case.
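  //
  // The basic rewrite this enables is (illustrative MIR):
  //   %0:_(s8) = G_LOAD %ptr
  //   %1:_(s32) = G_SEXT %0(s8)
  // becoming:
  //   %1:_(s32) = G_SEXTLOAD %ptr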

  if (MI.getOpcode() != TargetOpcode::G_LOAD &&
      MI.getOpcode() != TargetOpcode::G_SEXTLOAD &&
      MI.getOpcode() != TargetOpcode::G_ZEXTLOAD)
    return false;

  auto &LoadValue = MI.getOperand(0);
  assert(LoadValue.isReg() && "Result wasn't a register?");

  LLT LoadValueTy = MRI.getType(LoadValue.getReg());
  if (!LoadValueTy.isScalar())
    return false;

  // Most architectures are going to legalize <s8 loads into at least a 1-byte
  // load, as MMOs can only describe memory accesses in multiples of bytes.
  // If we tried to perform extload combining on those, we could end up with
  //   %a(s8) = extload %ptr (load 1 byte from %ptr)
  // ... which is an illegal extload instruction.
  if (LoadValueTy.getSizeInBits() < 8)
    return false;

  // Non-power-of-2 types will very likely be legalized into multiple loads,
  // so don't bother trying to match them into extending loads.
  if (!isPowerOf2_32(LoadValueTy.getSizeInBits()))
    return false;

  // Find the preferred type aside from the any-extends (unless it's the only
  // one) and non-extending ops. We'll emit an extending load to that type and
  // emit a variant of (extend (trunc X)) for the others according to the
  // relative type sizes. At the same time, pick an extend to use based on the
  // extend involved in the chosen type.
  unsigned PreferredOpcode = MI.getOpcode() == TargetOpcode::G_LOAD
                                 ? TargetOpcode::G_ANYEXT
                                 : MI.getOpcode() == TargetOpcode::G_SEXTLOAD
                                       ? TargetOpcode::G_SEXT
                                       : TargetOpcode::G_ZEXT;
  Preferred = {LLT(), PreferredOpcode, nullptr};
  for (auto &UseMI : MRI.use_instructions(LoadValue.getReg())) {
    if (UseMI.getOpcode() == TargetOpcode::G_SEXT ||
        UseMI.getOpcode() == TargetOpcode::G_ZEXT ||
        UseMI.getOpcode() == TargetOpcode::G_ANYEXT) {
      Preferred = ChoosePreferredUse(Preferred,
                                     MRI.getType(UseMI.getOperand(0).getReg()),
                                     UseMI.getOpcode(), &UseMI);
    }
  }

  // There were no extends.
  if (!Preferred.MI)
    return false;
  // It should be impossible to choose an extend without selecting a different
  // type since by definition the result of an extend is larger.
  assert(Preferred.Ty != LoadValueTy && "Extending to same type?");

  LLVM_DEBUG(dbgs() << "Preferred use is: " << *Preferred.MI);
  return true;
}

void CombinerHelper::applyCombineExtendingLoads(MachineInstr &MI,
                                                PreferredTuple &Preferred) {
  // Rewrite the load to the chosen extending load.
  Register ChosenDstReg = Preferred.MI->getOperand(0).getReg();

  // Inserter to insert a truncate back to the original type at a given point
  // with some basic CSE to limit truncate duplication to one per BB.
  DenseMap<MachineBasicBlock *, MachineInstr *> EmittedInsns;
  auto InsertTruncAt = [&](MachineBasicBlock *InsertIntoBB,
                           MachineBasicBlock::iterator InsertBefore,
                           MachineOperand &UseMO) {
    MachineInstr *PreviouslyEmitted = EmittedInsns.lookup(InsertIntoBB);
    if (PreviouslyEmitted) {
      Observer.changingInstr(*UseMO.getParent());
      UseMO.setReg(PreviouslyEmitted->getOperand(0).getReg());
      Observer.changedInstr(*UseMO.getParent());
      return;
    }

    Builder.setInsertPt(*InsertIntoBB, InsertBefore);
    Register NewDstReg = MRI.cloneVirtualRegister(MI.getOperand(0).getReg());
    MachineInstr *NewMI = Builder.buildTrunc(NewDstReg, ChosenDstReg);
    EmittedInsns[InsertIntoBB] = NewMI;
    replaceRegOpWith(MRI, UseMO, NewDstReg);
  };

  Observer.changingInstr(MI);
  MI.setDesc(
      Builder.getTII().get(Preferred.ExtendOpcode == TargetOpcode::G_SEXT
                               ? TargetOpcode::G_SEXTLOAD
                               : Preferred.ExtendOpcode == TargetOpcode::G_ZEXT
                                     ? TargetOpcode::G_ZEXTLOAD
                                     : TargetOpcode::G_LOAD));

  // Rewrite all the uses to fix up the types. Take a copy of the use list
  // first: the rewrites below create and erase uses, which would otherwise
  // invalidate iteration over MRI.use_operands().
  auto &LoadValue = MI.getOperand(0);
  SmallVector<MachineOperand *, 4> Uses;
  for (auto &UseMO : MRI.use_operands(LoadValue.getReg()))
    Uses.push_back(&UseMO);

  for (auto *UseMO : Uses) {
    MachineInstr *UseMI = UseMO->getParent();

    // If the extend is compatible with the preferred extend then we should fix
    // up the type and extend so that it uses the preferred use.
    if (UseMI->getOpcode() == Preferred.ExtendOpcode ||
        UseMI->getOpcode() == TargetOpcode::G_ANYEXT) {
      unsigned UseDstReg = UseMI->getOperand(0).getReg();
      MachineOperand &UseSrcMO = UseMI->getOperand(1);
      const LLT &UseDstTy = MRI.getType(UseDstReg);
      if (UseDstReg != ChosenDstReg) {
        if (Preferred.Ty == UseDstTy) {
          // If the use has the same type as the preferred use, then merge
          // the vregs and erase the extend. For example:
          //   %1:_(s8) = G_LOAD ...
          //   %2:_(s32) = G_SEXT %1(s8)
          //   %3:_(s32) = G_ANYEXT %1(s8)
          //   ... = ... %3(s32)
          // rewrites to:
          //   %2:_(s32) = G_SEXTLOAD ...
          //   ... = ... %2(s32)
          replaceRegWith(MRI, UseDstReg, ChosenDstReg);
          Observer.erasingInstr(*UseMO->getParent());
          UseMO->getParent()->eraseFromParent();
        } else if (Preferred.Ty.getSizeInBits() < UseDstTy.getSizeInBits()) {
          // If the preferred size is smaller, then keep the extend but extend
          // from the result of the extending load. For example:
          //   %1:_(s8) = G_LOAD ...
          //   %2:_(s32) = G_SEXT %1(s8)
          //   %3:_(s64) = G_ANYEXT %1(s8)
          //   ... = ... %3(s64)
          // rewrites to:
          //   %2:_(s32) = G_SEXTLOAD ...
          //   %3:_(s64) = G_ANYEXT %2:_(s32)
          //   ... = ... %3(s64)
          replaceRegOpWith(MRI, UseSrcMO, ChosenDstReg);
        } else {
          // If the preferred size is larger, then insert a truncate. For
          // example:
          //   %1:_(s8) = G_LOAD ...
          //   %2:_(s64) = G_SEXT %1(s8)
          //   %3:_(s32) = G_ANYEXT %1(s8)
          //   ... = ... %3(s32)
          // rewrites to:
          //   %2:_(s64) = G_SEXTLOAD ...
          //   %4:_(s8) = G_TRUNC %2:_(s64)
          //   %3:_(s32) = G_ANYEXT %4:_(s8)
          //   ... = ... %3(s32)
          InsertInsnsWithoutSideEffectsBeforeUse(Builder, MI, *UseMO,
                                                 InsertTruncAt);
        }
        continue;
      }
      // The use is the preferred extend we chose earlier (it defines
      // ChosenDstReg). The load itself will be updated below to define that
      // value, so just erase the old extend.
      Observer.erasingInstr(*UseMO->getParent());
      UseMO->getParent()->eraseFromParent();
      continue;
    }

    // The use isn't an extend. Truncate back to the type we originally loaded.
    // This is free on many targets.
    InsertInsnsWithoutSideEffectsBeforeUse(Builder, MI, *UseMO, InsertTruncAt);
  }

  MI.getOperand(0).setReg(ChosenDstReg);
  Observer.changedInstr(MI);
}

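/// Try each of the combines above in turn, returning true as soon as one of
/// them changes the code.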
bool CombinerHelper::tryCombine(MachineInstr &MI) {
  if (tryCombineCopy(MI))
    return true;
  return tryCombineExtendingLoads(MI);
}