//===- LoadCombine.cpp - Combine Adjacent Loads ---------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This transformation combines adjacent loads.
///
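/// As an illustrative (hypothetical) example, assuming a little-endian data
/// layout and made-up value names, two adjacent i16 loads such as
///
///   %a = load i16* %p, align 4
///   %q = getelementptr i16* %p, i64 1
///   %b = load i16* %q, align 2
///
/// can be rewritten as a single i32 load of the underlying bytes whose two
/// halves are then extracted (lshr/trunc) and used in place of %a and %b.
///
/// The pass is registered as "load-combine", so it can be run in isolation
/// with "opt -load-combine".
///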
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar.h"

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/TargetFolder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "load-combine"

STATISTIC(NumLoadsAnalyzed, "Number of loads analyzed for combining");
STATISTIC(NumLoadsCombined, "Number of loads combined");

namespace {
struct PointerOffsetPair {
  Value *Pointer;
  uint64_t Offset;
};

struct LoadPOPPair {
  LoadPOPPair(LoadInst *L, PointerOffsetPair P, unsigned O)
      : Load(L), POP(P), InsertOrder(O) {}
  LoadPOPPair() {}
  LoadInst *Load;
  PointerOffsetPair POP;
  /// \brief The new load needs to be created before the first load in IR
  /// order.
  unsigned InsertOrder;
};

class LoadCombine : public BasicBlockPass {
  LLVMContext *C;
  const DataLayout *DL;

public:
  LoadCombine()
      : BasicBlockPass(ID),
        C(nullptr), DL(nullptr) {
    initializeLoadCombinePass(*PassRegistry::getPassRegistry());
  }

  using llvm::Pass::doInitialization;
  bool doInitialization(Function &) override;
  bool runOnBasicBlock(BasicBlock &BB) override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  const char *getPassName() const override { return "LoadCombine"; }
  static char ID;

  typedef IRBuilder<true, TargetFolder> BuilderTy;

private:
  BuilderTy *Builder;

  PointerOffsetPair getPointerOffsetPair(LoadInst &);
  bool combineLoads(DenseMap<const Value *, SmallVector<LoadPOPPair, 8>> &);
  bool aggregateLoads(SmallVectorImpl<LoadPOPPair> &);
  bool combineLoads(SmallVectorImpl<LoadPOPPair> &);
};
}

bool LoadCombine::doInitialization(Function &F) {
  DEBUG(dbgs() << "LoadCombine function: " << F.getName() << "\n");
  C = &F.getContext();
  DataLayoutPass *DLP = getAnalysisIfAvailable<DataLayoutPass>();
  if (!DLP) {
    DEBUG(dbgs() << "  Skipping LoadCombine -- no target data!\n");
    return false;
  }
  DL = &DLP->getDataLayout();
  return true;
}

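/// \brief Strip bitcasts and constant-index GEPs off the load's pointer
/// operand, returning the underlying pointer together with the accumulated
/// byte offset. If a GEP with a non-constant index is hit, the walk stops and
/// that GEP itself is used as the base pointer.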
PointerOffsetPair LoadCombine::getPointerOffsetPair(LoadInst &LI) {
  PointerOffsetPair POP;
  POP.Pointer = LI.getPointerOperand();
  POP.Offset = 0;
  while (isa<BitCastInst>(POP.Pointer) || isa<GetElementPtrInst>(POP.Pointer)) {
    if (auto *GEP = dyn_cast<GetElementPtrInst>(POP.Pointer)) {
      unsigned BitWidth = DL->getPointerTypeSizeInBits(GEP->getType());
      APInt Offset(BitWidth, 0);
      if (GEP->accumulateConstantOffset(*DL, Offset))
        POP.Offset += Offset.getZExtValue();
      else
        // Can't handle GEPs with variable indices.
        return POP;
      POP.Pointer = GEP->getPointerOperand();
    } else if (auto *BC = dyn_cast<BitCastInst>(POP.Pointer))
      POP.Pointer = BC->getOperand(0);
  }
  return POP;
}

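/// \brief For every base pointer, sort its loads by offset and try to combine
/// runs of adjacent loads. Returns true if any loads were combined.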
bool LoadCombine::combineLoads(
    DenseMap<const Value *, SmallVector<LoadPOPPair, 8>> &LoadMap) {
  bool Combined = false;
  for (auto &Loads : LoadMap) {
    if (Loads.second.size() < 2)
      continue;
    std::sort(Loads.second.begin(), Loads.second.end(),
              [](const LoadPOPPair &A, const LoadPOPPair &B) {
                return A.POP.Offset < B.POP.Offset;
              });
    if (aggregateLoads(Loads.second))
      Combined = true;
  }
  return Combined;
}

/// \brief Try to aggregate loads from a sorted list of loads to be combined.
///
/// It is guaranteed that no writes occur between any of the loads. All loads
/// have the same base pointer. There are at least two loads.
bool LoadCombine::aggregateLoads(SmallVectorImpl<LoadPOPPair> &Loads) {
  assert(Loads.size() >= 2 && "Insufficient loads!");
  LoadInst *BaseLoad = nullptr;
  SmallVector<LoadPOPPair, 8> AggregateLoads;
  bool Combined = false;
  uint64_t PrevOffset = -1ull;
  uint64_t PrevSize = 0;
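  // Walk the offset-sorted loads, extending the current run while each load
  // begins exactly where the previous one ended; PrevOffset == -1ull marks an
  // empty run.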
  for (auto &L : Loads) {
    if (PrevOffset == -1ull) {
      BaseLoad = L.Load;
      PrevOffset = L.POP.Offset;
      PrevSize = DL->getTypeStoreSize(L.Load->getType());
      AggregateLoads.push_back(L);
      continue;
    }
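    // Skip a load that is more strictly aligned than the base load; the
    // combined load only keeps the base load's alignment.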
    if (L.Load->getAlignment() > BaseLoad->getAlignment())
      continue;
    if (L.POP.Offset > PrevOffset + PrevSize) {
      // There is a gap after the previous load, so no later load can extend
      // the current run. Flush what we have and start a new run.
      if (combineLoads(AggregateLoads))
        Combined = true;
      AggregateLoads.clear();
      PrevOffset = -1ull;
      continue;
    }
    if (L.POP.Offset != PrevOffset + PrevSize)
      // This load overlaps the previous one: its offset lies inside the bytes
      // covered by the previous load.
      // FIXME: We may want to handle this case.
      continue;
    PrevOffset = L.POP.Offset;
    PrevSize = DL->getTypeStoreSize(L.Load->getType());
    AggregateLoads.push_back(L);
  }
  if (combineLoads(AggregateLoads))
    Combined = true;
  return Combined;
}

/// \brief Given a list of combinable loads, combine the maximum number of
/// them.
bool LoadCombine::combineLoads(SmallVectorImpl<LoadPOPPair> &Loads) {
  // Remove loads from the end while the size is not a power of 2.
  unsigned TotalSize = 0;
  for (const auto &L : Loads)
    TotalSize += L.Load->getType()->getPrimitiveSizeInBits();
  while (TotalSize != 0 && !isPowerOf2_32(TotalSize))
    TotalSize -= Loads.pop_back_val().Load->getType()->getPrimitiveSizeInBits();
  if (Loads.size() < 2)
    return false;

  DEBUG({
    dbgs() << "***** Combining Loads ******\n";
    for (const auto &L : Loads) {
      dbgs() << L.POP.Offset << ": " << *L.Load << "\n";
    }
  });

  // Find the first load. This is where we put the new load.
  LoadPOPPair FirstLP;
  FirstLP.InsertOrder = -1u;
  for (const auto &L : Loads)
    if (L.InsertOrder < FirstLP.InsertOrder)
      FirstLP = L;

  unsigned AddressSpace =
      FirstLP.POP.Pointer->getType()->getPointerAddressSpace();

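  // Build the combined load: GEP to the run's starting byte offset, cast the
  // result to a pointer to an integer wide enough to cover every load in the
  // run, and load it right before the earliest original load, reusing the
  // lowest-offset load's name and alignment.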
  Builder->SetInsertPoint(FirstLP.Load);
  Value *Ptr = Builder->CreateConstGEP1_64(
      Builder->CreatePointerCast(Loads[0].POP.Pointer,
                                 Builder->getInt8PtrTy(AddressSpace)),
      Loads[0].POP.Offset);
  LoadInst *NewLoad = new LoadInst(
      Builder->CreatePointerCast(
          Ptr, PointerType::get(IntegerType::get(Ptr->getContext(), TotalSize),
                                Ptr->getType()->getPointerAddressSpace())),
      Twine(Loads[0].Load->getName()) + ".combined", false,
      Loads[0].Load->getAlignment(), FirstLP.Load);

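  // Replace every original load with the matching slice of the wide value,
  // extracted at that load's byte distance from the start of the run.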
  for (const auto &L : Loads) {
    Builder->SetInsertPoint(L.Load);
    Value *V = Builder->CreateExtractInteger(
        *DL, NewLoad, cast<IntegerType>(L.Load->getType()),
        L.POP.Offset - Loads[0].POP.Offset, "combine.extract");
    L.Load->replaceAllUsesWith(V);
  }

  NumLoadsCombined += Loads.size();
  return true;
}

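/// \brief Scan the block, bucketing simple integer loads by base pointer and
/// flushing (i.e. attempting to combine) the collected buckets whenever an
/// instruction that may write to memory or throw is reached.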
bool LoadCombine::runOnBasicBlock(BasicBlock &BB) {
  if (skipOptnoneFunction(BB) || !DL)
    return false;

  IRBuilder<true, TargetFolder> TheBuilder(BB.getContext(), TargetFolder(DL));
  Builder = &TheBuilder;

  DenseMap<const Value *, SmallVector<LoadPOPPair, 8>> LoadMap;

  bool Combined = false;
  unsigned Index = 0;
  for (auto &I : BB) {
    if (I.mayWriteToMemory() || I.mayThrow()) {
      if (combineLoads(LoadMap))
        Combined = true;
      LoadMap.clear();
      continue;
    }
    LoadInst *LI = dyn_cast<LoadInst>(&I);
    if (!LI)
      continue;
    ++NumLoadsAnalyzed;
    if (!LI->isSimple() || !LI->getType()->isIntegerTy())
      continue;
    auto POP = getPointerOffsetPair(*LI);
    if (!POP.Pointer)
      continue;
    LoadMap[POP.Pointer].push_back(LoadPOPPair(LI, POP, Index++));
  }
  if (combineLoads(LoadMap))
    Combined = true;
  return Combined;
}

void LoadCombine::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
}

char LoadCombine::ID = 0;

BasicBlockPass *llvm::createLoadCombinePass() {
  return new LoadCombine();
}

INITIALIZE_PASS(LoadCombine, "load-combine", "Combine Adjacent Loads", false,
                false)