//===-- LowerBitSets.cpp - Bitset lowering pass ---------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass lowers bitset metadata and calls to the llvm.bitset.test intrinsic.
// See http://llvm.org/docs/LangRef.html#bitsets for more information.
//
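// As an illustrative sketch only (the names below are invented for this
// comment, not taken from this file), the input is expected to look roughly
// like:
//
//   !llvm.bitsets = !{!0, !1}
//   !0 = !{!"bitset1", i32* @a, i32 0}
//   !1 = !{!"bitset1", i32* @b, i32 0}
//   ...
//   %x = call i1 @llvm.bitset.test(i8* %p, metadata !"bitset1")
//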
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/LowerBitSets.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/ADT/EquivalenceClasses.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/Pass.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"

using namespace llvm;

#define DEBUG_TYPE "lowerbitsets"

STATISTIC(ByteArraySizeBits, "Byte array size in bits");
STATISTIC(ByteArraySizeBytes, "Byte array size in bytes");
STATISTIC(NumByteArraysCreated, "Number of byte arrays created");
STATISTIC(NumBitSetCallsLowered, "Number of bitset calls lowered");
STATISTIC(NumBitSetDisjointSets, "Number of disjoint sets of bitsets");

bool BitSetInfo::containsGlobalOffset(uint64_t Offset) const {
  if (Offset < ByteOffset)
    return false;

  if ((Offset - ByteOffset) % (uint64_t(1) << AlignLog2) != 0)
    return false;

  uint64_t BitOffset = (Offset - ByteOffset) >> AlignLog2;
  if (BitOffset >= BitSize)
    return false;

  return Bits.count(BitOffset);
}

bool BitSetInfo::containsValue(
    const DataLayout &DL,
    const DenseMap<GlobalVariable *, uint64_t> &GlobalLayout, Value *V,
    uint64_t COffset) const {
  if (auto GV = dyn_cast<GlobalVariable>(V)) {
    auto I = GlobalLayout.find(GV);
    if (I == GlobalLayout.end())
      return false;
    return containsGlobalOffset(I->second + COffset);
  }

  if (auto GEP = dyn_cast<GEPOperator>(V)) {
    APInt APOffset(DL.getPointerSizeInBits(0), 0);
    bool Result = GEP->accumulateConstantOffset(DL, APOffset);
    if (!Result)
      return false;
    COffset += APOffset.getZExtValue();
    return containsValue(DL, GlobalLayout, GEP->getPointerOperand(),
                         COffset);
  }

  if (auto Op = dyn_cast<Operator>(V)) {
    if (Op->getOpcode() == Instruction::BitCast)
      return containsValue(DL, GlobalLayout, Op->getOperand(0), COffset);

    if (Op->getOpcode() == Instruction::Select)
      return containsValue(DL, GlobalLayout, Op->getOperand(1), COffset) &&
             containsValue(DL, GlobalLayout, Op->getOperand(2), COffset);
  }

  return false;
}

BitSetInfo BitSetBuilder::build() {
  if (Min > Max)
    Min = 0;

  // Normalize each offset against the minimum observed offset, and compute
  // the bitwise OR of each of the offsets. The number of trailing zeros
  // in the mask gives us the log2 of the alignment of all offsets, which
  // allows us to compress the bitset by only storing one bit per aligned
  // address.
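  // As a small worked example (values chosen for illustration only): offsets
  // {8, 16, 24} give Min = 8, normalized offsets {0, 8, 16}, Mask = 0b11000,
  // AlignLog2 = 3, and stored bit indices {0, 1, 2}.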
  uint64_t Mask = 0;
  for (uint64_t &Offset : Offsets) {
    Offset -= Min;
    Mask |= Offset;
  }

  BitSetInfo BSI;
  BSI.ByteOffset = Min;

  BSI.AlignLog2 = 0;
  if (Mask != 0)
    BSI.AlignLog2 = countTrailingZeros(Mask, ZB_Undefined);

  // Build the compressed bitset while normalizing the offsets against the
  // computed alignment.
  BSI.BitSize = ((Max - Min) >> BSI.AlignLog2) + 1;
  for (uint64_t Offset : Offsets) {
    Offset >>= BSI.AlignLog2;
    BSI.Bits.insert(Offset);
  }

  return BSI;
}

void GlobalLayoutBuilder::addFragment(const std::set<uint64_t> &F) {
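  // Illustrative example (indices invented for this comment): if fragment 1
  // currently holds {0, 2} and we add F = {2, 3}, the new fragment becomes
  // {0, 2, 3} and fragment 1 is emptied, so objects 0, 2 and 3 end up laid
  // out contiguously.
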
  // Create a new fragment to hold the layout for F.
  Fragments.emplace_back();
  std::vector<uint64_t> &Fragment = Fragments.back();
  uint64_t FragmentIndex = Fragments.size() - 1;

  for (auto ObjIndex : F) {
    uint64_t OldFragmentIndex = FragmentMap[ObjIndex];
    if (OldFragmentIndex == 0) {
      // We haven't seen this object index before, so just add it to the
      // current fragment.
      Fragment.push_back(ObjIndex);
    } else {
      // This index belongs to an existing fragment. Copy the elements of the
      // old fragment into this one and clear the old fragment. We don't update
      // the fragment map just yet; this ensures that any further references to
      // indices from the old fragment in this fragment do not insert any more
      // indices.
      std::vector<uint64_t> &OldFragment = Fragments[OldFragmentIndex];
      Fragment.insert(Fragment.end(), OldFragment.begin(), OldFragment.end());
      OldFragment.clear();
    }
  }

  // Update the fragment map to point our object indices to this fragment.
  for (uint64_t ObjIndex : Fragment)
    FragmentMap[ObjIndex] = FragmentIndex;
}

void ByteArrayBuilder::allocate(const std::set<uint64_t> &Bits,
                                uint64_t BitSize, uint64_t &AllocByteOffset,
                                uint8_t &AllocMask) {
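  // Each byte of the byte array can hold bits for up to eight different bit
  // sets, one bit position per set; allocating from the bit position with the
  // smallest running byte count keeps the overall array as short as possible.
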
  // Find the smallest current allocation.
  unsigned Bit = 0;
  for (unsigned I = 1; I != BitsPerByte; ++I)
    if (BitAllocs[I] < BitAllocs[Bit])
      Bit = I;

  AllocByteOffset = BitAllocs[Bit];

  // Add our size to it.
  unsigned ReqSize = AllocByteOffset + BitSize;
  BitAllocs[Bit] = ReqSize;
  if (Bytes.size() < ReqSize)
    Bytes.resize(ReqSize);

  // Set our bits.
  AllocMask = 1 << Bit;
  for (uint64_t B : Bits)
    Bytes[AllocByteOffset + B] |= AllocMask;
}

namespace {

struct ByteArrayInfo {
  std::set<uint64_t> Bits;
  uint64_t BitSize;
  GlobalVariable *ByteArray;
  Constant *Mask;
};

struct LowerBitSets : public ModulePass {
  static char ID;
  LowerBitSets() : ModulePass(ID) {
    initializeLowerBitSetsPass(*PassRegistry::getPassRegistry());
  }

  Module *M;

  bool LinkerSubsectionsViaSymbols;
  IntegerType *Int1Ty;
  IntegerType *Int8Ty;
  IntegerType *Int32Ty;
  Type *Int32PtrTy;
  IntegerType *Int64Ty;
  Type *IntPtrTy;

  // The llvm.bitsets named metadata.
  NamedMDNode *BitSetNM;

  // Mapping from bitset mdstrings to the call sites that test them.
  DenseMap<MDString *, std::vector<CallInst *>> BitSetTestCallSites;

  std::vector<ByteArrayInfo> ByteArrayInfos;

  BitSetInfo
  buildBitSet(MDString *BitSet,
              const DenseMap<GlobalVariable *, uint64_t> &GlobalLayout);
  ByteArrayInfo *createByteArray(BitSetInfo &BSI);
  void allocateByteArrays();
  Value *createBitSetTest(IRBuilder<> &B, BitSetInfo &BSI, ByteArrayInfo *&BAI,
                          Value *BitOffset);
  Value *
  lowerBitSetCall(CallInst *CI, BitSetInfo &BSI, ByteArrayInfo *&BAI,
                  GlobalVariable *CombinedGlobal,
                  const DenseMap<GlobalVariable *, uint64_t> &GlobalLayout);
  void buildBitSetsFromGlobals(const std::vector<MDString *> &BitSets,
                               const std::vector<GlobalVariable *> &Globals);
  bool buildBitSets();
  bool eraseBitSetMetadata();

  bool doInitialization(Module &M) override;
  bool runOnModule(Module &M) override;
};

} // namespace

INITIALIZE_PASS_BEGIN(LowerBitSets, "lowerbitsets",
                      "Lower bitset metadata", false, false)
INITIALIZE_PASS_END(LowerBitSets, "lowerbitsets",
                    "Lower bitset metadata", false, false)
char LowerBitSets::ID = 0;

ModulePass *llvm::createLowerBitSetsPass() { return new LowerBitSets; }

bool LowerBitSets::doInitialization(Module &Mod) {
  M = &Mod;
  const DataLayout &DL = Mod.getDataLayout();

  Triple TargetTriple(M->getTargetTriple());
  LinkerSubsectionsViaSymbols = TargetTriple.isMacOSX();

  Int1Ty = Type::getInt1Ty(M->getContext());
  Int8Ty = Type::getInt8Ty(M->getContext());
  Int32Ty = Type::getInt32Ty(M->getContext());
  Int32PtrTy = PointerType::getUnqual(Int32Ty);
  Int64Ty = Type::getInt64Ty(M->getContext());
  IntPtrTy = DL.getIntPtrType(M->getContext(), 0);

  BitSetNM = M->getNamedMetadata("llvm.bitsets");

  BitSetTestCallSites.clear();

  return false;
}

/// Build a bit set for BitSet using the object layouts in
/// GlobalLayout.
BitSetInfo LowerBitSets::buildBitSet(
    MDString *BitSet,
    const DenseMap<GlobalVariable *, uint64_t> &GlobalLayout) {
  BitSetBuilder BSB;

  // Compute the byte offset of each element of this bitset.
  if (BitSetNM) {
    for (MDNode *Op : BitSetNM->operands()) {
      if (Op->getOperand(0) != BitSet || !Op->getOperand(1))
        continue;
      auto OpGlobal = cast<GlobalVariable>(
          cast<ConstantAsMetadata>(Op->getOperand(1))->getValue());
      uint64_t Offset =
          cast<ConstantInt>(cast<ConstantAsMetadata>(Op->getOperand(2))
                                ->getValue())->getZExtValue();

      Offset += GlobalLayout.find(OpGlobal)->second;

      BSB.addOffset(Offset);
    }
  }

  return BSB.build();
}

/// Build a test that bit BitOffset mod sizeof(Bits)*8 is set in
/// Bits. This pattern matches to the bt instruction on x86.
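/// For a 32-bit Bits value this lowers to roughly the following sequence,
/// after BitOffset has been truncated or extended to i32 (a sketch for
/// orientation only):
///   %idx  = and i32 %BitOffset, 31
///   %mask = shl i32 1, %idx
///   %bits = and i32 %Bits, %mask
///   %res  = icmp ne i32 %bits, 0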
static Value *createMaskedBitTest(IRBuilder<> &B, Value *Bits,
                                  Value *BitOffset) {
  auto BitsType = cast<IntegerType>(Bits->getType());
  unsigned BitWidth = BitsType->getBitWidth();

  BitOffset = B.CreateZExtOrTrunc(BitOffset, BitsType);
  Value *BitIndex =
      B.CreateAnd(BitOffset, ConstantInt::get(BitsType, BitWidth - 1));
  Value *BitMask = B.CreateShl(ConstantInt::get(BitsType, 1), BitIndex);
  Value *MaskedBits = B.CreateAnd(Bits, BitMask);
  return B.CreateICmpNE(MaskedBits, ConstantInt::get(BitsType, 0));
}

ByteArrayInfo *LowerBitSets::createByteArray(BitSetInfo &BSI) {
  // Create globals to stand in for byte arrays and masks. These never actually
  // get initialized; we RAUW and erase them later in allocateByteArrays() once
  // we know the offset and mask to use.
  auto ByteArrayGlobal = new GlobalVariable(
      *M, Int8Ty, /*isConstant=*/true, GlobalValue::PrivateLinkage, nullptr);
  auto MaskGlobal = new GlobalVariable(
      *M, Int8Ty, /*isConstant=*/true, GlobalValue::PrivateLinkage, nullptr);

  ByteArrayInfos.emplace_back();
  ByteArrayInfo *BAI = &ByteArrayInfos.back();

  BAI->Bits = BSI.Bits;
  BAI->BitSize = BSI.BitSize;
  BAI->ByteArray = ByteArrayGlobal;
  BAI->Mask = ConstantExpr::getPtrToInt(MaskGlobal, Int8Ty);
  return BAI;
}

void LowerBitSets::allocateByteArrays() {
  std::stable_sort(ByteArrayInfos.begin(), ByteArrayInfos.end(),
                   [](const ByteArrayInfo &BAI1, const ByteArrayInfo &BAI2) {
                     return BAI1.BitSize > BAI2.BitSize;
                   });

  std::vector<uint64_t> ByteArrayOffsets(ByteArrayInfos.size());

  ByteArrayBuilder BAB;
  for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
    ByteArrayInfo *BAI = &ByteArrayInfos[I];

    uint8_t Mask;
    BAB.allocate(BAI->Bits, BAI->BitSize, ByteArrayOffsets[I], Mask);

    BAI->Mask->replaceAllUsesWith(ConstantInt::get(Int8Ty, Mask));
    cast<GlobalVariable>(BAI->Mask->getOperand(0))->eraseFromParent();
  }

  Constant *ByteArrayConst = ConstantDataArray::get(M->getContext(), BAB.Bytes);
  auto ByteArray =
      new GlobalVariable(*M, ByteArrayConst->getType(), /*isConstant=*/true,
                         GlobalValue::PrivateLinkage, ByteArrayConst);

  for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
    ByteArrayInfo *BAI = &ByteArrayInfos[I];

    Constant *Idxs[] = {ConstantInt::get(IntPtrTy, 0),
                        ConstantInt::get(IntPtrTy, ByteArrayOffsets[I])};
    Constant *GEP = ConstantExpr::getInBoundsGetElementPtr(ByteArray, Idxs);

    // Create an alias instead of RAUW'ing the gep directly. On x86 this
    // ensures that the pc-relative displacement is folded into the lea instead
    // of the test instruction getting another displacement.
    GlobalAlias *Alias = GlobalAlias::create(
        Int8Ty, 0, GlobalValue::PrivateLinkage, "bits", GEP, M);
    BAI->ByteArray->replaceAllUsesWith(Alias);
    BAI->ByteArray->eraseFromParent();
  }

  ByteArraySizeBits = BAB.BitAllocs[0] + BAB.BitAllocs[1] + BAB.BitAllocs[2] +
                      BAB.BitAllocs[3] + BAB.BitAllocs[4] + BAB.BitAllocs[5] +
                      BAB.BitAllocs[6] + BAB.BitAllocs[7];
  ByteArraySizeBytes = BAB.Bytes.size();
}

/// Build a test that bit BitOffset is set in BSI, using the byte array
/// referenced by BAI when the test cannot be done against an inline constant.
Value *LowerBitSets::createBitSetTest(IRBuilder<> &B, BitSetInfo &BSI,
                                      ByteArrayInfo *&BAI, Value *BitOffset) {
  if (BSI.BitSize <= 64) {
    // If the bit set is sufficiently small, we can avoid a load by bit testing
    // a constant.
    IntegerType *BitsTy;
    if (BSI.BitSize <= 32)
      BitsTy = Int32Ty;
    else
      BitsTy = Int64Ty;

    uint64_t Bits = 0;
    for (auto Bit : BSI.Bits)
      Bits |= uint64_t(1) << Bit;
    Constant *BitsConst = ConstantInt::get(BitsTy, Bits);
    return createMaskedBitTest(B, BitsConst, BitOffset);
  } else {
    if (!BAI) {
      ++NumByteArraysCreated;
      BAI = createByteArray(BSI);
    }

    Value *ByteAddr = B.CreateGEP(BAI->ByteArray, BitOffset);
    Value *Byte = B.CreateLoad(ByteAddr);

    Value *ByteAndMask = B.CreateAnd(Byte, BAI->Mask);
    return B.CreateICmpNE(ByteAndMask, ConstantInt::get(Int8Ty, 0));
  }
}

/// Lower a llvm.bitset.test call to its implementation. Returns the value to
/// replace the call with.
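/// In outline (a sketch for orientation, not literal output): unless one of
/// the fast paths applies (a constant-folded result, a single-offset compare,
/// or an all-ones bit set), the lowered code subtracts the combined global's
/// address from the pointer, rotates the difference right by AlignLog2,
/// branches on an unsigned comparison of the result against BitSize,
/// conditionally tests the corresponding bit, and merges the two paths with a
/// phi that yields false when the range check fails.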
Value *LowerBitSets::lowerBitSetCall(
    CallInst *CI, BitSetInfo &BSI, ByteArrayInfo *&BAI,
    GlobalVariable *CombinedGlobal,
    const DenseMap<GlobalVariable *, uint64_t> &GlobalLayout) {
  Value *Ptr = CI->getArgOperand(0);
  const DataLayout &DL = M->getDataLayout();

  if (BSI.containsValue(DL, GlobalLayout, Ptr))
    return ConstantInt::getTrue(CombinedGlobal->getParent()->getContext());

  Constant *GlobalAsInt = ConstantExpr::getPtrToInt(CombinedGlobal, IntPtrTy);
  Constant *OffsetedGlobalAsInt = ConstantExpr::getAdd(
      GlobalAsInt, ConstantInt::get(IntPtrTy, BSI.ByteOffset));

  BasicBlock *InitialBB = CI->getParent();

  IRBuilder<> B(CI);

  Value *PtrAsInt = B.CreatePtrToInt(Ptr, IntPtrTy);

  if (BSI.isSingleOffset())
    return B.CreateICmpEQ(PtrAsInt, OffsetedGlobalAsInt);

  Value *PtrOffset = B.CreateSub(PtrAsInt, OffsetedGlobalAsInt);

  Value *BitOffset;
  if (BSI.AlignLog2 == 0) {
    BitOffset = PtrOffset;
  } else {
    // We need to check that the offset both falls within our range and is
    // suitably aligned. We can check both properties at the same time by
    // performing a right rotate by log2(alignment) followed by an integer
    // comparison against the bitset size. The rotate will move the lower
    // order bits that need to be zero into the higher order bits of the
    // result, causing the comparison to fail if they are nonzero. The rotate
    // also conveniently gives us a bit offset to use during the load from
    // the bitset.
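    // For example (arbitrary numbers, for illustration only): with
    // AlignLog2 == 3 on a 64-bit target, an offset of 40 becomes
    // (40 >> 3) | (40 << 61) == 5, which passes the range check whenever
    // BitSize > 5, whereas a misaligned offset of 41 moves its low-order set
    // bit up to bit 61, producing a value that fails the unsigned comparison
    // against any realistic BitSize.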
    Value *OffsetSHR =
        B.CreateLShr(PtrOffset, ConstantInt::get(IntPtrTy, BSI.AlignLog2));
    Value *OffsetSHL = B.CreateShl(
        PtrOffset,
        ConstantInt::get(IntPtrTy, DL.getPointerSizeInBits(0) - BSI.AlignLog2));
    BitOffset = B.CreateOr(OffsetSHR, OffsetSHL);
  }

  Constant *BitSizeConst = ConstantInt::get(IntPtrTy, BSI.BitSize);
  Value *OffsetInRange = B.CreateICmpULT(BitOffset, BitSizeConst);

  // If the bit set is all ones, testing against it is unnecessary.
  if (BSI.isAllOnes())
    return OffsetInRange;

  TerminatorInst *Term = SplitBlockAndInsertIfThen(OffsetInRange, CI, false);
  IRBuilder<> ThenB(Term);

  // Now that we know that the offset is in range and aligned, load the
  // appropriate bit from the bitset.
  Value *Bit = createBitSetTest(ThenB, BSI, BAI, BitOffset);

  // The value we want is 0 if we came directly from the initial block
  // (having failed the range or alignment checks), or the loaded bit if
  // we came from the block in which we loaded it.
  B.SetInsertPoint(CI);
  PHINode *P = B.CreatePHI(Int1Ty, 2);
  P->addIncoming(ConstantInt::get(Int1Ty, 0), InitialBB);
  P->addIncoming(Bit, ThenB.GetInsertBlock());
  return P;
}

/// Given a disjoint set of bitsets and globals, lay out the globals, build the
/// bit sets and lower the llvm.bitset.test calls.
void LowerBitSets::buildBitSetsFromGlobals(
    const std::vector<MDString *> &BitSets,
    const std::vector<GlobalVariable *> &Globals) {
  // Build a new global with the combined contents of the referenced globals.
  std::vector<Constant *> GlobalInits;
  const DataLayout &DL = M->getDataLayout();
  for (GlobalVariable *G : Globals) {
    GlobalInits.push_back(G->getInitializer());
    uint64_t InitSize = DL.getTypeAllocSize(G->getInitializer()->getType());

    // Compute the amount of padding required to align the next element to the
    // next power of 2.
    uint64_t Padding = NextPowerOf2(InitSize - 1) - InitSize;

    // A cap of 128 was found experimentally to give a good data/instruction
    // overhead tradeoff.
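    // For illustration (sizes invented for this comment): a 24-byte global
    // gets 8 bytes of padding, bringing it up to the next power of 2 (32),
    // while a 520-byte global would want 504 bytes and is instead padded by
    // 120 bytes to the next multiple of 128 (640).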
    if (Padding > 128)
      Padding = RoundUpToAlignment(InitSize, 128) - InitSize;

    GlobalInits.push_back(
        ConstantAggregateZero::get(ArrayType::get(Int8Ty, Padding)));
  }
  if (!GlobalInits.empty())
    GlobalInits.pop_back();
  Constant *NewInit = ConstantStruct::getAnon(M->getContext(), GlobalInits);
  auto CombinedGlobal =
      new GlobalVariable(*M, NewInit->getType(), /*isConstant=*/true,
                         GlobalValue::PrivateLinkage, NewInit);

  const StructLayout *CombinedGlobalLayout =
      DL.getStructLayout(cast<StructType>(NewInit->getType()));

  // Compute the offsets of the original globals within the new global.
  DenseMap<GlobalVariable *, uint64_t> GlobalLayout;
  for (unsigned I = 0; I != Globals.size(); ++I)
    // Multiply by 2 to account for padding elements.
    GlobalLayout[Globals[I]] = CombinedGlobalLayout->getElementOffset(I * 2);

  // For each bitset in this disjoint set...
  for (MDString *BS : BitSets) {
    // Build the bitset.
    BitSetInfo BSI = buildBitSet(BS, GlobalLayout);

    ByteArrayInfo *BAI = nullptr;

    // Lower each call to llvm.bitset.test for this bitset.
    for (CallInst *CI : BitSetTestCallSites[BS]) {
      ++NumBitSetCallsLowered;
      Value *Lowered =
          lowerBitSetCall(CI, BSI, BAI, CombinedGlobal, GlobalLayout);
      CI->replaceAllUsesWith(Lowered);
      CI->eraseFromParent();
    }
  }

  // Build aliases pointing to offsets into the combined global for each
  // global from which we built the combined global, and replace references
  // to the original globals with references to the aliases.
  for (unsigned I = 0; I != Globals.size(); ++I) {
    // Multiply by 2 to account for padding elements.
    Constant *CombinedGlobalIdxs[] = {ConstantInt::get(Int32Ty, 0),
                                      ConstantInt::get(Int32Ty, I * 2)};
    Constant *CombinedGlobalElemPtr =
        ConstantExpr::getGetElementPtr(CombinedGlobal, CombinedGlobalIdxs);
    GlobalValue::LinkageTypes GAliasLinkage = LinkerSubsectionsViaSymbols
                                                  ? GlobalValue::PrivateLinkage
                                                  : Globals[I]->getLinkage();
    GlobalAlias *GAlias = GlobalAlias::create(
        Globals[I]->getType()->getElementType(),
        Globals[I]->getType()->getAddressSpace(), GAliasLinkage,
        "", CombinedGlobalElemPtr, M);
    GAlias->takeName(Globals[I]);
    Globals[I]->replaceAllUsesWith(GAlias);
    Globals[I]->eraseFromParent();
  }
}

/// Lower all bit sets in this module.
bool LowerBitSets::buildBitSets() {
  Function *BitSetTestFunc =
      M->getFunction(Intrinsic::getName(Intrinsic::bitset_test));
  if (!BitSetTestFunc)
    return false;

  // Equivalence class set containing bitsets and the globals they reference.
  // This is used to partition the set of bitsets in the module into disjoint
  // sets.
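  // For example (names invented for this comment): if bitset "a" references
  // globals @x and @y, and bitset "b" references @y and @z, then "a", "b",
  // @x, @y and @z all end up in the same disjoint set and are laid out and
  // lowered together.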
  typedef EquivalenceClasses<PointerUnion<GlobalVariable *, MDString *>>
      GlobalClassesTy;
  GlobalClassesTy GlobalClasses;

  for (const Use &U : BitSetTestFunc->uses()) {
    auto CI = cast<CallInst>(U.getUser());

    auto BitSetMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
    if (!BitSetMDVal || !isa<MDString>(BitSetMDVal->getMetadata()))
      report_fatal_error(
          "Second argument of llvm.bitset.test must be metadata string");
    auto BitSet = cast<MDString>(BitSetMDVal->getMetadata());

    // Add the call site to the list of call sites for this bit set. We also
    // use BitSetTestCallSites to keep track of whether we have seen this bit
    // set before. If we have, we don't need to re-add the referenced globals
    // to the equivalence class.
    std::pair<DenseMap<MDString *, std::vector<CallInst *>>::iterator,
              bool> Ins =
        BitSetTestCallSites.insert(
            std::make_pair(BitSet, std::vector<CallInst *>()));
    Ins.first->second.push_back(CI);
    if (!Ins.second)
      continue;

    // Add the bitset to the equivalence class.
    GlobalClassesTy::iterator GCI = GlobalClasses.insert(BitSet);
    GlobalClassesTy::member_iterator CurSet = GlobalClasses.findLeader(GCI);

    if (!BitSetNM)
      continue;

    // Verify the bitset metadata and add the referenced globals to the
    // bitset's equivalence class.
    for (MDNode *Op : BitSetNM->operands()) {
      if (Op->getNumOperands() != 3)
        report_fatal_error(
            "All operands of llvm.bitsets metadata must have 3 elements");

      if (Op->getOperand(0) != BitSet || !Op->getOperand(1))
        continue;

      auto OpConstMD = dyn_cast<ConstantAsMetadata>(Op->getOperand(1));
      if (!OpConstMD)
        report_fatal_error("Bit set element must be a constant");
      auto OpGlobal = dyn_cast<GlobalVariable>(OpConstMD->getValue());
      if (!OpGlobal)
        report_fatal_error("Bit set element must refer to global");

      auto OffsetConstMD = dyn_cast<ConstantAsMetadata>(Op->getOperand(2));
      if (!OffsetConstMD)
        report_fatal_error("Bit set element offset must be a constant");
      auto OffsetInt = dyn_cast<ConstantInt>(OffsetConstMD->getValue());
      if (!OffsetInt)
        report_fatal_error(
            "Bit set element offset must be an integer constant");

      CurSet = GlobalClasses.unionSets(
          CurSet, GlobalClasses.findLeader(GlobalClasses.insert(OpGlobal)));
    }
  }

  if (GlobalClasses.empty())
    return false;

  // For each disjoint set we found...
  for (GlobalClassesTy::iterator I = GlobalClasses.begin(),
                                 E = GlobalClasses.end();
       I != E; ++I) {
    if (!I->isLeader()) continue;

    ++NumBitSetDisjointSets;

    // Build the list of bitsets and referenced globals in this disjoint set.
    std::vector<MDString *> BitSets;
    std::vector<GlobalVariable *> Globals;
    llvm::DenseMap<MDString *, uint64_t> BitSetIndices;
    llvm::DenseMap<GlobalVariable *, uint64_t> GlobalIndices;
    for (GlobalClassesTy::member_iterator MI = GlobalClasses.member_begin(I);
         MI != GlobalClasses.member_end(); ++MI) {
      if ((*MI).is<MDString *>()) {
        BitSetIndices[MI->get<MDString *>()] = BitSets.size();
        BitSets.push_back(MI->get<MDString *>());
      } else {
        GlobalIndices[MI->get<GlobalVariable *>()] = Globals.size();
        Globals.push_back(MI->get<GlobalVariable *>());
      }
    }

    // For each bitset, build a set of indices that refer to globals referenced
    // by the bitset.
    std::vector<std::set<uint64_t>> BitSetMembers(BitSets.size());
    if (BitSetNM) {
      for (MDNode *Op : BitSetNM->operands()) {
        // Op = { bitset name, global, offset }
        if (!Op->getOperand(1))
          continue;
        auto I = BitSetIndices.find(cast<MDString>(Op->getOperand(0)));
        if (I == BitSetIndices.end())
          continue;

        auto OpGlobal = cast<GlobalVariable>(
            cast<ConstantAsMetadata>(Op->getOperand(1))->getValue());
        BitSetMembers[I->second].insert(GlobalIndices[OpGlobal]);
      }
    }

    // Order the sets of indices by size. The GlobalLayoutBuilder works best
    // when given small index sets first.
    std::stable_sort(
        BitSetMembers.begin(), BitSetMembers.end(),
        [](const std::set<uint64_t> &O1, const std::set<uint64_t> &O2) {
          return O1.size() < O2.size();
        });

    // Create a GlobalLayoutBuilder and provide it with index sets as layout
    // fragments. The GlobalLayoutBuilder tries to lay out members of fragments
    // as close together as possible.
    GlobalLayoutBuilder GLB(Globals.size());
    for (auto &&MemSet : BitSetMembers)
      GLB.addFragment(MemSet);

    // Build a vector of globals with the computed layout.
    std::vector<GlobalVariable *> OrderedGlobals(Globals.size());
    auto OGI = OrderedGlobals.begin();
    for (auto &&F : GLB.Fragments)
      for (auto &&Offset : F)
        *OGI++ = Globals[Offset];

    // Order bitsets by name for determinism.
    std::sort(BitSets.begin(), BitSets.end(), [](MDString *S1, MDString *S2) {
      return S1->getString() < S2->getString();
    });

    // Build the bitsets from this disjoint set.
    buildBitSetsFromGlobals(BitSets, OrderedGlobals);
  }

  allocateByteArrays();

  return true;
}

bool LowerBitSets::eraseBitSetMetadata() {
  if (!BitSetNM)
    return false;

  M->eraseNamedMetadata(BitSetNM);
  return true;
}

bool LowerBitSets::runOnModule(Module &M) {
  bool Changed = buildBitSets();
  Changed |= eraseBitSetMetadata();
  return Changed;
}