//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via bitset information) that the list of callees is fixed.
// This includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee
//   (see the example below).
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class
//   and each list of constant arguments: evaluate the function, store the
//   return value alongside the virtual table, and rewrite each virtual call
//   as a load from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value,
//   replace each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
//
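// As an illustrative sketch of the first optimization (the class names here
// are hypothetical, not part of the pass), single implementation
// devirtualization transforms C++ code such as:
//
//   struct Animal { virtual int legs() = 0; };
//   struct Dog : Animal { int legs() override { return 4; } };
//   int count(Animal *A) { return A->legs(); } // indirect call via vtable
//
// into a direct call to Dog::legs, once the bitset information proves that
// Dog::legs is the only implementation reachable from the call site
// (__cxa_pure_virtual is disregarded, as calling it is UB).
//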
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include "llvm/Transforms/Utils/Local.h"

#include <set>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of
  // the used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte.
  // Effectively, this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.BS->Bits->After.BytesUsed
                                       : Target.BS->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
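    // For example (the byte values here are purely illustrative): if the
    // bytes at index I across the used regions are 0b00000111, 0b00000001
    // and 0b00000011, then BitsUsed is 0b00000111 and the first free bit is
    // bit 3, found by counting the trailing zeros of ~BitsUsed = 0b11111000.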
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}

void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
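  // Negative offsets address the bytes allocated before the vtable's address
  // point: a 1-bit value lives at bit (AllocBefore % 8) of the byte
  // (AllocBefore / 8 + 1) bytes before the address point, while wider values
  // round both the allocation point and the value size up to whole bytes.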
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}

VirtualCallTarget::VirtualCallTarget(Function *Fn, const BitSetInfo *BS)
    : Fn(Fn), BS(BS),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()) {}

namespace {

// A slot in a set of virtual tables. The BitSetID identifies the set of
// virtual tables, and the ByteOffset is the offset in bytes from the address
// point to the virtual function pointer.
struct VTableSlot {
  Metadata *BitSetID;
  uint64_t ByteOffset;
};

}

namespace llvm {

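// Specializing DenseMapInfo lets VTableSlot be used as a DenseMap/MapVector
// key, hashing the (bit set, offset) pair that identifies a virtual function.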
template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.BitSetID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.BitSetID == RHS.BitSetID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

}

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  void replaceAndErase(Value *New) {
    CS->replaceAllUsesWith(New);
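    // A call instruction can simply be erased, but an invoke terminates its
    // basic block: replace it with a branch to its normal destination and
    // detach it from its unwind destination before erasing it.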
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
  }
};

struct DevirtModule {
  Module &M;
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())) {}
  void findLoadCallsAtConstantOffset(Metadata *BitSet, Value *Ptr,
                                     uint64_t Offset, Value *VTable);
  void findCallsAtConstantOffset(Metadata *BitSet, Value *Ptr, uint64_t Offset,
                                 Value *VTable);

  void buildBitSets(std::vector<VTableBits> &Bits,
                    DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets);
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<BitSetInfo> &BitSetInfos,
                            uint64_t ByteOffset);
  bool trySingleImplDevirt(ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          ArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;
  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }
  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // anonymous namespace

INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}

// Search for virtual calls that call FPtr and add them to CallSlots.
void DevirtModule::findCallsAtConstantOffset(Metadata *BitSet, Value *FPtr,
                                             uint64_t Offset, Value *VTable) {
  for (const Use &U : FPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(BitSet, User, Offset, VTable);
    } else if (auto CI = dyn_cast<CallInst>(User)) {
      CallSlots[{BitSet, Offset}].push_back({VTable, CI});
    } else if (auto II = dyn_cast<InvokeInst>(User)) {
      CallSlots[{BitSet, Offset}].push_back({VTable, II});
    }
  }
}

// Search for virtual calls that load from VPtr and add them to CallSlots.
void DevirtModule::findLoadCallsAtConstantOffset(Metadata *BitSet, Value *VPtr,
                                                 uint64_t Offset,
                                                 Value *VTable) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(BitSet, User, Offset, VTable);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(BitSet, User, Offset, VTable);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
        uint64_t GEPOffset = M.getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(BitSet, User, Offset + GEPOffset, VTable);
      }
    }
  }
}

void DevirtModule::buildBitSets(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets) {
  NamedMDNode *BitSetNM = M.getNamedMetadata("llvm.bitsets");
  if (!BitSetNM)
    return;

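  // Each operand of llvm.bitsets is expected to be an MDNode whose operands
  // are (bit set identifier, global variable or alias, byte offset within the
  // global); collect the globals and group the entries by bit set.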
  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(BitSetNM->getNumOperands());
  for (auto Op : BitSetNM->operands()) {
    auto OpConstMD = dyn_cast_or_null<ConstantAsMetadata>(Op->getOperand(1));
    if (!OpConstMD)
      continue;
    auto BitSetID = Op->getOperand(0).get();

    Constant *OpConst = OpConstMD->getValue();
    if (auto GA = dyn_cast<GlobalAlias>(OpConst))
      OpConst = GA->getAliasee();
    auto OpGlobal = dyn_cast<GlobalVariable>(OpConst);
    if (!OpGlobal)
      continue;

    uint64_t Offset =
        cast<ConstantInt>(
            cast<ConstantAsMetadata>(Op->getOperand(2))->getValue())
            ->getZExtValue();

    VTableBits *&BitsPtr = GVToBits[OpGlobal];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = OpGlobal;
      Bits.back().ObjectSize = M.getDataLayout().getTypeAllocSize(
          OpGlobal->getInitializer()->getType());
      BitsPtr = &Bits.back();
    }
    BitSets[BitSetID].insert({BitsPtr, Offset});
  }
}

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<BitSetInfo> &BitSetInfos, uint64_t ByteOffset) {
  for (const BitSetInfo &BS : BitSetInfos) {
    if (!BS.Bits->GV->isConstant())
      return false;

    auto Init = dyn_cast<ConstantArray>(BS.Bits->GV->getInitializer());
    if (!Init)
      return false;
    ArrayType *VTableTy = Init->getType();

    uint64_t ElemSize =
        M.getDataLayout().getTypeAllocSize(VTableTy->getElementType());
    uint64_t GlobalSlotOffset = BS.Offset + ByteOffset;
    if (GlobalSlotOffset % ElemSize != 0)
      return false;

    unsigned Op = GlobalSlotOffset / ElemSize;
    if (Op >= Init->getNumOperands())
      return false;

    auto Fn = dyn_cast<Function>(Init->getOperand(Op)->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &BS});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}

bool DevirtModule::trySingleImplDevirt(
    ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
  }
  return true;
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
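    // Each target takes 'this' plus the explicit arguments, so its arity
    // must be Args.size() + 1 and the explicit parameter types must match
    // the constant arguments exactly.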
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
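    // Pass a null 'this' pointer; tryVirtualConstProp only evaluates
    // functions whose first argument is unused, so the value never matters.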
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase(TheRetValConst);
  return true;
}

bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
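  // If exactly one vtable's function returns IsOne (and, since the return
  // values are not uniform, every other vtable's function returns the
  // opposite value), a call returns IsOne if and only if the vtable pointer
  // points at that one vtable, so the call can become a pointer comparison.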
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const BitSetInfo *UniqueBitSet = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueBitSet)
          return false;
        UniqueBitSet = Target.BS;
      }
    }

    // We should have found a unique bit set or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueBitSet);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueBitSet->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueBitSet->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase(Cmp);
    }
    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables in the bitset.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    // Rewrite each call to a load from OffsetByte/OffsetBit.
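    // For 1-bit values, load the containing byte, mask off the bit at
    // OffsetBit and compare against zero; for wider values, load the integer
    // directly from the computed address.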
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase(IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase(Val);
      }
    }
  }
  return true;
}

void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
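  // The alias takes over the original global's name and uses, so the vtable
  // symbol and its address point remain unchanged for the rest of the
  // program.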
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}

bool DevirtModule::run() {
  Function *BitSetTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::bitset_test));
  if (!BitSetTestFunc || BitSetTestFunc->use_empty())
    return false;

  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
  if (!AssumeFunc || AssumeFunc->use_empty())
    return false;

  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.bitset.test(%p, %md)). This indicates that %p
  // points to a vtable in the bitset %md. Group calls by (bitset, offset) pair
  // (effectively the identity of the virtual function) and store to CallSlots.
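  //
  // The matched pattern looks roughly like this in IR (illustrative only;
  // names and types vary):
  //   %vtable = load i8*, i8** %vtableptr
  //   %p = call i1 @llvm.bitset.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)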
  DenseSet<Value *> SeenPtrs;
  for (auto I = BitSetTestFunc->use_begin(), E = BitSetTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Find llvm.assume intrinsics for this llvm.bitset.test call.
    SmallVector<CallInst *, 1> Assumes;
    for (const Use &CIU : CI->uses()) {
      auto AssumeCI = dyn_cast<CallInst>(CIU.getUser());
      if (AssumeCI && AssumeCI->getCalledValue() == AssumeFunc)
        Assumes.push_back(AssumeCI);
    }

    // If we found any, search for virtual calls based on %p and add them to
    // CallSlots.
    if (!Assumes.empty()) {
      Metadata *BitSet =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second)
        findLoadCallsAtConstantOffset(BitSet, Ptr, 0, CI->getArgOperand(0));
    }

    // We no longer need the assumes or the bitset test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }

  // Rebuild llvm.bitsets metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<BitSetInfo>> BitSets;
  buildBitSets(Bits, BitSets);
  if (BitSets.empty())
    return true;

  // For each (bitset, offset) pair:
  bool DidVirtualConstProp = false;
  for (auto &S : CallSlots) {
    // Search each of the vtables in the bitset for the virtual function
    // implementation at offset S.first.ByteOffset, and add to TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, BitSets[S.first.BitSetID],
                                   S.first.ByteOffset))
      continue;

    if (trySingleImplDevirt(TargetsForSlot, S.second))
      continue;

    DidVirtualConstProp |= tryVirtualConstProp(TargetsForSlot, S.second);
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}