//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via !type metadata) that the list of callees is fixed. This
// includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class and
//   each list of constant arguments: evaluate the function, store the return
//   value alongside the virtual table, and rewrite each virtual call as a load
//   from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value, replace
//   each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
//
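// As an illustrative sketch (not tied to any particular test case), given:
//
//   struct A { virtual int f(); };
//   struct B : A { int f() override { return 1; } };
//   struct C : A { int f() override { return 2; } };
//
// a call a->f() guarded by a type test for A's type identifier can be folded
// to a constant loaded from a slot placed next to each vtable (virtual
// constant propagation), or rewritten as a direct call if the whole program
// contains only one override.
//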
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/PassRegistry.h"
#include "llvm/PassSupport.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include <algorithm>
#include <cstddef>
#include <map>
#include <set>
#include <string>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
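//
// For example (purely illustrative), with Size == 1, MinByte == 0 and a single
// used byte vector {0xff, 0x0f}, bits 0-11 are already taken, so the function
// returns 1 * 8 + 4 == 12.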
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of the
  // used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
  // this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
                                       : Target.TM->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}

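// The two helpers below record, for each target, where a computed constant
// return value will be stored (in the region before or after the vtable) and
// report, via OffsetByte/OffsetBit, where rewritten calls should load it from
// relative to the address point. For instance (purely illustrative), with
// AllocBefore == 9 and BitWidth == 1, setBeforeReturnValues places the value
// in bit 1 of the byte at offset -2 from the address point.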
void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}

VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()) {}

namespace {

// A slot in a set of virtual tables. The TypeID identifies the set of virtual
// tables, and the ByteOffset is the offset in bytes from the address point to
// the virtual function pointer.
struct VTableSlot {
  Metadata *TypeID;
  uint64_t ByteOffset;
};

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

} // end namespace llvm

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  // If non-null, this field points to the associated unsafe use count stored in
  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
  // of that field for details.
  unsigned *NumUnsafeUses;

  void emitRemark() {
    Function *F = CS.getCaller();
    emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F,
                           CS.getInstruction()->getDebugLoc(),
                           "devirtualized call");
  }

  void replaceAndErase(Value *New) {
    emitRemark();
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
    // This use is no longer unsafe.
    if (NumUnsafeUses)
      --*NumUnsafeUses;
  }
};

struct DevirtModule {
  Module &M;
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  // This map keeps track of the number of "unsafe" uses of a loaded function
  // pointer. The key is the associated llvm.type.test intrinsic call generated
  // by this pass. An unsafe use is one that calls the loaded function pointer
  // directly. Every time we eliminate an unsafe use (for example, by
  // devirtualizing it or by applying virtual constant propagation), we
  // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call with
  // true.
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())) {}

  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,
      DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,
                            uint64_t ByteOffset);
  bool trySingleImplDevirt(ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          ArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;

  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // end anonymous namespace

INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}

PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
                                              ModuleAnalysisManager &) {
  if (!DevirtModule(M).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}

void DevirtModule::buildTypeIdentifierMap(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(M.getGlobalList().size());
  SmallVector<MDNode *, 2> Types;
  for (GlobalVariable &GV : M.globals()) {
    Types.clear();
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (Types.empty())
      continue;

    VTableBits *&BitsPtr = GVToBits[&GV];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = &GV;
      Bits.back().ObjectSize =
          M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();
    }

    for (MDNode *Type : Types) {
      auto TypeID = Type->getOperand(1).get();

      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();

      TypeIdMap[TypeID].insert({BitsPtr, Offset});
    }
  }
}

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
  for (const TypeMemberInfo &TM : TypeMemberInfos) {
    if (!TM.Bits->GV->isConstant())
      return false;

    auto Init = dyn_cast<ConstantArray>(TM.Bits->GV->getInitializer());
    if (!Init)
      return false;
    ArrayType *VTableTy = Init->getType();

    uint64_t ElemSize =
        M.getDataLayout().getTypeAllocSize(VTableTy->getElementType());
    uint64_t GlobalSlotOffset = TM.Offset + ByteOffset;
    if (GlobalSlotOffset % ElemSize != 0)
      return false;

    unsigned Op = GlobalSlotOffset / ElemSize;
    if (Op >= Init->getNumOperands())
      return false;

    auto Fn = dyn_cast<Function>(Init->getOperand(Op)->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &TM});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}

bool DevirtModule::trySingleImplDevirt(
    ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    VCallSite.emitRemark();
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
    // This use is no longer unsafe.
    if (VCallSite.NumUnsafeUses)
      --*VCallSite.NumUnsafeUses;
  }
  return true;
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase(TheRetValConst);
  return true;
}

bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const TypeMemberInfo *UniqueMember = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
      }
    }

    // We should have found a unique member or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueMember);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueMember->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueMember->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase(Cmp);
    }
    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
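  // For instance (purely illustrative), p->f(1, 2) and q->f(1, 2) both map to
  // the key {i32 1, i32 2}, while p->f(x, 2) is skipped below because its
  // first argument is not a constant.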
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables associated with the
    // type.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase(IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase(Val);
      }
    }
  }
  return true;
}

static void emitTargetsRemarks(const std::vector<VirtualCallTarget> &TargetsForSlot) {
  for (const VirtualCallTarget &Target : TargetsForSlot) {
    Function *F = Target.Fn;
    DISubprogram *SP = F->getSubprogram();
    DebugLoc DL = SP ? DebugLoc::get(SP->getScopeLine(), 0, SP) : DebugLoc();
    emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F, DL,
                           std::string("devirtualized ") + F->getName().str());
  }
}

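// Rebuild a vtable global so that the constant return values recorded by
// virtual constant propagation end up stored immediately before and after it.
// Schematically (illustrative only), a vtable @vt becomes
//   @anon = private constant { [k x i8], <vtable type>, [m x i8] } { ... }
//   @vt = alias ..., getelementptr(..., @anon, 0, 1)
// so existing references to @vt keep pointing at the original address point.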
void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Copy the original vtable's metadata to the anonymous global, adjusting
  // offsets as required.
  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}

void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
                                     Function *AssumeFunc) {
  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
  // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
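  //
  // A typical pattern (schematic IR, not an exact dump) is:
  //   %vtable = load i8*, i8** %vtableptr
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)
  //   ... an indirect call through a pointer loaded from %vtable ...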
  DenseSet<Value *> SeenPtrs;
  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);

    // If we found any, add them to CallSlots. Only do this if we haven't seen
    // the vtable pointer before, as it may have been CSE'd with pointers from
    // other call sites, and we don't want to process call sites multiple times.
    if (!Assumes.empty()) {
      Metadata *TypeId =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second) {
        for (DevirtCallSite Call : DevirtCalls) {
          CallSlots[{TypeId, Call.Offset}].push_back(
              {CI->getArgOperand(0), Call.CS, nullptr});
        }
      }
    }

    // We no longer need the assumes or the type test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }
}

void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);

  for (auto I = TypeCheckedLoadFunc->use_begin(),
            E = TypeCheckedLoadFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    Value *Ptr = CI->getArgOperand(0);
    Value *Offset = CI->getArgOperand(1);
    Value *TypeIdValue = CI->getArgOperand(2);
    Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();

    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<Instruction *, 1> LoadedPtrs;
    SmallVector<Instruction *, 1> Preds;
    bool HasNonCallUses = false;
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI);

    // Start by generating "pessimistic" code that explicitly loads the function
    // pointer from the vtable and performs the type check. If possible, we will
    // eliminate the load and the type check later.
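    //
    // Schematically (illustrative, not an exact dump), a call
    //   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable, i32 %off,
    //                                                  metadata !"_ZTS1A")
    // is split into a GEP + load of the function pointer and a separate
    //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
    // that feeds the users of the two elements of the returned pair.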

    // If possible, only generate the load at the point where it is used.
    // This helps avoid unnecessary spills.
    IRBuilder<> LoadB(
        (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
    Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
    Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
    Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);

    for (Instruction *LoadedPtr : LoadedPtrs) {
      LoadedPtr->replaceAllUsesWith(LoadedValue);
      LoadedPtr->eraseFromParent();
    }

    // Likewise for the type test.
    IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
    CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});

    for (Instruction *Pred : Preds) {
      Pred->replaceAllUsesWith(TypeTestCall);
      Pred->eraseFromParent();
    }

    // We have already erased any extractvalue instructions that refer to the
    // intrinsic call, but the intrinsic may have other non-extractvalue uses
    // (although this is unlikely). In that case, explicitly build a pair and
    // RAUW it.
    if (!CI->use_empty()) {
      Value *Pair = UndefValue::get(CI->getType());
      IRBuilder<> B(CI);
      Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
      Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
      CI->replaceAllUsesWith(Pair);
    }

    // The number of unsafe uses is initially the number of uses.
    auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
    NumUnsafeUses = DevirtCalls.size();

    // If the function pointer has a non-call user, we cannot eliminate the type
    // check, as one of those users may eventually call the pointer. Increment
    // the unsafe use count to make sure it cannot reach zero.
    if (HasNonCallUses)
      ++NumUnsafeUses;
    for (DevirtCallSite Call : DevirtCalls) {
      CallSlots[{TypeId, Call.Offset}].push_back(
          {Ptr, Call.CS, &NumUnsafeUses});
    }

    CI->eraseFromParent();
  }
}

bool DevirtModule::run() {
  Function *TypeTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_test));
  Function *TypeCheckedLoadFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));

  if ((!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
       AssumeFunc->use_empty()) &&
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
    return false;

  if (TypeTestFunc && AssumeFunc)
    scanTypeTestUsers(TypeTestFunc, AssumeFunc);

  if (TypeCheckedLoadFunc)
    scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);

  // Rebuild type metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
  buildTypeIdentifierMap(Bits, TypeIdMap);
  if (TypeIdMap.empty())
    return true;

  // For each (type, offset) pair:
  bool DidVirtualConstProp = false;
  for (auto &S : CallSlots) {
    // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
                                   S.first.ByteOffset))
      continue;

    if (trySingleImplDevirt(TargetsForSlot, S.second)) {
      emitTargetsRemarks(TargetsForSlot);
      continue;
    }

    if (tryVirtualConstProp(TargetsForSlot, S.second)) {
      emitTargetsRemarks(TargetsForSlot);
      DidVirtualConstProp = true;
    }
  }

  // If we were able to eliminate all unsafe uses for a type checked load,
  // eliminate the type test by replacing it with true.
  if (TypeCheckedLoadFunc) {
    auto True = ConstantInt::getTrue(M.getContext());
    for (auto &&U : NumUnsafeUsesForTypeTest) {
      if (U.second == 0) {
        U.first->replaceAllUsesWith(True);
        U.first->eraseFromParent();
      }
    }
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}