//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via !type metadata) that the list of callees is fixed. This
// includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class and
//   each list of constant arguments: evaluate the function, store the return
//   value alongside the virtual table, and rewrite each virtual call as a load
//   from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value, replace
//   each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
//
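// As an illustrative sketch (not output of this pass; the exact IR depends on
// the frontend and target), single implementation devirtualization rewrites an
// indirect call such as:
//
//   %fptr = load i8*, i8** %vtable_slot
//   %result = call i32 %fptr(i8* %obj)
//
// into a direct call to the sole possible callee, here a hypothetical
// implementation @_ZN1B1fEv:
//
//   %result = call i32 @_ZN1B1fEv(i8* %obj)
//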
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/PassRegistry.h"
#include "llvm/PassSupport.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include <algorithm>
#include <cstddef>
#include <map>
#include <set>
#include <string>
#include <vector>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of the
  // used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
  // this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
                                       : Target.TM->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}

void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}

VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()) {}

namespace {

// A slot in a set of virtual tables. The TypeID identifies the set of virtual
// tables, and the ByteOffset is the offset in bytes from the address point to
// the virtual function pointer.
struct VTableSlot {
  Metadata *TypeID;
  uint64_t ByteOffset;
};
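
// For example (hypothetical values), the slot {!"_ZTS1A", 8} denotes the
// virtual function pointer stored 8 bytes past the address point in every
// vtable belonging to the type identifier !"_ZTS1A"; on a typical 64-bit
// target that is the second virtual function of class A.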

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

} // end namespace llvm

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  // If non-null, this field points to the associated unsafe use count stored in
  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
  // of that field for details.
  unsigned *NumUnsafeUses;

  void emitRemark(const Twine &OptName, const Twine &TargetName) {
    Function *F = CS.getCaller();
    emitOptimizationRemark(
        F->getContext(), DEBUG_TYPE, *F,
        CS.getInstruction()->getDebugLoc(),
        OptName + ": devirtualized a call to " + TargetName);
  }

  void replaceAndErase(const Twine &OptName, const Twine &TargetName,
                       bool RemarksEnabled, Value *New) {
    if (RemarksEnabled)
      emitRemark(OptName, TargetName);
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
    // This use is no longer unsafe.
    if (NumUnsafeUses)
      --*NumUnsafeUses;
  }
};

struct DevirtModule {
  Module &M;
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  bool RemarksEnabled;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  // This map keeps track of the number of "unsafe" uses of a loaded function
  // pointer. The key is the associated llvm.type.test intrinsic call generated
  // by this pass. An unsafe use is one that calls the loaded function pointer
  // directly. Every time we eliminate an unsafe use (for example, by
  // devirtualizing it or by applying virtual constant propagation), we
  // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call with
  // true.
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
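
  // For instance (an illustrative sequence, not actual pass output): a call to
  // llvm.type.checked.load whose loaded pointer feeds two devirtualizable
  // calls starts with a count of 2; devirtualizing both calls decrements the
  // count to 0, at which point run() replaces the associated llvm.type.test
  // with true and erases it.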

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())),
        RemarksEnabled(areRemarksEnabled()) {}

  bool areRemarksEnabled();

  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,
      DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,
                            uint64_t ByteOffset);
  bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;

  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // end anonymous namespace

INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}

PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
                                              ModuleAnalysisManager &) {
  if (!DevirtModule(M).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}

void DevirtModule::buildTypeIdentifierMap(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(M.getGlobalList().size());
  SmallVector<MDNode *, 2> Types;
  for (GlobalVariable &GV : M.globals()) {
    Types.clear();
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (Types.empty())
      continue;

    VTableBits *&BitsPtr = GVToBits[&GV];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = &GV;
      Bits.back().ObjectSize =
          M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();
    }

    for (MDNode *Type : Types) {
      auto TypeID = Type->getOperand(1).get();

      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();

      TypeIdMap[TypeID].insert({BitsPtr, Offset});
    }
  }
}

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
  for (const TypeMemberInfo &TM : TypeMemberInfos) {
    if (!TM.Bits->GV->isConstant())
      return false;

    auto Init = dyn_cast<ConstantArray>(TM.Bits->GV->getInitializer());
    if (!Init)
      return false;
    ArrayType *VTableTy = Init->getType();

    uint64_t ElemSize =
        M.getDataLayout().getTypeAllocSize(VTableTy->getElementType());
    uint64_t GlobalSlotOffset = TM.Offset + ByteOffset;
    if (GlobalSlotOffset % ElemSize != 0)
      return false;

    unsigned Op = GlobalSlotOffset / ElemSize;
    if (Op >= Init->getNumOperands())
      return false;

    auto Fn = dyn_cast<Function>(Init->getOperand(Op)->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &TM});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}

bool DevirtModule::trySingleImplDevirt(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  if (RemarksEnabled)
    TargetsForSlot[0].WasDevirt = true;
  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    if (RemarksEnabled)
      VCallSite.emitRemark("single-impl", TheFn->getName());
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
    // This use is no longer unsafe.
    if (VCallSite.NumUnsafeUses)
      --*VCallSite.NumUnsafeUses;
  }
  return true;
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase("uniform-ret-val", TargetsForSlot[0].Fn->getName(),
                         RemarksEnabled, TheRetValConst);
  if (RemarksEnabled)
    for (auto &&Target : TargetsForSlot)
      Target.WasDevirt = true;
  return true;
}

bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const TypeMemberInfo *UniqueMember = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
      }
    }

    // We should have found a unique member or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueMember);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueMember->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueMember->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase("unique-ret-val", TargetsForSlot[0].Fn->getName(),
                           RemarksEnabled, Cmp);
    }
    // Update devirtualization statistics for targets.
    if (RemarksEnabled)
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;

    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}
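
// As an illustrative sketch of the unique return value optimization: if, for
// an i1-returning virtual function, only class B's implementation evaluates to
// true for the given constant arguments, a call
//
//   %result = call i1 %fptr(i8* %obj)
//
// becomes a comparison of the vtable pointer against the address point of a
// hypothetical vtable @_ZTV1B (16 is an assumed address point offset):
//
//   %b = getelementptr i8, i8* bitcast (...* @_ZTV1B to i8*), i64 16
//   %result = icmp eq i8* %vtable, %b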

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables associated with the
    // type.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    if (RemarksEnabled)
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase("virtual-const-prop-1-bit",
                             TargetsForSlot[0].Fn->getName(),
                             RemarksEnabled, IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase("virtual-const-prop",
                             TargetsForSlot[0].Fn->getName(),
                             RemarksEnabled, Val);
      }
    }
  }
  return true;
}

void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
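  //
  // The new initializer then has the following layout (illustrative):
  //
  //   { [n x i8] <before bytes>, <original initializer>, [m x i8] <after bytes> }
  //
  // The alias created below points at the middle element, so the original
  // symbol keeps referring to the unmoved address point.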
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Copy the original vtable's metadata to the anonymous global, adjusting
  // offsets as required.
  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}

bool DevirtModule::areRemarksEnabled() {
  const auto &FL = M.getFunctionList();
  if (FL.empty())
    return false;
  const Function &Fn = FL.front();
  auto DI = DiagnosticInfoOptimizationRemark(DEBUG_TYPE, Fn, DebugLoc(), "");
  return DI.isEnabled();
}

void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
                                     Function *AssumeFunc) {
  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
  // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
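  //
  // As an illustrative sketch (!"_ZTS1A" is a hypothetical type identifier),
  // the pattern being matched typically looks like:
  //
  //   %vtable = load i8*, i8** %objptr
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)
  //   ; ... a function pointer is loaded from %vtable and called ...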
  DenseSet<Value *> SeenPtrs;
  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);

    // If we found any, add them to CallSlots. Only do this if we haven't seen
    // the vtable pointer before, as it may have been CSE'd with pointers from
    // other call sites, and we don't want to process call sites multiple times.
    if (!Assumes.empty()) {
      Metadata *TypeId =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second) {
        for (DevirtCallSite Call : DevirtCalls) {
          CallSlots[{TypeId, Call.Offset}].push_back(
              {CI->getArgOperand(0), Call.CS, nullptr});
        }
      }
    }

    // We no longer need the assumes or the type test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }
}

void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);

  for (auto I = TypeCheckedLoadFunc->use_begin(),
            E = TypeCheckedLoadFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    Value *Ptr = CI->getArgOperand(0);
    Value *Offset = CI->getArgOperand(1);
    Value *TypeIdValue = CI->getArgOperand(2);
    Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();

    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<Instruction *, 1> LoadedPtrs;
    SmallVector<Instruction *, 1> Preds;
    bool HasNonCallUses = false;
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI);

    // Start by generating "pessimistic" code that explicitly loads the function
    // pointer from the vtable and performs the type check. If possible, we will
    // eliminate the load and the type check later.
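    //
    // As an illustrative sketch (!"_ZTS1A" is a hypothetical type identifier),
    // a call
    //
    //   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %ptr, i32 %o,
    //                                                  metadata !"_ZTS1A")
    //
    // is expanded here into roughly:
    //
    //   %gep = getelementptr i8, i8* %ptr, i32 %o
    //   %fptr = load i8*, i8** <bitcast of %gep>
    //   %test = call i1 @llvm.type.test(i8* %ptr, metadata !"_ZTS1A")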

    // If possible, only generate the load at the point where it is used.
    // This helps avoid unnecessary spills.
    IRBuilder<> LoadB(
        (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
    Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
    Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
    Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);

    for (Instruction *LoadedPtr : LoadedPtrs) {
      LoadedPtr->replaceAllUsesWith(LoadedValue);
      LoadedPtr->eraseFromParent();
    }

    // Likewise for the type test.
    IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
    CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});

    for (Instruction *Pred : Preds) {
      Pred->replaceAllUsesWith(TypeTestCall);
      Pred->eraseFromParent();
    }

    // We have already erased any extractvalue instructions that refer to the
    // intrinsic call, but the intrinsic may have other non-extractvalue uses
    // (although this is unlikely). In that case, explicitly build a pair and
    // RAUW it.
    if (!CI->use_empty()) {
      Value *Pair = UndefValue::get(CI->getType());
      IRBuilder<> B(CI);
      Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
      Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
      CI->replaceAllUsesWith(Pair);
    }

    // The number of unsafe uses is initially the number of uses.
    auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
    NumUnsafeUses = DevirtCalls.size();

    // If the function pointer has a non-call user, we cannot eliminate the type
    // check, as one of those users may eventually call the pointer. Increment
    // the unsafe use count to make sure it cannot reach zero.
    if (HasNonCallUses)
      ++NumUnsafeUses;
    for (DevirtCallSite Call : DevirtCalls) {
      CallSlots[{TypeId, Call.Offset}].push_back(
          {Ptr, Call.CS, &NumUnsafeUses});
    }

    CI->eraseFromParent();
  }
}

bool DevirtModule::run() {
  Function *TypeTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_test));
  Function *TypeCheckedLoadFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));

  if ((!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
       AssumeFunc->use_empty()) &&
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
    return false;

  if (TypeTestFunc && AssumeFunc)
    scanTypeTestUsers(TypeTestFunc, AssumeFunc);

  if (TypeCheckedLoadFunc)
    scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);

  // Rebuild type metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
  buildTypeIdentifierMap(Bits, TypeIdMap);
  if (TypeIdMap.empty())
    return true;

  // For each (type, offset) pair:
  bool DidVirtualConstProp = false;
  std::map<std::string, Function *> DevirtTargets;
  for (auto &S : CallSlots) {
    // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
                                   S.first.ByteOffset))
      continue;

    if (!trySingleImplDevirt(TargetsForSlot, S.second) &&
        tryVirtualConstProp(TargetsForSlot, S.second))
      DidVirtualConstProp = true;

    // Collect functions devirtualized at least for one call site for stats.
    if (RemarksEnabled)
      for (const auto &T : TargetsForSlot)
        if (T.WasDevirt)
          DevirtTargets[T.Fn->getName()] = T.Fn;
  }

  if (RemarksEnabled) {
    // Generate remarks for each devirtualized function.
    for (const auto &DT : DevirtTargets) {
      Function *F = DT.second;
      DISubprogram *SP = F->getSubprogram();
      DebugLoc DL = SP ? DebugLoc::get(SP->getScopeLine(), 0, SP) : DebugLoc();
      emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F, DL,
                             Twine("devirtualized ") + F->getName());
    }
  }

  // If we were able to eliminate all unsafe uses for a type checked load,
  // eliminate the type test by replacing it with true.
  if (TypeCheckedLoadFunc) {
    auto True = ConstantInt::getTrue(M.getContext());
    for (auto &&U : NumUnsafeUsesForTypeTest) {
      if (U.second == 0) {
        U.first->replaceAllUsesWith(True);
        U.first->eraseFromParent();
      }
    }
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}