//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via !type metadata) that the list of callees is fixed. This
// includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class and
//   each list of constant arguments: evaluate the function, store the return
//   value alongside the virtual table, and rewrite each virtual call as a load
//   from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value, replace
//   each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
//
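// As a rough illustration (hypothetical class names), given a hierarchy
//
//   struct A     { virtual bool isB() { return false; } };
//   struct B : A { bool isB() override { return true; } };
//
// the possible callees of a virtual call `x->isB()` through an `A *x` return
// different constants, so the unique return value optimization applies: the
// call can be replaced with a comparison of x's vtable pointer against the
// address of B's vtable.
//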
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/PassRegistry.h"
#include "llvm/PassSupport.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include <algorithm>
#include <cstddef>
#include <map>
#include <set>
#include <string>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of the
  // used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
  // this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
                                       : Target.TM->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}

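// setBeforeReturnValues and setAfterReturnValues translate a bit allocation
// returned by findLowestOffset into the (possibly negative) byte offset and
// bit offset used when rewriting calls. As a worked example (the numbers are
// chosen purely for illustration): a 1-bit return value allocated at bit 9
// before the address point gives OffsetByte = -(9 / 8 + 1) = -2 and
// OffsetBit = 9 % 8 = 1, i.e. bit 1 of the byte two bytes before the address
// point.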
void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

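// For the "after" case the offsets are positive and measured from the address
// point. Continuing the illustrative numbers above: a 1-bit value allocated at
// bit 9 gives OffsetByte = 9 / 8 = 1 and OffsetBit = 9 % 8 = 1.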
void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}

VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()) {}

namespace {

// A slot in a set of virtual tables. The TypeID identifies the set of virtual
// tables, and the ByteOffset is the offset in bytes from the address point to
// the virtual function pointer.
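// For example (the offset is illustrative and assumes 8-byte pointers in an
// Itanium-style vtable), the slot {!"_ZTS1A", 16} names the third virtual
// function of the type identified by !"_ZTS1A".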
struct VTableSlot {
  Metadata *TypeID;
  uint64_t ByteOffset;
};

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

} // end namespace llvm

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  // If non-null, this field points to the associated unsafe use count stored in
  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
  // of that field for details.
  unsigned *NumUnsafeUses;

  void emitRemark() {
    Function *F = CS.getCaller();
    emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F,
                           CS.getInstruction()->getDebugLoc(),
                           "devirtualized call");
  }

  void replaceAndErase(Value *New) {
    emitRemark();
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
    // This use is no longer unsafe.
    if (NumUnsafeUses)
      --*NumUnsafeUses;
  }
};

struct DevirtModule {
  Module &M;
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  // This map keeps track of the number of "unsafe" uses of a loaded function
  // pointer. The key is the associated llvm.type.test intrinsic call generated
  // by this pass. An unsafe use is one that calls the loaded function pointer
  // directly. Every time we eliminate an unsafe use (for example, by
  // devirtualizing it or by applying virtual constant propagation), we
  // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call with
  // true.
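  //
  // For example, if a pointer loaded via llvm.type.checked.load feeds two
  // virtual calls and only one of them is devirtualized, the count drops from
  // two to one, so the llvm.type.test introduced for that load must remain.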
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())) {}

  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,
      DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,
                            uint64_t ByteOffset);
  bool trySingleImplDevirt(ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          ArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;

  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // end anonymous namespace

INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}

PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
                                              ModuleAnalysisManager &) {
  if (!DevirtModule(M).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}

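// Rough sketch of the metadata consumed below (the class and identifier names
// are illustrative): a vtable global carries one !type attachment per type it
// is compatible with, of the form !{i64 <address point offset>, <type id>},
// e.g.
//
//   @_ZTV1A = constant { ... } { ... }, !type !{i64 16, !"_ZTS1A"}
//
// Operand 0 becomes TypeMemberInfo::Offset and operand 1 is the TypeIdMap key.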
void DevirtModule::buildTypeIdentifierMap(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(M.getGlobalList().size());
  SmallVector<MDNode *, 2> Types;
  for (GlobalVariable &GV : M.globals()) {
    Types.clear();
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (Types.empty())
      continue;

    VTableBits *&BitsPtr = GVToBits[&GV];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = &GV;
      Bits.back().ObjectSize =
          M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();
    }

    for (MDNode *Type : Types) {
      auto TypeID = Type->getOperand(1).get();

      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();

      TypeIdMap[TypeID].insert({BitsPtr, Offset});
    }
  }
}

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
  for (const TypeMemberInfo &TM : TypeMemberInfos) {
    if (!TM.Bits->GV->isConstant())
      return false;

    auto Init = dyn_cast<ConstantArray>(TM.Bits->GV->getInitializer());
    if (!Init)
      return false;
    ArrayType *VTableTy = Init->getType();

    uint64_t ElemSize =
        M.getDataLayout().getTypeAllocSize(VTableTy->getElementType());
    uint64_t GlobalSlotOffset = TM.Offset + ByteOffset;
    if (GlobalSlotOffset % ElemSize != 0)
      return false;

    unsigned Op = GlobalSlotOffset / ElemSize;
    if (Op >= Init->getNumOperands())
      return false;

    auto Fn = dyn_cast<Function>(Init->getOperand(Op)->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &TM});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}

bool DevirtModule::trySingleImplDevirt(
    ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    VCallSite.emitRemark();
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
    // This use is no longer unsafe.
    if (VCallSite.NumUnsafeUses)
      --*VCallSite.NumUnsafeUses;
  }
  return true;
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase(TheRetValConst);
  return true;
}

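// For i1 return values only: if exactly one of the possible callees returns
// the value being looked for (see IsOne below), the result of a call is
// determined entirely by whether the vtable pointer equals that one vtable's
// address point, so the call can be rewritten as a pointer comparison.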
bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const TypeMemberInfo *UniqueMember = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
      }
    }

    // We should have found a unique member or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueMember);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueMember->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueMember->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase(Cmp);
    }
    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables associated with the
    // type.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase(IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase(Val);
      }
    }
  }
  return true;
}

static void emitTargetsRemarks(const std::vector<VirtualCallTarget> &TargetsForSlot) {
  for (const VirtualCallTarget &Target : TargetsForSlot) {
    Function *F = Target.Fn;
    DISubprogram *SP = F->getSubprogram();
    DebugLoc DL = SP ? DebugLoc::get(SP->getScopeLine(), 0, SP) : DebugLoc();
    emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F, DL,
                           std::string("devirtualized ") + F->getName().str());
  }
}

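// Each vtable global that received constant-propagated return values is
// replaced with an anonymous global laid out roughly as
//   { [n x i8] /* before bytes */, <original initializer>, [m x i8] /* after bytes */ }
// plus an alias carrying the original name that points at the middle element,
// so existing references keep seeing the original vtable layout.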
void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Copy the original vtable's metadata to the anonymous global, adjusting
  // offsets as required.
  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}

void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
                                     Function *AssumeFunc) {
  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
  // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
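  //
  // A minimal sketch of the pattern being matched (names are illustrative):
  //
  //   %vtable = load i8*, i8** %obj
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)
  //   ... indirect call through a function pointer loaded from %vtable ...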
  DenseSet<Value *> SeenPtrs;
  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);

    // If we found any, add them to CallSlots. Only do this if we haven't seen
    // the vtable pointer before, as it may have been CSE'd with pointers from
    // other call sites, and we don't want to process call sites multiple times.
    if (!Assumes.empty()) {
      Metadata *TypeId =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second) {
        for (DevirtCallSite Call : DevirtCalls) {
          CallSlots[{TypeId, Call.Offset}].push_back(
              {CI->getArgOperand(0), Call.CS, nullptr});
        }
      }
    }

    // We no longer need the assumes or the type test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }
}

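// llvm.type.checked.load takes a vtable pointer, a byte offset and a type
// identifier, and returns the loaded function pointer together with an i1
// that is true if the pointer is a member of the type. A rough sketch of a
// use (names and the offset are illustrative):
//
//   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable, i32 8,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {i8*, i1} %pair, 0
//   %ok   = extractvalue {i8*, i1} %pair, 1
//
// The code below first lowers each such call to an explicit load plus an
// llvm.type.test, then tries to devirtualize the calls made through %fptr.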
void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);

  for (auto I = TypeCheckedLoadFunc->use_begin(),
            E = TypeCheckedLoadFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    Value *Ptr = CI->getArgOperand(0);
    Value *Offset = CI->getArgOperand(1);
    Value *TypeIdValue = CI->getArgOperand(2);
    Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();

    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<Instruction *, 1> LoadedPtrs;
    SmallVector<Instruction *, 1> Preds;
    bool HasNonCallUses = false;
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI);

    // Start by generating "pessimistic" code that explicitly loads the function
    // pointer from the vtable and performs the type check. If possible, we will
    // eliminate the load and the type check later.

    // If possible, only generate the load at the point where it is used.
    // This helps avoid unnecessary spills.
    IRBuilder<> LoadB(
        (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
    Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
    Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
    Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);

    for (Instruction *LoadedPtr : LoadedPtrs) {
      LoadedPtr->replaceAllUsesWith(LoadedValue);
      LoadedPtr->eraseFromParent();
    }

    // Likewise for the type test.
    IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
    CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});

    for (Instruction *Pred : Preds) {
      Pred->replaceAllUsesWith(TypeTestCall);
      Pred->eraseFromParent();
    }

    // We have already erased any extractvalue instructions that refer to the
    // intrinsic call, but the intrinsic may have other non-extractvalue uses
    // (although this is unlikely). In that case, explicitly build a pair and
    // RAUW it.
    if (!CI->use_empty()) {
      Value *Pair = UndefValue::get(CI->getType());
      IRBuilder<> B(CI);
      Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
      Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
      CI->replaceAllUsesWith(Pair);
    }

    // The number of unsafe uses is initially the number of uses.
    auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
    NumUnsafeUses = DevirtCalls.size();

    // If the function pointer has a non-call user, we cannot eliminate the type
    // check, as one of those users may eventually call the pointer. Increment
    // the unsafe use count to make sure it cannot reach zero.
    if (HasNonCallUses)
      ++NumUnsafeUses;
    for (DevirtCallSite Call : DevirtCalls) {
      CallSlots[{TypeId, Call.Offset}].push_back(
          {Ptr, Call.CS, &NumUnsafeUses});
    }

    CI->eraseFromParent();
  }
}

bool DevirtModule::run() {
  Function *TypeTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_test));
  Function *TypeCheckedLoadFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));

  if ((!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
       AssumeFunc->use_empty()) &&
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
    return false;

  if (TypeTestFunc && AssumeFunc)
    scanTypeTestUsers(TypeTestFunc, AssumeFunc);

  if (TypeCheckedLoadFunc)
    scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);

  // Rebuild type metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
  buildTypeIdentifierMap(Bits, TypeIdMap);
  if (TypeIdMap.empty())
    return true;

  // For each (type, offset) pair:
  bool DidVirtualConstProp = false;
  for (auto &S : CallSlots) {
    // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
                                   S.first.ByteOffset))
      continue;

    if (trySingleImplDevirt(TargetsForSlot, S.second)) {
      emitTargetsRemarks(TargetsForSlot);
      continue;
    }

    if (tryVirtualConstProp(TargetsForSlot, S.second)) {
      emitTargetsRemarks(TargetsForSlot);
      DidVirtualConstProp = true;
    }
  }

  // If we were able to eliminate all unsafe uses for a type checked load,
  // eliminate the type test by replacing it with true.
  if (TypeCheckedLoadFunc) {
    auto True = ConstantInt::getTrue(M.getContext());
    for (auto &&U : NumUnsafeUsesForTypeTest) {
      if (U.second == 0) {
        U.first->replaceAllUsesWith(True);
        U.first->eraseFromParent();
      }
    }
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}