//===-- SafeStack.cpp - Safe Stack Insertion ------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
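// As an illustrative example (not code from this pass), in a function such as
//
//   void f() {
//     int buf[16];
//     scanf("%15s", buf); // &buf escapes to an external, pointer-capturing call
//   }
//
// the accesses to 'buf' cannot be proven safe, so 'buf' is moved to the
// unsafe stack, while locals whose accesses are all statically known to be
// in bounds stay on the regular (safe) stack.
//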
//===----------------------------------------------------------------------===//

#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_os_ostream.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "safestack"

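// The unsafe stack pointer is kept either in thread-local storage (the
// default) or in an ordinary global variable, as selected with the
// -safe-stack-usp-storage option below.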
enum UnsafeStackPtrStorageVal { ThreadLocalUSP, SingleThreadUSP };

static cl::opt<UnsafeStackPtrStorageVal> USPStorage("safe-stack-usp-storage",
    cl::Hidden, cl::init(ThreadLocalUSP),
    cl::desc("Type of storage for the unsafe stack pointer"),
    cl::values(clEnumValN(ThreadLocalUSP, "thread-local",
                          "Thread-local storage"),
               clEnumValN(SingleThreadUSP, "single-thread",
                          "Non-thread-local storage"),
               clEnumValEnd));

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeByValArguments, "Number of unsafe byval arguments");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression
/// that represents the offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const Value *AllocaPtr;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const Value *AllocaPtr)
      : SCEVRewriteVisitor(SE), AllocaPtr(AllocaPtr) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AllocaPtr)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack : public FunctionPass {
  const TargetMachine *TM;
  const TargetLoweringBase *TL;
  const DataLayout *DL;
  ScalarEvolution *SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

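  /// The location holding the unsafe stack pointer for the function currently
  /// being processed; set up in runOnFunction via getOrCreateUnsafeStackPtr.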
  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that
  /// we might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// \brief Build a value representing a pointer to the unsafe stack pointer.
  Value *getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F);

  /// \brief Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<Argument *> &ByValArguments,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// \brief Calculate the allocation size of a given alloca. Returns 0 if the
  /// size cannot be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst *AI);

  /// \brief Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<Argument *> ByValArguments,
                                        ArrayRef<ReturnInst *> Returns,
                                        Instruction *BasePointer);

  /// \brief Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const Value *AllocaPtr, uint64_t AllocaSize);
  bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
                    uint64_t AllocaSize);

public:
  static char ID; // Pass identification, replacement for typeid.
  SafeStack(const TargetMachine *TM)
      : FunctionPass(ID), TM(TM), TL(nullptr), DL(nullptr) {
    initializeSafeStackPass(*PassRegistry::getPassRegistry());
  }
  SafeStack() : SafeStack(nullptr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<ScalarEvolutionWrapperPass>();
  }

  bool doInitialization(Module &M) override {
    DL = &M.getDataLayout();

    StackPtrTy = Type::getInt8PtrTy(M.getContext());
    IntPtrTy = DL->getIntPtrType(M.getContext());
    Int32Ty = Type::getInt32Ty(M.getContext());
    Int8Ty = Type::getInt8Ty(M.getContext());

    return false;
  }

  bool runOnFunction(Function &F) override;
}; // class SafeStack

uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst *AI) {
  uint64_t Size = DL->getTypeAllocSize(AI->getAllocatedType());
  if (AI->isArrayAllocation()) {
    auto C = dyn_cast<ConstantInt>(AI->getArraySize());
    if (!C)
      return 0;
    Size *= C->getZExtValue();
  }
  return Size;
}

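/// Check whether the accessed range [Addr, Addr + AccessSize) provably stays
/// within the object of size \p AllocaSize that starts at \p AllocaPtr. The
/// address is rewritten as an offset from the alloca and its unsigned SCEV
/// range is compared against the object's range.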
bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
                             const Value *AllocaPtr, uint64_t AllocaSize) {
  AllocaOffsetRewriter Rewriter(*SE, AllocaPtr);
  const SCEV *Expr = Rewriter.visit(SE->getSCEV(Addr));

  uint64_t BitWidth = SE->getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE->getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AccessSize));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
  bool Safe = AllocaRange.contains(AccessRange);

  DEBUG(dbgs() << "[SafeStack] "
               << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
               << *AllocaPtr << "\n"
               << " Access " << *Addr << "\n"
               << " SCEV " << *Expr
               << " U: " << SE->getUnsignedRange(Expr)
               << ", S: " << SE->getSignedRange(Expr) << "\n"
               << " Range " << AccessRange << "\n"
               << " AllocaRange " << AllocaRange << "\n"
               << " " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}

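/// Check whether a memory intrinsic use of \p AllocaPtr is safe. A write
/// through the intrinsic's destination operand must be provably in bounds,
/// which requires a constant length; any other use of the pointer by the
/// intrinsic is treated as safe.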
bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const Value *AllocaPtr,
                                   uint64_t AllocaSize) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AllocaPtr, AllocaSize);
}

/// Check whether a given allocation must be put on the safe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(AllocaPtr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load: {
        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getType()), AllocaPtr,
                          AllocaSize))
          return false;
        break;
      }
      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store: {
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                       << "\n store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getOperand(0)->getType()),
                          AllocaPtr, AllocaSize))
          return false;
        break;
      }
      case Instruction::Ret: {
        // Information leak.
        return false;
      }

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AllocaPtr, AllocaSize)) {
            DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                         << "\n unsafe memintrinsic: " << *I
                         << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) && (CS.doesNotAccessMemory(A - B) ||
                                               CS.doesNotAccessMemory()))) {
              DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                           << "\n unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}

Value *SafeStack::getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F) {
  // Check if there is a target-specific location for the unsafe stack pointer.
  if (TL)
    if (Value *V = TL->getSafeStackPointerLocation(IRB))
      return V;

  // Otherwise, assume the target links with compiler-rt, which provides a
  // thread-local variable with a magic name.
  Module &M = *F.getParent();
  const char *UnsafeStackPtrVar = "__safestack_unsafe_stack_ptr";
  auto UnsafeStackPtr =
      dyn_cast_or_null<GlobalVariable>(M.getNamedValue(UnsafeStackPtrVar));

  bool UseTLS = USPStorage == ThreadLocalUSP;

  if (!UnsafeStackPtr) {
    auto TLSModel = UseTLS ?
        GlobalValue::InitialExecTLSModel :
        GlobalValue::NotThreadLocal;
    // The global variable is not defined yet, define it ourselves.
    // We use the initial-exec TLS model because we do not support the
    // variable living anywhere other than in the main executable.
    UnsafeStackPtr = new GlobalVariable(
        M, StackPtrTy, false, GlobalValue::ExternalLinkage, nullptr,
        UnsafeStackPtrVar, nullptr, TLSModel);
  } else {
    // The variable exists, check its type and attributes.
    if (UnsafeStackPtr->getValueType() != StackPtrTy)
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must have void* type");
    if (UseTLS != UnsafeStackPtr->isThreadLocal())
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must " +
                         (UseTLS ? "" : "not ") + "be thread-local");
  }
  return UnsafeStackPtr;
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<Argument *> &ByValArguments,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      uint64_t Size = getStaticAllocaAllocationSize(AI);
      if (IsSafeStackAlloca(AI, Size))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        llvm::report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
  for (Argument &Arg : F.args()) {
    if (!Arg.hasByValAttr())
      continue;
    uint64_t Size =
        DL->getTypeStoreSize(Arg.getType()->getPointerElementType());
    if (IsSafeStackAlloca(&Arg, Size))
      continue;

    ++NumUnsafeByValArguments;
    ByValArguments.push_back(&Arg);
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  assert(StaticTop && "The stack top isn't set.");

  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop) {
    // If we also have dynamic allocas, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");
    IRB.CreateStore(StaticTop, DynamicTop);
  }

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    IRB.SetInsertPoint(I->getNextNode());
    Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
    IRB.CreateStore(CurrentTop, UnsafeStackPtr);
  }

  return DynamicTop;
}

/// We explicitly compute and set the unsafe stack layout for all unsafe
/// static alloca instructions. We save the unsafe "base pointer" in the
/// prologue into a local variable and restore it in the epilogue.
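///
/// The unsafe frame grows down from the saved base pointer: each object ends
/// up at BasePointer - StaticOffset, with StaticOffset advanced and re-aligned
/// for every object placed.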
Value *SafeStack::moveStaticAllocasToUnsafeStack(
    IRBuilder<> &IRB, Function &F, ArrayRef<AllocaInst *> StaticAllocas,
    ArrayRef<Argument *> ByValArguments, ArrayRef<ReturnInst *> Returns,
    Instruction *BasePointer) {
  if (StaticAllocas.empty() && ByValArguments.empty())
    return BasePointer;

  DIBuilder DIB(*F.getParent());

  // Compute maximum alignment among static objects on the unsafe stack.
  unsigned MaxAlignment = 0;
  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();
    unsigned Align = std::max((unsigned)DL->getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }
  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }

  if (MaxAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    assert(isPowerOf2_32(MaxAlignment));
    IRB.SetInsertPoint(BasePointer->getNextNode());
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(MaxAlignment - 1))),
        StackPtrTy));
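    // For example, if MaxAlignment is 32 the mask above is ~uint64_t(31),
    // which rounds the base pointer down to the next 32-byte boundary.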
  }

  int64_t StaticOffset = 0; // Current stack top.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();

    uint64_t Size = DL->getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align = std::max((unsigned)DL->getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = alignTo(StaticOffset, Align);

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewArg = IRB.CreateBitCast(Off, Arg->getType(),
                                      Arg->getName() + ".unsafe-byval");

    // Replace the byval argument with the new location.
    replaceDbgDeclare(Arg, BasePointer, BasePointer->getNextNode(), DIB,
                      /*Deref=*/true, -StaticOffset);
    Arg->replaceAllUsesWith(NewArg);
    IRB.SetInsertPoint(cast<Instruction>(NewArg)->getNextNode());
    IRB.CreateMemCpy(Off, Arg, Size, Arg->getParamAlignment());
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);

    Type *Ty = AI->getAllocatedType();
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = alignTo(StaticOffset, Align);

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewAI = IRB.CreateBitCast(Off, AI->getType(), AI->getName());
    if (AI->hasName() && isa<Instruction>(NewAI))
      cast<Instruction>(NewAI)->takeName(AI);

    // Replace the alloca with the new location.
    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, /*Deref=*/true,
                               -StaticOffset);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  StaticOffset = alignTo(StaticOffset, StackAlignment);

  // Update the unsafe stack pointer to point below the static allocations;
  // this store is emitted once, in the function prologue.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -StaticOffset),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL->getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);

    // Save the stack pointer.
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(NewTop, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, /*Deref=*/true);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}

bool SafeStack::runOnFunction(Function &F) {
  DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

  if (!F.hasFnAttribute(Attribute::SafeStack)) {
    DEBUG(dbgs() << "[SafeStack] safestack is not requested"
                    " for this function\n");
    return false;
  }

  if (F.isDeclaration()) {
    DEBUG(dbgs() << "[SafeStack] function definition"
                    " is not available\n");
    return false;
  }

  TL = TM ? TM->getSubtargetImpl(F)->getTargetLowering() : nullptr;
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();

  {
    // Make sure the regular stack protector won't run on this function
    // (safestack attribute takes precedence).
    AttrBuilder B;
    B.addAttribute(Attribute::StackProtect)
        .addAttribute(Attribute::StackProtectReq)
        .addAttribute(Attribute::StackProtectStrong);
    F.removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F.getContext(), AttributeSet::FunctionIndex, B));
  }

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<Argument *, 4> ByValArguments;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where the stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, ByValArguments, Returns,
            StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      ByValArguments.empty() && StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty() ||
      !ByValArguments.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  UnsafeStackPtr = getOrCreateUnsafeStackPtr(IRB, F);

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  // The top of the unsafe stack after all unsafe static allocas are allocated.
  Value *StaticTop = moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas,
                                                    ByValArguments, Returns,
                                                    BasePointer);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore the stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  // Restore the unsafe stack pointer before each return.
  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}

} // anonymous namespace

char SafeStack::ID = 0;
INITIALIZE_TM_PASS_BEGIN(SafeStack, "safe-stack",
                         "Safe Stack instrumentation pass", false, false)
INITIALIZE_TM_PASS_END(SafeStack, "safe-stack",
                       "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass(const llvm::TargetMachine *TM) {
  return new SafeStack(TM);
}