//===- SafeStack.cpp - Safe Stack Insertion -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
//===----------------------------------------------------------------------===//

#include "SafeStackColoring.h"
#include "SafeStackLayout.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/Utils/Local.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string>
#include <utility>

using namespace llvm;
using namespace llvm::safestack;

#define DEBUG_TYPE "safe-stack"

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeByValArguments, "Number of unsafe byval arguments");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

/// Use __safestack_pointer_address even if the platform has a faster way of
/// accessing the safe stack pointer.
static cl::opt<bool>
    SafeStackUsePointerAddress("safestack-use-pointer-address",
                               cl::init(false), cl::Hidden);

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression
/// that represents the offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const Value *AllocaPtr;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const Value *AllocaPtr)
      : SCEVRewriteVisitor(SE), AllocaPtr(AllocaPtr) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AllocaPtr)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack {
  Function &F;
  const TargetLoweringBase &TL;
  const DataLayout &DL;
  ScalarEvolution &SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that
  /// we might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// Return the value of the stack canary.
  Value *getStackGuard(IRBuilder<> &IRB, Function &F);

  /// Load stack guard from the frame and check if it has changed.
  void checkStackGuard(IRBuilder<> &IRB, Function &F, ReturnInst &RI,
                       AllocaInst *StackGuardSlot, Value *StackGuard);

  /// Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<Argument *> &ByValArguments,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// Calculate the allocation size of a given alloca. Returns 0 if the
  /// size cannot be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst* AI);

  /// Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<Argument *> ByValArguments,
                                        ArrayRef<ReturnInst *> Returns,
                                        Instruction *BasePointer,
                                        AllocaInst *StackGuardSlot);

  /// Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const Value *AllocaPtr, uint64_t AllocaSize);
  bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
                    uint64_t AllocaSize);

  bool ShouldInlinePointerAddress(CallSite &CS);
  void TryInlinePointerAddress();

public:
  SafeStack(Function &F, const TargetLoweringBase &TL, const DataLayout &DL,
            ScalarEvolution &SE)
      : F(F), TL(TL), DL(DL), SE(SE),
        StackPtrTy(Type::getInt8PtrTy(F.getContext())),
        IntPtrTy(DL.getIntPtrType(F.getContext())),
        Int32Ty(Type::getInt32Ty(F.getContext())),
        Int8Ty(Type::getInt8Ty(F.getContext())) {}

  // Run the transformation on the associated function.
  // Returns whether the function was changed.
  bool run();
};

uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst* AI) {
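  // Element size times the (constant) array length; a non-constant array
  // length cannot be sized statically, so report 0 in that case.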
  uint64_t Size = DL.getTypeAllocSize(AI->getAllocatedType());
  if (AI->isArrayAllocation()) {
    auto C = dyn_cast<ConstantInt>(AI->getArraySize());
    if (!C)
      return 0;
    Size *= C->getZExtValue();
  }
  return Size;
}

bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
                             const Value *AllocaPtr, uint64_t AllocaSize) {
  AllocaOffsetRewriter Rewriter(SE, AllocaPtr);
  const SCEV *Expr = Rewriter.visit(SE.getSCEV(Addr));

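  // The rewritten SCEV gives the access start as an offset from the alloca
  // base. Check that the whole access range [start, start + AccessSize) is
  // contained in [0, AllocaSize).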
  uint64_t BitWidth = SE.getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE.getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AccessSize));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
  bool Safe = AllocaRange.contains(AccessRange);

  LLVM_DEBUG(
      dbgs() << "[SafeStack] "
             << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
             << *AllocaPtr << "\n"
             << " Access " << *Addr << "\n"
             << " SCEV " << *Expr
             << " U: " << SE.getUnsignedRange(Expr)
             << ", S: " << SE.getSignedRange(Expr) << "\n"
             << " Range " << AccessRange << "\n"
             << " AllocaRange " << AllocaRange << "\n"
             << " " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}

bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const Value *AllocaPtr,
                                   uint64_t AllocaSize) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AllocaPtr, AllocaSize);
}

/// Check whether a given allocation must be put on the safe
/// stack or not. The function analyzes all uses of AllocaPtr and checks
/// whether it is only accessed in a memory safe way (as decided statically).
bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(AllocaPtr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load:
        if (!IsAccessSafe(UI, DL.getTypeStoreSize(I->getType()), AllocaPtr,
                          AllocaSize))
          return false;
        break;

      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store:
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          LLVM_DEBUG(dbgs()
                     << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                     << "\n store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(UI, DL.getTypeStoreSize(I->getOperand(0)->getType()),
                          AllocaPtr, AllocaSize))
          return false;
        break;

      case Instruction::Ret:
        // Information leak.
        return false;

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AllocaPtr, AllocaSize)) {
            LLVM_DEBUG(dbgs()
                       << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                       << "\n unsafe memintrinsic: " << *I << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) && (CS.doesNotAccessMemory(A - B) ||
                                               CS.doesNotAccessMemory()))) {
              LLVM_DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                                << "\n unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}

Value *SafeStack::getStackGuard(IRBuilder<> &IRB, Function &F) {
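  // Prefer a target-specific stack guard location if the target provides one;
  // otherwise fall back to the conventional __stack_chk_guard global.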
  Value *StackGuardVar = TL.getIRStackGuard(IRB);
  if (!StackGuardVar)
    StackGuardVar =
        F.getParent()->getOrInsertGlobal("__stack_chk_guard", StackPtrTy);
  return IRB.CreateLoad(StackGuardVar, "StackGuard");
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<Argument *> &ByValArguments,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      uint64_t Size = getStaticAllocaAllocationSize(AI);
      if (IsSafeStackAlloca(AI, Size))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
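
  // A byval argument is a stack copy of the caller's value; like allocas, it
  // must be moved to the unsafe stack if any of its uses cannot be proven
  // safe.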
  for (Argument &Arg : F.args()) {
    if (!Arg.hasByValAttr())
      continue;
    uint64_t Size =
        DL.getTypeStoreSize(Arg.getType()->getPointerElementType());
    if (IsSafeStackAlloca(&Arg, Size))
      continue;

    ++NumUnsafeByValArguments;
    ByValArguments.push_back(&Arg);
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  assert(StaticTop && "The stack top isn't set.");

  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop) {
    // If we also have dynamic alloca's, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");
    IRB.CreateStore(StaticTop, DynamicTop);
  }

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    IRB.SetInsertPoint(I->getNextNode());
    Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
    IRB.CreateStore(CurrentTop, UnsafeStackPtr);
  }

  return DynamicTop;
}

void SafeStack::checkStackGuard(IRBuilder<> &IRB, Function &F, ReturnInst &RI,
                                AllocaInst *StackGuardSlot, Value *StackGuard) {
  Value *V = IRB.CreateLoad(StackGuardSlot);
  Value *Cmp = IRB.CreateICmpNE(StackGuard, V);

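  // The check is expected to pass, so weight the branch heavily towards the
  // fall-through path and route the failing case to a __stack_chk_fail call.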
  auto SuccessProb = BranchProbabilityInfo::getBranchProbStackProtector(true);
  auto FailureProb = BranchProbabilityInfo::getBranchProbStackProtector(false);
  MDNode *Weights = MDBuilder(F.getContext())
                        .createBranchWeights(SuccessProb.getNumerator(),
                                             FailureProb.getNumerator());
  Instruction *CheckTerm =
      SplitBlockAndInsertIfThen(Cmp, &RI,
                                /* Unreachable */ true, Weights);
  IRBuilder<> IRBFail(CheckTerm);
  // FIXME: respect -fsanitize-trap / -ftrap-function here?
  Constant *StackChkFail = F.getParent()->getOrInsertFunction(
      "__stack_chk_fail", IRB.getVoidTy());
  IRBFail.CreateCall(StackChkFail, {});
}

/// We explicitly compute and set the unsafe stack layout for all unsafe
/// static alloca instructions. We save the unsafe "base pointer" in the
/// prologue into a local variable and restore it in the epilogue.
Value *SafeStack::moveStaticAllocasToUnsafeStack(
    IRBuilder<> &IRB, Function &F, ArrayRef<AllocaInst *> StaticAllocas,
    ArrayRef<Argument *> ByValArguments, ArrayRef<ReturnInst *> Returns,
    Instruction *BasePointer, AllocaInst *StackGuardSlot) {
  if (StaticAllocas.empty() && ByValArguments.empty())
    return BasePointer;

  DIBuilder DIB(*F.getParent());

  StackColoring SSC(F, StaticAllocas);
  SSC.run();
  SSC.removeAllMarkers();

  // Unsafe stack always grows down.
  StackLayout SSL(StackAlignment);
  if (StackGuardSlot) {
    Type *Ty = StackGuardSlot->getAllocatedType();
    unsigned Align =
        std::max(DL.getPrefTypeAlignment(Ty), StackGuardSlot->getAlignment());
    SSL.addObject(StackGuardSlot, getStaticAllocaAllocationSize(StackGuardSlot),
                  Align, SSC.getFullLiveRange());
  }

  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();
    uint64_t Size = DL.getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align = std::max((unsigned)DL.getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());
    SSL.addObject(Arg, Size, Align, SSC.getFullLiveRange());
  }

  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL.getPrefTypeAlignment(Ty), AI->getAlignment());

    SSL.addObject(AI, Size, Align, SSC.getLiveRange(AI));
  }

  SSL.computeLayout();
  unsigned FrameAlignment = SSL.getFrameAlignment();

  // FIXME: tell SSL that we start at a less-than-MaxAlignment aligned location
  // (AlignmentSkew).
  if (FrameAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    assert(isPowerOf2_32(FrameAlignment));
    IRB.SetInsertPoint(BasePointer->getNextNode());
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(FrameAlignment - 1))),
        StackPtrTy));
  }

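  // From this point on, every unsafe object is addressed as a negative offset
  // from the (possibly re-aligned) base pointer, since the unsafe stack grows
  // down.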
  IRB.SetInsertPoint(BasePointer->getNextNode());

  if (StackGuardSlot) {
    unsigned Offset = SSL.getObjectOffset(StackGuardSlot);
    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -Offset));
    Value *NewAI =
        IRB.CreateBitCast(Off, StackGuardSlot->getType(), "StackGuardSlot");

    // Replace alloca with the new location.
    StackGuardSlot->replaceAllUsesWith(NewAI);
    StackGuardSlot->eraseFromParent();
  }

  for (Argument *Arg : ByValArguments) {
    unsigned Offset = SSL.getObjectOffset(Arg);
    unsigned Align = SSL.getObjectAlignment(Arg);
    Type *Ty = Arg->getType()->getPointerElementType();

    uint64_t Size = DL.getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -Offset));
    Value *NewArg = IRB.CreateBitCast(Off, Arg->getType(),
                                      Arg->getName() + ".unsafe-byval");

    // Replace alloca with the new location.
    replaceDbgDeclare(Arg, BasePointer, BasePointer->getNextNode(), DIB,
                      DIExpression::NoDeref, -Offset, DIExpression::NoDeref);
    Arg->replaceAllUsesWith(NewArg);
    IRB.SetInsertPoint(cast<Instruction>(NewArg)->getNextNode());
    IRB.CreateMemCpy(Off, Align, Arg, Arg->getParamAlignment(), Size);
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);
    unsigned Offset = SSL.getObjectOffset(AI);

    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, DIExpression::NoDeref,
                               -Offset, DIExpression::NoDeref);
    replaceDbgValueForAlloca(AI, BasePointer, DIB, -Offset);

    // Replace uses of the alloca with the new location.
    // Insert address calculation close to each use to work around PR27844.
    std::string Name = std::string(AI->getName()) + ".unsafe";
    while (!AI->use_empty()) {
      Use &U = *AI->use_begin();
      Instruction *User = cast<Instruction>(U.getUser());

      Instruction *InsertBefore;
      if (auto *PHI = dyn_cast<PHINode>(User))
        InsertBefore = PHI->getIncomingBlock(U)->getTerminator();
      else
        InsertBefore = User;

      IRBuilder<> IRBUser(InsertBefore);
      Value *Off = IRBUser.CreateGEP(BasePointer, // BasePointer is i8*
                                     ConstantInt::get(Int32Ty, -Offset));
      Value *Replacement = IRBUser.CreateBitCast(Off, AI->getType(), Name);

      if (auto *PHI = dyn_cast<PHINode>(User)) {
        // PHI nodes may have multiple incoming edges from the same BB (why??),
        // all must be updated at once with the same incoming value.
        auto *BB = PHI->getIncomingBlock(U);
        for (unsigned I = 0; I < PHI->getNumIncomingValues(); ++I)
          if (PHI->getIncomingBlock(I) == BB)
            PHI->setIncomingValue(I, Replacement);
      } else {
        U.set(Replacement);
      }
    }

    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  unsigned FrameSize = alignTo(SSL.getFrameSize(), StackAlignment);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -FrameSize),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL.getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL.getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);

    // Save the stack pointer.
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(NewTop, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, DIExpression::NoDeref, 0,
                               DIExpression::NoDeref);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}

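/// Decide whether the __safestack_pointer_address call may be inlined:
/// always_inline callees are inlined whenever viable, while interposable,
/// noinline, or no-inline call sites are left alone.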
bool SafeStack::ShouldInlinePointerAddress(CallSite &CS) {
  Function *Callee = CS.getCalledFunction();
  if (CS.hasFnAttr(Attribute::AlwaysInline) && isInlineViable(*Callee))
    return true;
  if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
      CS.isNoInline())
    return false;
  return true;
}

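/// If the unsafe stack pointer was produced by a call (i.e. through
/// __safestack_pointer_address), try to inline that call into the prologue.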
void SafeStack::TryInlinePointerAddress() {
  if (!isa<CallInst>(UnsafeStackPtr))
    return;

  if (F.hasFnAttribute(Attribute::OptimizeNone))
    return;

  CallSite CS(UnsafeStackPtr);
  Function *Callee = CS.getCalledFunction();
  if (!Callee || Callee->isDeclaration())
    return;

  if (!ShouldInlinePointerAddress(CS))
    return;

  InlineFunctionInfo IFI;
  InlineFunction(CS, IFI);
}

bool SafeStack::run() {
  assert(F.hasFnAttribute(Attribute::SafeStack) &&
         "Can't run SafeStack on a function without the attribute");
  assert(!F.isDeclaration() && "Can't run SafeStack on a function declaration");

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<Argument *, 4> ByValArguments;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, ByValArguments, Returns,
            StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      ByValArguments.empty() && StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty() ||
      !ByValArguments.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  if (SafeStackUsePointerAddress) {
    Value *Fn = F.getParent()->getOrInsertFunction(
        "__safestack_pointer_address", StackPtrTy->getPointerTo(0));
    UnsafeStackPtr = IRB.CreateCall(Fn);
  } else {
    UnsafeStackPtr = TL.getSafeStackPointerLocation(IRB);
  }

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  AllocaInst *StackGuardSlot = nullptr;
  // FIXME: implement weaker forms of stack protector.
  if (F.hasFnAttribute(Attribute::StackProtect) ||
      F.hasFnAttribute(Attribute::StackProtectStrong) ||
      F.hasFnAttribute(Attribute::StackProtectReq)) {
    Value *StackGuard = getStackGuard(IRB, F);
    StackGuardSlot = IRB.CreateAlloca(StackPtrTy, nullptr);
    IRB.CreateStore(StackGuard, StackGuardSlot);

    for (ReturnInst *RI : Returns) {
      IRBuilder<> IRBRet(RI);
      checkStackGuard(IRBRet, F, *RI, StackGuardSlot, StackGuard);
    }
  }

  // The top of the unsafe stack after all unsafe static allocas are
  // allocated.
  Value *StaticTop =
      moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas, ByValArguments,
                                     Returns, BasePointer, StackGuardSlot);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  // Restore the unsafe stack pointer before each return.
  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  TryInlinePointerAddress();

  LLVM_DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}

class SafeStackLegacyPass : public FunctionPass {
  const TargetMachine *TM = nullptr;

public:
  static char ID; // Pass identification, replacement for typeid.

  SafeStackLegacyPass() : FunctionPass(ID) {
    initializeSafeStackLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TargetPassConfig>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<AssumptionCacheTracker>();
  }

  bool runOnFunction(Function &F) override {
    LLVM_DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

    if (!F.hasFnAttribute(Attribute::SafeStack)) {
      LLVM_DEBUG(dbgs() << "[SafeStack] safestack is not requested"
                           " for this function\n");
      return false;
    }

    if (F.isDeclaration()) {
      LLVM_DEBUG(dbgs() << "[SafeStack] function definition"
                           " is not available\n");
      return false;
    }

    TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
    auto *TL = TM->getSubtargetImpl(F)->getTargetLowering();
    if (!TL)
      report_fatal_error("TargetLowering instance is required");

    auto *DL = &F.getParent()->getDataLayout();
    auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
    auto &ACT = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);

    // Compute DT and LI only for functions that have the attribute.
    // This is only useful because the legacy pass manager doesn't let us
    // compute analyses lazily.
    // In the backend pipeline, nothing preserves DT before SafeStack, so we
    // would otherwise always compute it wastefully, even if there is no
    // function with the safestack attribute.
    DominatorTree DT(F);
    LoopInfo LI(DT);

    ScalarEvolution SE(F, TLI, ACT, DT, LI);

    return SafeStack(F, *TL, *DL, SE).run();
  }
};

} // end anonymous namespace

char SafeStackLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(SafeStackLegacyPass, DEBUG_TYPE,
                      "Safe Stack instrumentation pass", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_END(SafeStackLegacyPass, DEBUG_TYPE,
                    "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass() { return new SafeStackLegacyPass(); }