//===-- SafeStack.cpp - Safe Stack Insertion ------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
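// Illustrative example (not part of the original header): in a function such
// as
//
//   void f() { char buf[64]; scanf("%s", buf); }
//
// the address of 'buf' escapes to an external callee, so 'buf' cannot be
// proven safe and is moved to the unsafe stack, while locals that are only
// accessed directly stay on the regular (safe) stack.
//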
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_os_ostream.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "safestack"

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression
/// that represents the offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
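///
/// For example (an illustrative sketch, not part of the original comment):
/// for an address computed as "getelementptr i8, i8* %buf, i64 %i", where
/// %buf is the alloca, the SCEV is roughly (%buf + %i); replacing %buf with
/// zero leaves just %i, i.e. the byte offset of the access into the alloca.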
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const AllocaInst *AI;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const AllocaInst *AI)
      : SCEVRewriteVisitor(SE), AI(AI) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AI)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack : public FunctionPass {
  const TargetMachine *TM;
  const TargetLoweringBase *TL;
  const DataLayout *DL;
  ScalarEvolution *SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that
  /// we might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// \brief Build a value representing a pointer to the unsafe stack pointer.
  Value *getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F);

  /// \brief Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// \brief Calculate the allocation size of a given alloca. Returns 0 if the
  /// size cannot be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst *AI);

  /// \brief Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<ReturnInst *> Returns);

  /// \brief Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const AllocaInst *AI);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const AllocaInst *AI);
  bool IsAccessSafe(Value *Addr, uint64_t Size, const AllocaInst *AI);

public:
  static char ID; // Pass identification, replacement for typeid.
  SafeStack(const TargetMachine *TM)
      : FunctionPass(ID), TM(TM), TL(nullptr), DL(nullptr) {
    initializeSafeStackPass(*PassRegistry::getPassRegistry());
  }
  SafeStack() : SafeStack(nullptr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<ScalarEvolutionWrapperPass>();
  }

  bool doInitialization(Module &M) override {
    DL = &M.getDataLayout();

    StackPtrTy = Type::getInt8PtrTy(M.getContext());
    IntPtrTy = DL->getIntPtrType(M.getContext());
    Int32Ty = Type::getInt32Ty(M.getContext());
    Int8Ty = Type::getInt8Ty(M.getContext());

    return false;
  }

  bool runOnFunction(Function &F) override;
}; // class SafeStack

uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst *AI) {
  uint64_t Size = DL->getTypeAllocSize(AI->getAllocatedType());
  if (AI->isArrayAllocation()) {
    auto C = dyn_cast<ConstantInt>(AI->getArraySize());
    if (!C)
      return 0;
    Size *= C->getZExtValue();
  }
  return Size;
}

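// Worked example (illustrative, not part of the original source): for an
// 8-byte alloca accessed by a 4-byte load whose start offset has unsigned
// range [0, 4], AccessRange covers bytes 0..7 and is contained in AllocaRange
// [0, 8), so the access is safe; if the start offset could also be 5, the
// access could reach byte 8 and the alloca would be treated as unsafe.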
bool SafeStack::IsAccessSafe(Value *Addr, uint64_t Size,
                             const AllocaInst *AI) {
  AllocaOffsetRewriter Rewriter(*SE, AI);
  const SCEV *Expr = Rewriter.visit(SE->getSCEV(Addr));

  uint64_t BitWidth = SE->getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE->getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, Size));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange = ConstantRange(
      APInt(BitWidth, 0), APInt(BitWidth, getStaticAllocaAllocationSize(AI)));
  bool Safe = AllocaRange.contains(AccessRange);

  DEBUG(dbgs() << "[SafeStack] Alloca " << *AI << "\n"
               << "            Access " << *Addr << "\n"
               << "            SCEV " << *Expr
               << " U: " << SE->getUnsignedRange(Expr)
               << ", S: " << SE->getSignedRange(Expr) << "\n"
               << "            Range " << AccessRange << "\n"
               << "            AllocaRange " << AllocaRange << "\n"
               << "            " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}

bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const AllocaInst *AI) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AI);
}

/// Check whether a given alloca instruction (AI) should be put on the safe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
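///
/// For example (illustrative, not part of the original comment): an alloca is
/// treated as unsafe if its address is stored to memory, returned from the
/// function, or passed to a call that may capture or write through it;
/// lifetime markers and provably in-bounds loads and stores keep it eligible
/// for the safe stack.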
bool SafeStack::IsSafeStackAlloca(const AllocaInst *AI) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Instruction *, 8> WorkList;
  WorkList.push_back(AI);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Instruction *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load: {
        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getType()), AI))
          return false;
        break;
      }
      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store: {
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AI
                       << "\n            store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(
                UI, DL->getTypeStoreSize(I->getOperand(0)->getType()), AI))
          return false;
        break;
      }
      case Instruction::Ret: {
        // Information leak.
        return false;
      }

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AI)) {
            DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AI
                         << "\n            unsafe memintrinsic: " << *I
                         << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) &&
                  (CS.doesNotAccessMemory(A - B) || CS.doesNotAccessMemory()))) {
              DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AI
                           << "\n            unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe; we can place it on the safe stack.
  return true;
}

Value *SafeStack::getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F) {
  // Check if there is a target-specific location for the unsafe stack pointer.
  if (TL)
    if (Value *V = TL->getSafeStackPointerLocation(IRB))
      return V;

  // Otherwise, assume the target links with compiler-rt, which provides a
  // thread-local variable with a magic name.
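  // (Assumption for illustration: the compiler-rt side is expected to define
  // something roughly equivalent to
  //   __thread void *__safestack_unsafe_stack_ptr;
  // this pass only declares the variable and loads/stores through it.)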
  Module &M = *F.getParent();
  const char *UnsafeStackPtrVar = "__safestack_unsafe_stack_ptr";
  auto UnsafeStackPtr =
      dyn_cast_or_null<GlobalVariable>(M.getNamedValue(UnsafeStackPtrVar));

  if (!UnsafeStackPtr) {
    // The global variable is not defined yet, define it ourselves.
    // We use the initial-exec TLS model because we do not support the
    // variable living anywhere other than in the main executable.
    UnsafeStackPtr = new GlobalVariable(
        M, StackPtrTy, false, GlobalValue::ExternalLinkage, nullptr,
        UnsafeStackPtrVar, nullptr, GlobalValue::InitialExecTLSModel);
  } else {
    // The variable exists, check its type and attributes.
    if (UnsafeStackPtr->getValueType() != StackPtrTy)
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must have void* type");
    if (!UnsafeStackPtr->isThreadLocal())
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must be thread-local");
  }
  return UnsafeStackPtr;
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      if (IsSafeStackAlloca(AI))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        llvm::report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop)
    // If we also have dynamic allocas, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");

  if (!StaticTop)
    // We need the original unsafe stack pointer value, even if there are
    // no unsafe static allocas.
    StaticTop = IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");

  if (NeedDynamicTop)
    IRB.CreateStore(StaticTop, DynamicTop);

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    IRB.SetInsertPoint(cast<Instruction>(I->getNextNode()));
    Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
    IRB.CreateStore(CurrentTop, UnsafeStackPtr);
  }

  return DynamicTop;
}

Value *
SafeStack::moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                          ArrayRef<AllocaInst *> StaticAllocas,
                                          ArrayRef<ReturnInst *> Returns) {
  if (StaticAllocas.empty())
    return nullptr;

  DIBuilder DIB(*F.getParent());

  // We explicitly compute and set the unsafe stack layout for all unsafe
  // static alloca instructions. We save the unsafe "base pointer" in the
  // prologue into a local variable and restore it in the epilogue.

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  // Compute maximum alignment among static objects on the unsafe stack.
  unsigned MaxAlignment = 0;
  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }

  if (MaxAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    assert(isPowerOf2_32(MaxAlignment));
    IRB.SetInsertPoint(cast<Instruction>(BasePointer->getNextNode()));
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(MaxAlignment - 1))),
        StackPtrTy));
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  int64_t StaticOffset = 0; // Current stack top.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);

    Type *Ty = AI->getAllocatedType();
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = RoundUpToAlignment(StaticOffset, Align);
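    // Illustrative arithmetic (not part of the original comments): a first
    // object with Size == 5 and Align == 8 advances StaticOffset to 5 and
    // then rounds it up to 8, so the object is placed at BasePointer - 8.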

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewAI = IRB.CreateBitCast(Off, AI->getType(), AI->getName());
    if (AI->hasName() && isa<Instruction>(NewAI))
      cast<Instruction>(NewAI)->takeName(AI);

    // Replace the alloca with the new location.
    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, /*Deref=*/true,
                               -StaticOffset);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  StaticOffset = RoundUpToAlignment(StaticOffset, StackAlignment);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(cast<Instruction>(BasePointer->getNextNode()));

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -StaticOffset),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL->getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);
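    // Illustrative note (not part of the original comments): with Align == 16
    // the mask clears the low four bits of SP, rounding the new unsafe stack
    // top down to a 16-byte boundary.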

    // Save the stack pointer.
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(NewTop, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, /*Deref=*/true);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}

bool SafeStack::runOnFunction(Function &F) {
  DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

  if (!F.hasFnAttribute(Attribute::SafeStack)) {
    DEBUG(dbgs() << "[SafeStack] safestack is not requested"
                    " for this function\n");
    return false;
  }

  if (F.isDeclaration()) {
    DEBUG(dbgs() << "[SafeStack] function definition"
                    " is not available\n");
    return false;
  }

  TL = TM ? TM->getSubtargetImpl(F)->getTargetLowering() : nullptr;
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();

  {
    // Make sure the regular stack protector won't run on this function
    // (safestack attribute takes precedence).
    AttrBuilder B;
    B.addAttribute(Attribute::StackProtect)
        .addAttribute(Attribute::StackProtectReq)
        .addAttribute(Attribute::StackProtectStrong);
    F.removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F.getContext(), AttributeSet::FunctionIndex, B));
  }

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where the stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, Returns, StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  UnsafeStackPtr = getOrCreateUnsafeStackPtr(IRB, F);

  // The top of the unsafe stack after all unsafe static allocas are allocated.
  Value *StaticTop =
      moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas, Returns);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}

} // anonymous namespace

char SafeStack::ID = 0;
INITIALIZE_TM_PASS_BEGIN(SafeStack, "safe-stack",
                         "Safe Stack instrumentation pass", false, false)
INITIALIZE_TM_PASS_END(SafeStack, "safe-stack",
                       "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass(const llvm::TargetMachine *TM) {
  return new SafeStack(TM);
}