//===-- SafeStack.cpp - Safe Stack Insertion ------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_os_ostream.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "safestack"

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression
/// that represents the offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
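/// Zeroing out the alloca turns the rewritten SCEV into the byte offset of
/// the access relative to the start of the allocation, which IsAccessSafe()
/// below can then bound using ScalarEvolution's unsigned range analysis.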
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const AllocaInst *AI;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const AllocaInst *AI)
      : SCEVRewriteVisitor(SE), AI(AI) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AI)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack : public FunctionPass {
  const TargetMachine *TM;
  const TargetLoweringBase *TL;
  const DataLayout *DL;
  ScalarEvolution *SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that
  /// we might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// \brief Build a value representing a pointer to the unsafe stack pointer.
  Value *getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F);

  /// \brief Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// \brief Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<ReturnInst *> Returns);

  /// \brief Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const AllocaInst *AI);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const AllocaInst *AI);
  bool IsAccessSafe(Value *Addr, uint64_t Size, const AllocaInst *AI);

public:
  static char ID; // Pass identification, replacement for typeid.
  SafeStack(const TargetMachine *TM)
      : FunctionPass(ID), TM(TM), TL(nullptr), DL(nullptr) {
    initializeSafeStackPass(*PassRegistry::getPassRegistry());
  }
  SafeStack() : SafeStack(nullptr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<ScalarEvolutionWrapperPass>();
  }

  bool doInitialization(Module &M) override {
    DL = &M.getDataLayout();

    StackPtrTy = Type::getInt8PtrTy(M.getContext());
    IntPtrTy = DL->getIntPtrType(M.getContext());
    Int32Ty = Type::getInt32Ty(M.getContext());
    Int8Ty = Type::getInt8Ty(M.getContext());

    return false;
  }

  bool runOnFunction(Function &F) override;
}; // class SafeStack

bool SafeStack::IsAccessSafe(Value *Addr, uint64_t Size, const AllocaInst *AI) {
  AllocaOffsetRewriter Rewriter(*SE, AI);
  const SCEV *Expr = Rewriter.visit(SE->getSCEV(Addr));

  uint64_t BitWidth = SE->getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE->getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, Size));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange = ConstantRange(
      APInt(BitWidth, 0),
      APInt(BitWidth, DL->getTypeStoreSize(AI->getAllocatedType())));
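  // The access is safe only when every byte it can touch stays inside the
  // allocation. For example, a 4-byte load at a constant offset of 12 into a
  // 16-byte alloca gives AccessRange = [12, 16), which is contained in
  // AllocaRange = [0, 16); if the offset could reach 16 or beyond, the
  // containment check below fails and the alloca stays off the safe stack.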
  bool Safe = AllocaRange.contains(AccessRange);

  DEBUG(dbgs() << "[SafeStack] Alloca " << *AI << "\n"
               << "            Access " << *Addr << "\n"
               << "            SCEV " << *Expr
               << " U: " << SE->getUnsignedRange(Expr)
               << ", S: " << SE->getSignedRange(Expr) << "\n"
               << "            Range " << AccessRange << "\n"
               << "            AllocaRange " << AllocaRange << "\n"
               << "            " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}

bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const AllocaInst *AI) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
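  // Only the write through the destination operand is validated against the
  // alloca bounds here; a use as the source of a memcpy/memmove is not
  // written through and is treated as safe.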
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AI);
}

/// Check whether a given alloca instruction (AI) should be put on the safe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
bool SafeStack::IsSafeStackAlloca(const AllocaInst *AI) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Instruction *, 8> WorkList;
  WorkList.push_back(AI);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Instruction *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load: {
        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getType()), AI))
          return false;
        break;
      }
      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store: {
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AI
                       << "\n            store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(
                UI, DL->getTypeStoreSize(I->getOperand(0)->getType()), AI))
          return false;
        break;
      }
      case Instruction::Ret: {
        // Information leak.
        return false;
      }

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AI)) {
            DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AI
                         << "\n            unsafe memintrinsic: " << *I
                         << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) &&
                  (CS.doesNotAccessMemory(A - B) || CS.doesNotAccessMemory()))) {
              DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AI
                           << "\n            unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}

Value *SafeStack::getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F) {
  // Check if there is a target-specific location for the unsafe stack pointer.
  if (TL)
    if (Value *V = TL->getSafeStackPointerLocation(IRB))
      return V;

  // Otherwise, assume the target links with compiler-rt, which provides a
  // thread-local variable with a magic name.
  Module &M = *F.getParent();
  const char *UnsafeStackPtrVar = "__safestack_unsafe_stack_ptr";
  auto UnsafeStackPtr =
      dyn_cast_or_null<GlobalVariable>(M.getNamedValue(UnsafeStackPtrVar));

  if (!UnsafeStackPtr) {
    // The global variable is not defined yet, define it ourselves.
    // We use the initial-exec TLS model because we do not support the
    // variable living anywhere other than in the main executable.
    UnsafeStackPtr = new GlobalVariable(
        M, StackPtrTy, false, GlobalValue::ExternalLinkage, nullptr,
        UnsafeStackPtrVar, nullptr, GlobalValue::InitialExecTLSModel);
  } else {
    // The variable exists, check its type and attributes.
    if (UnsafeStackPtr->getValueType() != StackPtrTy)
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must have void* type");
    if (!UnsafeStackPtr->isThreadLocal())
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must be thread-local");
  }
  return UnsafeStackPtr;
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      if (IsSafeStackAlloca(AI))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        llvm::report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop)
    // If we also have dynamic alloca's, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");

  if (!StaticTop)
    // We need the original unsafe stack pointer value, even if there are
    // no unsafe static allocas.
    StaticTop = IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");

  if (NeedDynamicTop)
    IRB.CreateStore(StaticTop, DynamicTop);

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

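    // The store goes right after the setjmp call / landingpad: when control
    // comes back here via longjmp or unwinding, the native stack has been
    // restored but the unsafe stack pointer still holds whatever value it had
    // at the jump/throw site, so it must be reset explicitly.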
410 Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
411 IRB.CreateStore(CurrentTop, UnsafeStackPtr);
412 }
413
414 return DynamicTop;
415}
416
Value *
SafeStack::moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                          ArrayRef<AllocaInst *> StaticAllocas,
                                          ArrayRef<ReturnInst *> Returns) {
  if (StaticAllocas.empty())
    return nullptr;

  DIBuilder DIB(*F.getParent());

  // We explicitly compute and set the unsafe stack layout for all unsafe
  // static alloca instructions. We save the unsafe "base pointer" in the
  // prologue into a local variable and restore it in the epilogue.

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it ?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  // Compute maximum alignment among static objects on the unsafe stack.
  unsigned MaxAlignment = 0;
  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }

  if (MaxAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
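    // Masking with ~(MaxAlignment - 1) rounds the loaded pointer down to a
    // multiple of MaxAlignment (e.g. 0x1008 becomes 0x1000 for a 32-byte
    // alignment). Rounding down is fine because the unsafe stack grows toward
    // lower addresses, so the adjusted base cannot overlap data that is
    // already on the unsafe stack.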
    assert(isPowerOf2_32(MaxAlignment));
    IRB.SetInsertPoint(cast<Instruction>(BasePointer->getNextNode()));
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(MaxAlignment - 1))),
        StackPtrTy));
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  int64_t StaticOffset = 0; // Current stack top.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);

    auto CArraySize = cast<ConstantInt>(AI->getArraySize());
    Type *Ty = AI->getAllocatedType();

    uint64_t Size = DL->getTypeAllocSize(Ty) * CArraySize->getZExtValue();
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = RoundUpToAlignment(StaticOffset, Align);
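    // Objects are laid out top-down at negative offsets from BasePointer. For
    // example, a 4-byte object followed by an 8-byte object that needs
    // 16-byte alignment gives offsets 4 and 16, i.e. addresses
    // BasePointer - 4 and BasePointer - 16; the latter is 16-byte aligned
    // because BasePointer itself is kept aligned to at least the largest
    // requested alignment (see the re-alignment above).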

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewAI = IRB.CreateBitCast(Off, AI->getType(), AI->getName());
    if (AI->hasName() && isa<Instruction>(NewAI))
      cast<Instruction>(NewAI)->takeName(AI);

    // Replace the alloca with the new location.
    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, /*Deref=*/true,
                               -StaticOffset);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  StaticOffset = RoundUpToAlignment(StaticOffset, StackAlignment);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(cast<Instruction>(BasePointer->getNextNode()));

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -StaticOffset),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL->getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);
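    // The unsafe stack grows toward lower addresses: the new top is the old
    // top minus the runtime byte size, rounded down (by the masking below) to
    // the required alignment. Rounding down cannot overlap objects that are
    // already allocated and over-allocates at most Align - 1 bytes.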

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);

    // Save the stack pointer.
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreateIntToPtr(SP, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, /*Deref=*/true);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
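    // stacksave/stackrestore normally capture and restore the native stack
    // pointer; once dynamic allocas live on the unsafe stack, the same effect
    // is achieved by loading and storing the unsafe stack pointer instead.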
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}

bool SafeStack::runOnFunction(Function &F) {
  DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

  if (!F.hasFnAttribute(Attribute::SafeStack)) {
    DEBUG(dbgs() << "[SafeStack]     safestack is not requested"
                    " for this function\n");
    return false;
  }

  if (F.isDeclaration()) {
    DEBUG(dbgs() << "[SafeStack]     function definition"
                    " is not available\n");
    return false;
  }

  TL = TM ? TM->getSubtargetImpl(F)->getTargetLowering() : nullptr;
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();

  {
    // Make sure the regular stack protector won't run on this function
    // (safestack attribute takes precedence).
    AttrBuilder B;
    B.addAttribute(Attribute::StackProtect)
        .addAttribute(Attribute::StackProtectReq)
        .addAttribute(Attribute::StackProtectStrong);
    F.removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F.getContext(), AttributeSet::FunctionIndex, B));
  }

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where the stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, Returns, StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  UnsafeStackPtr = getOrCreateUnsafeStackPtr(IRB, F);

  // The top of the unsafe stack after all unsafe static allocas are allocated.
  Value *StaticTop =
      moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas, Returns);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}

} // anonymous namespace

char SafeStack::ID = 0;
INITIALIZE_TM_PASS_BEGIN(SafeStack, "safe-stack",
                         "Safe Stack instrumentation pass", false, false)
INITIALIZE_TM_PASS_END(SafeStack, "safe-stack",
                       "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass(const llvm::TargetMachine *TM) {
  return new SafeStack(TM);
}