blob: 55bdda3eb1a8589af2977e02dc71be8b0fc2fd41 [file] [log] [blame]
//===- HWAddressSanitizer.cpp - detector of memory bugs (tagged ptrs) -----===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10/// \file
11/// This file is a part of HWAddressSanitizer, an address sanity checker
12/// based on tagged addressing.
13//===----------------------------------------------------------------------===//
14
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000047
using namespace llvm;

#define DEBUG_TYPE "hwasan"

// Names of the module constructor and the runtime initialization entry point
// emitted for userspace (non-kernel) builds.
static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
static const char *const kHwasanInitName = "__hwasan_init";

// Runtime-provided global holding the dynamic shadow base; loaded at function
// entry when the shadow offset is not known at compile time.
static const char *const kHwasanShadowMemoryDynamicAddress =
    "__hwasan_shadow_memory_dynamic_address";

// Accesses sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const size_t kDefaultShadowScale = 4;
// Sentinel value of Mapping.Offset meaning "shadow base is only known at run
// time" (see maybeInsertDynamicShadowAtFunctionEntry).
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
// The tag lives in the top byte of a 64-bit pointer.
static const unsigned kPointerTagShift = 56;

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "hwasan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__hwasan_"));

static cl::opt<bool>
    ClInstrumentWithCalls("hwasan-instrument-with-calls",
                          cl::desc("instrument reads and writes with callbacks"),
                          cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "hwasan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClRecover(
    "hwasan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

// -1 disables the match-all tag; kernel mode defaults to 0xFF (see
// instrumentMemAccessInline).
static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool> ClEnableKhwasan(
    "hwasan-kernel",
    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

// These flags allow to change the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//    Shadow = (Mem >> scale) + offset

static cl::opt<unsigned long long> ClMappingOffset(
    "hwasan-mapping-offset",
    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"), cl::Hidden,
    cl::init(0));
121
namespace {

/// \brief An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  // Command-line flags, when given explicitly, override the constructor
  // arguments (which typically come from the frontend).
  explicit HWAddressSanitizer(bool CompileKernel = false, bool Recover = false)
      : FunctionPass(ID) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
        ClEnableKhwasan : CompileKernel;
  }

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool runOnFunction(Function &F) override;
  bool doInitialization(Module &M) override;

  void initializeCallbacks(Module &M);

  void maybeInsertDynamicShadowAtFunctionEntry(Function &F);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *memToShadow(Value *Shadow, Type *Ty, IRBuilder<> &IRB);
  void instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  bool instrumentMemAccess(Instruction *I);
  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
                                   uint64_t *TypeSize, unsigned *Alignment,
                                   Value **MaybeMask);

  bool isInterestingAlloca(const AllocaInst &AI);
  bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  bool instrumentStack(SmallVectorImpl<AllocaInst *> &Allocas,
                       SmallVectorImpl<Instruction *> &RetVec);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

private:
  LLVMContext *C;
  Triple TargetTriple;

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;

    void init(Triple &TargetTriple);
    unsigned getAllocaAlignment() const { return 1U << Scale; }
  };
  ShadowMapping Mapping;

  Type *IntptrTy;
  Type *Int8Ty;

  bool CompileKernel;
  bool Recover;

  // Module ctor created in doInitialization (userspace only); skipped by
  // runOnFunction so we do not instrument our own initialization code.
  Function *HwasanCtorFunction;

  // Runtime check/report callbacks, indexed by [IsWrite][AccessSizeIndex].
  Function *HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  Function *HwasanMemoryAccessCallbackSized[2];

  Function *HwasanTagMemoryFunc;
  Function *HwasanGenerateTagFunc;

  Constant *ShadowGlobal;

  // Per-function dynamic shadow base; set at function entry when needed and
  // reset to nullptr at the end of runOnFunction.
  Value *LocalDynamicShadow = nullptr;
};

} // end anonymous namespace
208
char HWAddressSanitizer::ID = 0;

// Register the pass with the legacy pass manager under the name "hwasan".
INITIALIZE_PASS_BEGIN(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_END(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000219
/// Factory used by the frontend/pass pipelines to create this pass.
FunctionPass *llvm::createHWAddressSanitizerPass(bool CompileKernel,
                                                 bool Recover) {
  // Kernel instrumentation is only supported in recover (continue-after-error)
  // mode.
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizer(CompileKernel, Recover);
}
225
/// \brief Module-level initialization.
///
/// inserts a call to __hwasan_init to the module's constructor list.
bool HWAddressSanitizer::doInitialization(Module &M) {
  DEBUG(dbgs() << "Init " << M.getName() << "\n");
  auto &DL = M.getDataLayout();

  TargetTriple = Triple(M.getTargetTriple());

  // Decide Scale/Offset/InGlobal for this target before any IR is emitted.
  Mapping.init(TargetTriple);

  C = &(M.getContext());
  IRBuilder<> IRB(*C);
  IntptrTy = IRB.getIntPtrTy(DL);
  Int8Ty = IRB.getInt8Ty();

  HwasanCtorFunction = nullptr;
  // No module ctor in kernel mode; only userspace calls __hwasan_init via a
  // global constructor.
  if (!CompileKernel) {
    std::tie(HwasanCtorFunction, std::ignore) =
        createSanitizerCtorAndInitFunctions(M, kHwasanModuleCtorName,
                                            kHwasanInitName,
                                            /*InitArgTypes=*/{},
                                            /*InitArgs=*/{});
    appendToGlobalCtors(M, HwasanCtorFunction, 0);
  }
  return true;
}
253
/// Declare (or look up) the runtime callbacks this pass may call.
///
/// The check callbacks are named <prefix><load|store><size>[_noabort], e.g.
/// __hwasan_load4 or __hwasan_store16_noabort; the "N" variant takes an
/// explicit size operand for accesses that do not fit the fixed sizes.
void HWAddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";
    const std::string EndingStr = Recover ? "_noabort" : "";

    HwasanMemoryAccessCallbackSized[AccessIsWrite] =
        checkSanitizerInterfaceFunction(M.getOrInsertFunction(
            ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
            FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false)));

    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          checkSanitizerInterfaceFunction(M.getOrInsertFunction(
              ClMemoryAccessCallbackPrefix + TypeStr +
                  itostr(1ULL << AccessSizeIndex) + EndingStr,
              FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false)));
    }
  }

  HwasanTagMemoryFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
      "__hwasan_tag_memory", IRB.getVoidTy(), IntptrTy, Int8Ty, IntptrTy));
  HwasanGenerateTagFunc = checkSanitizerInterfaceFunction(
      M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty));

  // When the shadow base is published via a linker/ifunc global, reference it
  // as a zero-length byte array.
  if (Mapping.InGlobal)
    ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
                                       ArrayType::get(IRB.getInt8Ty(), 0));
}
284
/// If the shadow base is only known at run time, materialize it once at
/// function entry and cache it in LocalDynamicShadow.
void HWAddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
  // Generate code only when dynamic addressing is needed.
  if (Mapping.Offset != kDynamicShadowSentinel)
    return;

  IRBuilder<> IRB(&F.front().front());
  if (Mapping.InGlobal) {
    // An empty inline asm with input reg == output reg.
    // An opaque pointer-to-int cast, basically.
    InlineAsm *Asm = InlineAsm::get(
        FunctionType::get(IntptrTy, {ShadowGlobal->getType()}, false),
        StringRef(""), StringRef("=r,0"),
        /*hasSideEffects=*/false);
    LocalDynamicShadow = IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
  } else {
    // Otherwise read the base from a global the runtime fills in.
    Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
        kHwasanShadowMemoryDynamicAddress, IntptrTy);
    LocalDynamicShadow = IRB.CreateLoad(GlobalDynamicAddress);
  }
}
305
/// Decide whether \p I is a memory access this pass should instrument.
///
/// Returns the pointer operand of the access, or nullptr if the instruction
/// should be skipped. On a non-null return the out-parameters are filled in:
/// \p IsWrite, \p TypeSize (in bits), \p Alignment (0 for atomics, meaning
/// "natural"), and \p MaybeMask (never written here; reserved for masked
/// vector accesses).
Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
                                                     bool *IsWrite,
                                                     uint64_t *TypeSize,
                                                     unsigned *Alignment,
                                                     Value **MaybeMask) {
  // Skip memory accesses inserted by another instrumentation.
  if (I->getMetadata("nosanitize")) return nullptr;

  // Do not instrument the load fetching the dynamic shadow address.
  if (LocalDynamicShadow == I)
    return nullptr;

  Value *PtrOperand = nullptr;
  const DataLayout &DL = I->getModule()->getDataLayout();
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return nullptr;
    *IsWrite = false;
    *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
    *Alignment = LI->getAlignment();
    PtrOperand = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
    *Alignment = SI->getAlignment();
    PtrOperand = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
    *Alignment = 0;
    PtrOperand = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
    *Alignment = 0;
    PtrOperand = XCHG->getPointerOperand();
  }

  if (PtrOperand) {
    // Do not instrument accesses from different address spaces; we cannot deal
    // with them.
    Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
    if (PtrTy->getPointerAddressSpace() != 0)
      return nullptr;

    // Ignore swifterror addresses.
    // swifterror memory addresses are mem2reg promoted by instruction
    // selection. As such they cannot have regular uses like an instrumentation
    // function and it makes no sense to track them as memory.
    if (PtrOperand->isSwiftError())
      return nullptr;
  }

  return PtrOperand;
}
363
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000364static unsigned getPointerOperandIndex(Instruction *I) {
365 if (LoadInst *LI = dyn_cast<LoadInst>(I))
366 return LI->getPointerOperandIndex();
367 if (StoreInst *SI = dyn_cast<StoreInst>(I))
368 return SI->getPointerOperandIndex();
369 if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
370 return RMW->getPointerOperandIndex();
371 if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
372 return XCHG->getPointerOperandIndex();
373 report_fatal_error("Unexpected instruction");
374 return -1;
375}
376
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000377static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
378 size_t Res = countTrailingZeros(TypeSize / 8);
379 assert(Res < kNumberOfAccessSizes);
380 return Res;
381}
382
/// Rewrite the pointer operand of \p I to its untagged form so the actual
/// access does not fault on the tag bits.
void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
  // AArch64 is skipped — presumably the hardware ignores the top byte of
  // addresses there, so the tagged pointer can be dereferenced directly.
  // NOTE(review): confirm this relies on AArch64 top-byte-ignore.
  if (TargetTriple.isAArch64())
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}
393
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000394Value *HWAddressSanitizer::memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB) {
395 // Mem >> Scale
396 Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
397 if (Mapping.Offset == 0)
398 return Shadow;
399 // (Mem >> Scale) + Offset
400 Value *ShadowBase;
401 if (LocalDynamicShadow)
402 ShadowBase = LocalDynamicShadow;
403 else
404 ShadowBase = ConstantInt::get(Ty, Mapping.Offset);
405 return IRB.CreateAdd(Shadow, ShadowBase);
406}
407
/// Emit the inline (no-callback) tag check before \p InsertBefore:
/// compare the pointer's top-byte tag with the shadow memory tag and, on
/// mismatch, trap into the runtime via an architecture-specific break
/// instruction whose immediate encodes the access kind/size.
void HWAddressSanitizer::instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
  IRBuilder<> IRB(InsertBefore);
  // Tag = top byte of the pointer.
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *ShadowLong = memToShadow(AddrLong, PtrLong->getType(), IRB);
  Value *MemTag =
      IRB.CreateLoad(IRB.CreateIntToPtr(ShadowLong, IRB.getInt8PtrTy()));
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  // Pointers carrying the "match-all" tag are never reported. The kernel
  // defaults to 0xFF; userspace has no match-all tag unless requested.
  int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
      ClMatchAllTag : (CompileKernel ? 0xFF : -1);
  if (matchAllTag != -1) {
    Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
        ConstantInt::get(PtrTag->getType(), matchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  // The mismatch path is expected to be very cold (1 : 100000 weights).
  TerminatorInst *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  // Access descriptor packed into the trap immediate:
  // bit 5 = recover, bit 4 = write, bits 0-3 = size index.
  const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
    case Triple::x86_64:
      // The signal handler will find the data address in rdi.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
          "{rdi}",
          /*hasSideEffects=*/true);
      break;
    case Triple::aarch64:
    case Triple::aarch64_be:
      // The signal handler will find the data address in x0.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "brk #" + itostr(0x900 + AccessInfo),
          "{x0}",
          /*hasSideEffects=*/true);
      break;
    default:
      report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
}
458
/// Instrument one memory access: either an inline tag check, a fixed-size
/// runtime callback, or the generic sized callback for odd sizes/alignments.
/// Returns true if any instrumentation was emitted.
bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
  DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
  bool IsWrite = false;
  unsigned Alignment = 0;
  uint64_t TypeSize = 0;
  Value *MaybeMask = nullptr;
  Value *Addr =
      isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);

  if (!Addr)
    return false;

  // Masked (vector) accesses are not handled yet.
  if (MaybeMask)
    return false; //FIXME

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  // Fast path: power-of-two size up to 16 bytes and sufficiently aligned
  // (Alignment == 0 means "natural" for atomics).
  if (isPowerOf2_64(TypeSize) &&
      (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
      (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
       Alignment >= TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
    if (ClInstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                     AddrLong);
    } else {
      instrumentMemAccessInline(AddrLong, IsWrite, AccessSizeIndex, I);
    }
  } else {
    // Slow path: generic callback with an explicit byte size.
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
                   {AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8)});
  }
  // On targets that cannot dereference tagged pointers, strip the tag from
  // the actual access.
  untagPointerOperand(I, Addr);

  return true;
}
495
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000496static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
497 uint64_t ArraySize = 1;
498 if (AI.isArrayAllocation()) {
499 const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
500 assert(CI && "non-constant array size");
501 ArraySize = CI->getZExtValue();
502 }
503 Type *Ty = AI.getAllocatedType();
504 uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
505 return SizeInBytes * ArraySize;
506}
507
/// Write \p Tag into the shadow bytes covering \p AI, rounding the size up
/// to the alloca alignment granule.
bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
                                   Value *Tag) {
  // Round the byte size up to a multiple of the shadow granule so the whole
  // (re-aligned) alloca is covered.
  size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
                ~(Mapping.getAllocaAlignment() - 1);

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (ClInstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, IntptrTy), JustTag,
                    ConstantInt::get(IntptrTy, Size)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = IRB.CreateIntToPtr(
        memToShadow(IRB.CreatePointerCast(AI, IntptrTy), AI->getType(), IRB),
        IRB.getInt8PtrTy());
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks if
    // the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
  }
  return true;
}
533
/// Pick the xor-mask used to derive the tag of alloca number \p AllocaNo
/// from the stack base tag.
static unsigned RetagMask(unsigned AllocaNo) {
  // Only 8-bit values with at most one contiguous run of set bits are used:
  // x = x ^ (mask << 56) can then be encoded as a single armv8 instruction.
  // 255 is deliberately absent — it is reserved for the use-after-return tag.
  static const unsigned FastMasks[] = {
      0,   1,   2,   3,   4,   6,   7,   8,   12,  14,  15,  16,  24,
      28,  30,  31,  32,  48,  56,  60,  62,  63,  64,  96,  112, 120,
      124, 126, 127, 128, 192, 224, 240, 248, 252, 254};
  const unsigned NumMasks = sizeof(FastMasks) / sizeof(FastMasks[0]);
  return FastMasks[AllocaNo % NumMasks];
}
545
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000546Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
547 return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
548}
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000549
/// Compute the per-function base tag from the frame address, or return
/// nullptr when tags are generated by runtime calls instead.
Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return nullptr;
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn =
      Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
  // llvm.frameaddress(0) = this function's frame.
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
                    "hwasan.stack.base.tag");
  return StackTag;
}
570
571Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
572 AllocaInst *AI, unsigned AllocaNo) {
573 if (ClGenerateTagsWithCalls)
574 return getNextTagWithCall(IRB);
575 return IRB.CreateXor(StackTag,
576 ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
577}
578
579Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
580 if (ClGenerateTagsWithCalls)
581 return getNextTagWithCall(IRB);
582 return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
583}
584
// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    // AND with ((Tag << 56) | low-56-bits-all-ones) keeps the low 56 bits of
    // the pointer and clears the top byte down to Tag.
    Value *ShiftedTag = IRB.CreateOr(
        IRB.CreateShl(Tag, kPointerTagShift),
        ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace can simply do OR (tag << 56);
    Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}
602
Evgeniy Stepanov43271b12018-02-21 19:52:23 +0000603// Remove tag from an address.
604Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
605 Value *UntaggedPtrLong;
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000606 if (CompileKernel) {
Evgeniy Stepanov43271b12018-02-21 19:52:23 +0000607 // Kernel addresses have 0xFF in the most significant byte.
608 UntaggedPtrLong = IRB.CreateOr(PtrLong,
609 ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
610 } else {
611 // Userspace addresses have 0x00.
612 UntaggedPtrLong = IRB.CreateAnd(PtrLong,
613 ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
614 }
615 return UntaggedPtrLong;
616}
617
/// Instrument the function's interesting allocas: tag each one's memory,
/// rewrite its uses to a tagged pointer, and re-tag with the UAR tag at
/// every function exit in \p RetVec.
bool HWAddressSanitizer::instrumentStack(
    SmallVectorImpl<AllocaInst *> &Allocas,
    SmallVectorImpl<Instruction *> &RetVec) {
  Function *F = Allocas[0]->getParent()->getParent();
  Instruction *InsertPt = &*F->getEntryBlock().begin();
  IRBuilder<> IRB(InsertPt);

  // nullptr when tags come from runtime calls instead of frame-address bits.
  Value *StackTag = getStackBaseTag(IRB);

  // Ideally, we want to calculate tagged stack base pointer, and rewrite all
  // alloca addresses using that. Unfortunately, offsets are not known yet
  // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
  // temp, shift-OR it into each alloca address and xor with the retag mask.
  // This generates one extra instruction per alloca use.
  for (unsigned N = 0; N < Allocas.size(); ++N) {
    auto *AI = Allocas[N];
    IRB.SetInsertPoint(AI->getNextNode());

    // Replace uses of the alloca with tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    // Rewrite all uses except the cast feeding the tagging code itself;
    // advance the iterator before U.set() since that edits the use list.
    for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
      Use &U = *UI++;
      if (U.getUser() != AILong)
        U.set(Replacement);
    }

    tagAlloca(IRB, AI, Tag);

    for (auto RI : RetVec) {
      IRB.SetInsertPoint(RI);

      // Re-tag alloca memory with the special UAR tag.
      Value *Tag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, Tag);
    }
  }

  return true;
}
663
664bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
665 return (AI.getAllocatedType()->isSized() &&
666 // FIXME: instrument dynamic allocas, too
667 AI.isStaticAlloca() &&
668 // alloca() may be called with 0 size, ignore it.
669 getAllocaSizeInBytes(AI) > 0 &&
670 // We are only interested in allocas not promotable to registers.
671 // Promotable allocas are common under -O0.
672 !isAllocaPromotable(&AI) &&
673 // inalloca allocas are not treated as static, and we don't want
674 // dynamic alloca instrumentation for them as well.
675 !AI.isUsedWithInAlloca() &&
676 // swifterror allocas are register promoted by ISel
677 !AI.isSwiftError());
678}
679
/// Per-function driver: collect interesting allocas, returns and memory
/// accesses, then instrument the stack and each access.
bool HWAddressSanitizer::runOnFunction(Function &F) {
  // Never instrument the ctor this pass created itself.
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  initializeCallbacks(*F.getParent());

  assert(!LocalDynamicShadow);
  maybeInsertDynamicShadowAtFunctionEntry(F);

  bool Changed = false;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<AllocaInst*, 8> AllocasToInstrument;
  SmallVector<Instruction*, 8> RetVec;
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (ClInstrumentStack)
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          // Realign all allocas. We don't want small uninteresting allocas to
          // hide in instrumented alloca's padding.
          if (AI->getAlignment() < Mapping.getAllocaAlignment())
            AI->setAlignment(Mapping.getAllocaAlignment());
          // Instrument some of them.
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.push_back(AI);
          continue;
        }

      // Function exits — where alloca memory is re-tagged for UAR detection.
      if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
          isa<CleanupReturnInst>(Inst))
        RetVec.push_back(&Inst);

      Value *MaybeMask = nullptr;
      bool IsWrite;
      unsigned Alignment;
      uint64_t TypeSize;
      Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
                                              &Alignment, &MaybeMask);
      if (Addr || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  if (!AllocasToInstrument.empty())
    Changed |= instrumentStack(AllocasToInstrument, RetVec);

  for (auto Inst : ToInstrument)
    Changed |= instrumentMemAccess(Inst);

  // Reset the per-function shadow base for the next function.
  LocalDynamicShadow = nullptr;

  return Changed;
}
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000737
738void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
739 const bool IsAndroid = TargetTriple.isAndroid();
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000740 const bool IsAndroidWithIfuncSupport =
741 IsAndroid && !TargetTriple.isAndroidVersionLT(21);
742
743 Scale = kDefaultShadowScale;
744
Alex Shlyapnikov909fb122018-04-24 00:16:54 +0000745 if (ClEnableKhwasan || ClInstrumentWithCalls || !IsAndroidWithIfuncSupport)
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000746 Offset = 0;
747 else
748 Offset = kDynamicShadowSentinel;
749 if (ClMappingOffset.getNumOccurrences() > 0)
750 Offset = ClMappingOffset;
751
Alex Shlyapnikov909fb122018-04-24 00:16:54 +0000752 InGlobal = IsAndroidWithIfuncSupport;
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000753}