blob: bc690ccd5cd26c38f88e118a14efe428e70cdb0a [file] [log] [blame]
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +00001//===- HWAddressSanitizer.cpp - detector of uninitialized reads -------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10/// \file
11/// This file is a part of HWAddressSanitizer, an address sanity checker
12/// based on tagged addressing.
13//===----------------------------------------------------------------------===//
14
15#include "llvm/ADT/SmallVector.h"
16#include "llvm/ADT/StringExtras.h"
17#include "llvm/ADT/StringRef.h"
18#include "llvm/ADT/Triple.h"
19#include "llvm/IR/Attributes.h"
20#include "llvm/IR/BasicBlock.h"
21#include "llvm/IR/Constant.h"
22#include "llvm/IR/Constants.h"
23#include "llvm/IR/DataLayout.h"
24#include "llvm/IR/DerivedTypes.h"
25#include "llvm/IR/Function.h"
26#include "llvm/IR/IRBuilder.h"
27#include "llvm/IR/InlineAsm.h"
28#include "llvm/IR/InstVisitor.h"
29#include "llvm/IR/Instruction.h"
30#include "llvm/IR/Instructions.h"
31#include "llvm/IR/IntrinsicInst.h"
32#include "llvm/IR/Intrinsics.h"
33#include "llvm/IR/LLVMContext.h"
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +000034#include "llvm/IR/MDBuilder.h"
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000035#include "llvm/IR/Module.h"
36#include "llvm/IR/Type.h"
37#include "llvm/IR/Value.h"
38#include "llvm/Pass.h"
39#include "llvm/Support/Casting.h"
40#include "llvm/Support/CommandLine.h"
41#include "llvm/Support/Debug.h"
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +000042#include "llvm/Support/raw_ostream.h"
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000043#include "llvm/Transforms/Instrumentation.h"
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +000044#include "llvm/Transforms/Utils/BasicBlockUtils.h"
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000045#include "llvm/Transforms/Utils/ModuleUtils.h"
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +000046#include "llvm/Transforms/Utils/PromoteMemToReg.h"
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000047
48using namespace llvm;
49
50#define DEBUG_TYPE "hwasan"
51
52static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
53static const char *const kHwasanInitName = "__hwasan_init";
54
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +000055static const char *const kHwasanShadowMemoryDynamicAddress =
56 "__hwasan_shadow_memory_dynamic_address";
57
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000058// Accesses sizes are powers of two: 1, 2, 4, 8, 16.
59static const size_t kNumberOfAccessSizes = 5;
60
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +000061static const size_t kDefaultShadowScale = 4;
62static const uint64_t kDynamicShadowSentinel =
63 std::numeric_limits<uint64_t>::max();
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +000064static const unsigned kPointerTagShift = 56;
65
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000066static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
67 "hwasan-memory-access-callback-prefix",
68 cl::desc("Prefix for memory access callbacks"), cl::Hidden,
69 cl::init("__hwasan_"));
70
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +000071static cl::opt<bool>
72 ClInstrumentWithCalls("hwasan-instrument-with-calls",
73 cl::desc("instrument reads and writes with callbacks"),
74 cl::Hidden, cl::init(false));
75
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +000076static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
77 cl::desc("instrument read instructions"),
78 cl::Hidden, cl::init(true));
79
80static cl::opt<bool> ClInstrumentWrites(
81 "hwasan-instrument-writes", cl::desc("instrument write instructions"),
82 cl::Hidden, cl::init(true));
83
84static cl::opt<bool> ClInstrumentAtomics(
85 "hwasan-instrument-atomics",
86 cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
87 cl::init(true));
88
Evgeniy Stepanov3fd1b1a2017-12-20 19:05:44 +000089static cl::opt<bool> ClRecover(
90 "hwasan-recover",
91 cl::desc("Enable recovery mode (continue-after-error)."),
92 cl::Hidden, cl::init(false));
93
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +000094static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
95 cl::desc("instrument stack (allocas)"),
96 cl::Hidden, cl::init(true));
97
Alex Shlyapnikov788764c2018-06-29 20:20:17 +000098static cl::opt<bool> ClUARRetagToZero(
99 "hwasan-uar-retag-to-zero",
100 cl::desc("Clear alloca tags before returning from the function to allow "
101 "non-instrumented and instrumented function calls mix. When set "
102 "to false, allocas are retagged before returning from the "
103 "function to detect use after return."),
104 cl::Hidden, cl::init(true));
105
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000106static cl::opt<bool> ClGenerateTagsWithCalls(
107 "hwasan-generate-tags-with-calls",
108 cl::desc("generate new tags with runtime library calls"), cl::Hidden,
109 cl::init(false));
110
Evgeniy Stepanov1f1a7a72018-04-04 20:44:59 +0000111static cl::opt<int> ClMatchAllTag(
112 "hwasan-match-all-tag",
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000113 cl::desc("don't report bad accesses via pointers with this tag"),
114 cl::Hidden, cl::init(-1));
Evgeniy Stepanov1f1a7a72018-04-04 20:44:59 +0000115
Evgeniy Stepanov5bd669d2018-01-17 23:24:38 +0000116static cl::opt<bool> ClEnableKhwasan(
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000117 "hwasan-kernel",
118 cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
Evgeniy Stepanov5bd669d2018-01-17 23:24:38 +0000119 cl::Hidden, cl::init(false));
120
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000121// These flags allow to change the shadow mapping and control how shadow memory
122// is accessed. The shadow mapping looks like:
123// Shadow = (Mem >> scale) + offset
124
125static cl::opt<unsigned long long> ClMappingOffset(
126 "hwasan-mapping-offset",
127 cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"), cl::Hidden,
128 cl::init(0));
129
Evgeniy Stepanov453e7ac2018-08-10 16:21:37 +0000130static cl::opt<bool>
131 ClWithIfunc("hwasan-with-ifunc",
132 cl::desc("Access dynamic shadow through an ifunc global on "
133 "platforms that support this"),
134 cl::Hidden, cl::init(false));
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000135namespace {
136
Adrian Prantl5f8f34e42018-05-01 15:54:18 +0000137/// An instrumentation pass implementing detection of addressability bugs
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000138/// using tagged pointers.
139class HWAddressSanitizer : public FunctionPass {
140public:
141 // Pass identification, replacement for typeid.
142 static char ID;
143
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000144 explicit HWAddressSanitizer(bool CompileKernel = false, bool Recover = false)
145 : FunctionPass(ID) {
146 this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
147 this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
148 ClEnableKhwasan : CompileKernel;
149 }
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000150
151 StringRef getPassName() const override { return "HWAddressSanitizer"; }
152
153 bool runOnFunction(Function &F) override;
154 bool doInitialization(Module &M) override;
155
156 void initializeCallbacks(Module &M);
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000157
158 void maybeInsertDynamicShadowAtFunctionEntry(Function &F);
159
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000160 void untagPointerOperand(Instruction *I, Value *Addr);
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000161 Value *memToShadow(Value *Shadow, Type *Ty, IRBuilder<> &IRB);
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000162 void instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
163 unsigned AccessSizeIndex,
164 Instruction *InsertBefore);
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000165 bool instrumentMemAccess(Instruction *I);
166 Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
167 uint64_t *TypeSize, unsigned *Alignment,
168 Value **MaybeMask);
169
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000170 bool isInterestingAlloca(const AllocaInst &AI);
171 bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
Evgeniy Stepanov80ccda22018-02-09 00:59:10 +0000172 Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
Evgeniy Stepanov43271b12018-02-21 19:52:23 +0000173 Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000174 bool instrumentStack(SmallVectorImpl<AllocaInst *> &Allocas,
175 SmallVectorImpl<Instruction *> &RetVec);
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000176 Value *getNextTagWithCall(IRBuilder<> &IRB);
177 Value *getStackBaseTag(IRBuilder<> &IRB);
178 Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
179 unsigned AllocaNo);
180 Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000181
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000182private:
183 LLVMContext *C;
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000184 Triple TargetTriple;
185
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000186 /// This struct defines the shadow mapping using the rule:
187 /// shadow = (mem >> Scale) + Offset.
188 /// If InGlobal is true, then
189 /// extern char __hwasan_shadow[];
190 /// shadow = (mem >> Scale) + &__hwasan_shadow
191 struct ShadowMapping {
192 int Scale;
193 uint64_t Offset;
194 bool InGlobal;
195
196 void init(Triple &TargetTriple);
197 unsigned getAllocaAlignment() const { return 1U << Scale; }
198 };
199 ShadowMapping Mapping;
200
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000201 Type *IntptrTy;
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000202 Type *Int8Ty;
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000203
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000204 bool CompileKernel;
Evgeniy Stepanov3fd1b1a2017-12-20 19:05:44 +0000205 bool Recover;
206
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000207 Function *HwasanCtorFunction;
208
209 Function *HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
210 Function *HwasanMemoryAccessCallbackSized[2];
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000211
212 Function *HwasanTagMemoryFunc;
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000213 Function *HwasanGenerateTagFunc;
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000214
215 Constant *ShadowGlobal;
216
217 Value *LocalDynamicShadow = nullptr;
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000218};
219
220} // end anonymous namespace
221
222char HWAddressSanitizer::ID = 0;
223
224INITIALIZE_PASS_BEGIN(
225 HWAddressSanitizer, "hwasan",
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000226 "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
227 false)
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000228INITIALIZE_PASS_END(
229 HWAddressSanitizer, "hwasan",
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000230 "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
231 false)
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000232
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000233FunctionPass *llvm::createHWAddressSanitizerPass(bool CompileKernel,
234 bool Recover) {
235 assert(!CompileKernel || Recover);
236 return new HWAddressSanitizer(CompileKernel, Recover);
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000237}
238
Adrian Prantl5f8f34e42018-05-01 15:54:18 +0000239/// Module-level initialization.
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000240///
241/// inserts a call to __hwasan_init to the module's constructor list.
242bool HWAddressSanitizer::doInitialization(Module &M) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000243 LLVM_DEBUG(dbgs() << "Init " << M.getName() << "\n");
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000244 auto &DL = M.getDataLayout();
245
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000246 TargetTriple = Triple(M.getTargetTriple());
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000247
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000248 Mapping.init(TargetTriple);
249
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000250 C = &(M.getContext());
251 IRBuilder<> IRB(*C);
252 IntptrTy = IRB.getIntPtrTy(DL);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000253 Int8Ty = IRB.getInt8Ty();
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000254
Benjamin Kramerbfc1d972018-01-18 14:19:04 +0000255 HwasanCtorFunction = nullptr;
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000256 if (!CompileKernel) {
Evgeniy Stepanov5bd669d2018-01-17 23:24:38 +0000257 std::tie(HwasanCtorFunction, std::ignore) =
258 createSanitizerCtorAndInitFunctions(M, kHwasanModuleCtorName,
259 kHwasanInitName,
260 /*InitArgTypes=*/{},
261 /*InitArgs=*/{});
262 appendToGlobalCtors(M, HwasanCtorFunction, 0);
263 }
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000264 return true;
265}
266
267void HWAddressSanitizer::initializeCallbacks(Module &M) {
268 IRBuilder<> IRB(*C);
269 for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
270 const std::string TypeStr = AccessIsWrite ? "store" : "load";
Evgeniy Stepanov3fd1b1a2017-12-20 19:05:44 +0000271 const std::string EndingStr = Recover ? "_noabort" : "";
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000272
273 HwasanMemoryAccessCallbackSized[AccessIsWrite] =
274 checkSanitizerInterfaceFunction(M.getOrInsertFunction(
Evgeniy Stepanovc07e0bd2018-01-16 23:15:08 +0000275 ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000276 FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false)));
277
278 for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
279 AccessSizeIndex++) {
280 HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
281 checkSanitizerInterfaceFunction(M.getOrInsertFunction(
282 ClMemoryAccessCallbackPrefix + TypeStr +
Evgeniy Stepanov3fd1b1a2017-12-20 19:05:44 +0000283 itostr(1ULL << AccessSizeIndex) + EndingStr,
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000284 FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false)));
285 }
286 }
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000287
288 HwasanTagMemoryFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
289 "__hwasan_tag_memory", IRB.getVoidTy(), IntptrTy, Int8Ty, IntptrTy));
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000290 HwasanGenerateTagFunc = checkSanitizerInterfaceFunction(
291 M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty));
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000292
293 if (Mapping.InGlobal)
294 ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
295 ArrayType::get(IRB.getInt8Ty(), 0));
296}
297
298void HWAddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
299 // Generate code only when dynamic addressing is needed.
300 if (Mapping.Offset != kDynamicShadowSentinel)
301 return;
302
303 IRBuilder<> IRB(&F.front().front());
304 if (Mapping.InGlobal) {
305 // An empty inline asm with input reg == output reg.
306 // An opaque pointer-to-int cast, basically.
307 InlineAsm *Asm = InlineAsm::get(
308 FunctionType::get(IntptrTy, {ShadowGlobal->getType()}, false),
309 StringRef(""), StringRef("=r,0"),
310 /*hasSideEffects=*/false);
311 LocalDynamicShadow = IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
312 } else {
313 Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
314 kHwasanShadowMemoryDynamicAddress, IntptrTy);
315 LocalDynamicShadow = IRB.CreateLoad(GlobalDynamicAddress);
316 }
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000317}
318
319Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000320 bool *IsWrite,
321 uint64_t *TypeSize,
322 unsigned *Alignment,
323 Value **MaybeMask) {
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000324 // Skip memory accesses inserted by another instrumentation.
325 if (I->getMetadata("nosanitize")) return nullptr;
326
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000327 // Do not instrument the load fetching the dynamic shadow address.
328 if (LocalDynamicShadow == I)
329 return nullptr;
330
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000331 Value *PtrOperand = nullptr;
332 const DataLayout &DL = I->getModule()->getDataLayout();
333 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
334 if (!ClInstrumentReads) return nullptr;
335 *IsWrite = false;
336 *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
337 *Alignment = LI->getAlignment();
338 PtrOperand = LI->getPointerOperand();
339 } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
340 if (!ClInstrumentWrites) return nullptr;
341 *IsWrite = true;
342 *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
343 *Alignment = SI->getAlignment();
344 PtrOperand = SI->getPointerOperand();
345 } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
346 if (!ClInstrumentAtomics) return nullptr;
347 *IsWrite = true;
348 *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
349 *Alignment = 0;
350 PtrOperand = RMW->getPointerOperand();
351 } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
352 if (!ClInstrumentAtomics) return nullptr;
353 *IsWrite = true;
354 *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
355 *Alignment = 0;
356 PtrOperand = XCHG->getPointerOperand();
357 }
358
359 if (PtrOperand) {
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000360 // Do not instrument accesses from different address spaces; we cannot deal
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000361 // with them.
362 Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
363 if (PtrTy->getPointerAddressSpace() != 0)
364 return nullptr;
365
366 // Ignore swifterror addresses.
367 // swifterror memory addresses are mem2reg promoted by instruction
368 // selection. As such they cannot have regular uses like an instrumentation
369 // function and it makes no sense to track them as memory.
370 if (PtrOperand->isSwiftError())
371 return nullptr;
372 }
373
374 return PtrOperand;
375}
376
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000377static unsigned getPointerOperandIndex(Instruction *I) {
378 if (LoadInst *LI = dyn_cast<LoadInst>(I))
379 return LI->getPointerOperandIndex();
380 if (StoreInst *SI = dyn_cast<StoreInst>(I))
381 return SI->getPointerOperandIndex();
382 if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
383 return RMW->getPointerOperandIndex();
384 if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
385 return XCHG->getPointerOperandIndex();
386 report_fatal_error("Unexpected instruction");
387 return -1;
388}
389
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000390static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
391 size_t Res = countTrailingZeros(TypeSize / 8);
392 assert(Res < kNumberOfAccessSizes);
393 return Res;
394}
395
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000396void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
397 if (TargetTriple.isAArch64())
398 return;
399
400 IRBuilder<> IRB(I);
401 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
402 Value *UntaggedPtr =
403 IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
404 I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
405}
406
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000407Value *HWAddressSanitizer::memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB) {
408 // Mem >> Scale
409 Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
410 if (Mapping.Offset == 0)
411 return Shadow;
412 // (Mem >> Scale) + Offset
413 Value *ShadowBase;
414 if (LocalDynamicShadow)
415 ShadowBase = LocalDynamicShadow;
416 else
417 ShadowBase = ConstantInt::get(Ty, Mapping.Offset);
418 return IRB.CreateAdd(Shadow, ShadowBase);
419}
420
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000421void HWAddressSanitizer::instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
422 unsigned AccessSizeIndex,
423 Instruction *InsertBefore) {
424 IRBuilder<> IRB(InsertBefore);
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000425 Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
426 IRB.getInt8Ty());
Evgeniy Stepanov43271b12018-02-21 19:52:23 +0000427 Value *AddrLong = untagPointer(IRB, PtrLong);
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000428 Value *ShadowLong = memToShadow(AddrLong, PtrLong->getType(), IRB);
Evgeniy Stepanov5bd669d2018-01-17 23:24:38 +0000429 Value *MemTag =
430 IRB.CreateLoad(IRB.CreateIntToPtr(ShadowLong, IRB.getInt8PtrTy()));
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000431 Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);
432
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000433 int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
434 ClMatchAllTag : (CompileKernel ? 0xFF : -1);
435 if (matchAllTag != -1) {
Evgeniy Stepanov1f1a7a72018-04-04 20:44:59 +0000436 Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000437 ConstantInt::get(PtrTag->getType(), matchAllTag));
Evgeniy Stepanov1f1a7a72018-04-04 20:44:59 +0000438 TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
439 }
440
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000441 TerminatorInst *CheckTerm =
Evgeniy Stepanov3fd1b1a2017-12-20 19:05:44 +0000442 SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000443 MDBuilder(*C).createBranchWeights(1, 100000));
444
445 IRB.SetInsertPoint(CheckTerm);
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000446 const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
447 InlineAsm *Asm;
448 switch (TargetTriple.getArch()) {
449 case Triple::x86_64:
450 // The signal handler will find the data address in rdi.
451 Asm = InlineAsm::get(
452 FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
453 "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
454 "{rdi}",
455 /*hasSideEffects=*/true);
456 break;
457 case Triple::aarch64:
458 case Triple::aarch64_be:
459 // The signal handler will find the data address in x0.
460 Asm = InlineAsm::get(
461 FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
462 "brk #" + itostr(0x900 + AccessInfo),
463 "{x0}",
464 /*hasSideEffects=*/true);
465 break;
466 default:
467 report_fatal_error("unsupported architecture");
468 }
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000469 IRB.CreateCall(Asm, PtrLong);
470}
471
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000472bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000473 LLVM_DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000474 bool IsWrite = false;
475 unsigned Alignment = 0;
476 uint64_t TypeSize = 0;
477 Value *MaybeMask = nullptr;
478 Value *Addr =
479 isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);
480
481 if (!Addr)
482 return false;
483
484 if (MaybeMask)
485 return false; //FIXME
486
487 IRBuilder<> IRB(I);
488 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
489 if (isPowerOf2_64(TypeSize) &&
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000490 (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000491 (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000492 Alignment >= TypeSize / 8)) {
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000493 size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
Evgeniy Stepanovecb48e52017-12-13 01:16:34 +0000494 if (ClInstrumentWithCalls) {
495 IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
496 AddrLong);
497 } else {
498 instrumentMemAccessInline(AddrLong, IsWrite, AccessSizeIndex, I);
499 }
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000500 } else {
501 IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
502 {AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8)});
503 }
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000504 untagPointerOperand(I, Addr);
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000505
506 return true;
507}
508
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000509static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
510 uint64_t ArraySize = 1;
511 if (AI.isArrayAllocation()) {
512 const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
513 assert(CI && "non-constant array size");
514 ArraySize = CI->getZExtValue();
515 }
516 Type *Ty = AI.getAllocatedType();
517 uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
518 return SizeInBytes * ArraySize;
519}
520
521bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
522 Value *Tag) {
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000523 size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
524 ~(Mapping.getAllocaAlignment() - 1);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000525
526 Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
527 if (ClInstrumentWithCalls) {
528 IRB.CreateCall(HwasanTagMemoryFunc,
529 {IRB.CreatePointerCast(AI, IntptrTy), JustTag,
530 ConstantInt::get(IntptrTy, Size)});
531 } else {
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000532 size_t ShadowSize = Size >> Mapping.Scale;
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000533 Value *ShadowPtr = IRB.CreateIntToPtr(
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000534 memToShadow(IRB.CreatePointerCast(AI, IntptrTy), AI->getType(), IRB),
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000535 IRB.getInt8PtrTy());
536 // If this memset is not inlined, it will be intercepted in the hwasan
537 // runtime library. That's OK, because the interceptor skips the checks if
538 // the address is in the shadow region.
539 // FIXME: the interceptor is not as fast as real memset. Consider lowering
540 // llvm.memset right here into either a sequence of stores, or a call to
541 // hwasan_tag_memory.
542 IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
543 }
544 return true;
545}
546
/// Pick a cheap-to-encode retag mask for the Nth alloca in a function.
static unsigned RetagMask(unsigned AllocaNo) {
  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for these
  // masks.
  // The list does not include the value 255, which is used for UAR.
  static unsigned FastMasks[] = {
      0,   1,   2,   3,   4,   6,   7,   8,   12,  14,  15,  16,  24,
      28,  30,  31,  32,  48,  56,  60,  62,  63,  64,  96,  112, 120,
      124, 126, 127, 128, 192, 224, 240, 248, 252, 254};
  constexpr unsigned NumMasks = sizeof(FastMasks) / sizeof(FastMasks[0]);
  return FastMasks[AllocaNo % NumMasks];
}
558
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000559Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
560 return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
561}
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000562
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000563Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
564 if (ClGenerateTagsWithCalls)
565 return nullptr;
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000566 // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
567 // first).
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000568 Module *M = IRB.GetInsertBlock()->getParent()->getParent();
569 auto GetStackPointerFn =
570 Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
571 Value *StackPointer = IRB.CreateCall(
572 GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000573
574 // Extract some entropy from the stack pointer for the tags.
575 // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
576 // between functions).
577 Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
578 Value *StackTag =
579 IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
580 "hwasan.stack.base.tag");
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000581 return StackTag;
582}
583
584Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
585 AllocaInst *AI, unsigned AllocaNo) {
586 if (ClGenerateTagsWithCalls)
587 return getNextTagWithCall(IRB);
588 return IRB.CreateXor(StackTag,
589 ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
590}
591
592Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
Alex Shlyapnikov788764c2018-06-29 20:20:17 +0000593 if (ClUARRetagToZero)
594 return ConstantInt::get(IntptrTy, 0);
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000595 if (ClGenerateTagsWithCalls)
596 return getNextTagWithCall(IRB);
597 return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
598}
599
Evgeniy Stepanov80ccda22018-02-09 00:59:10 +0000600// Add a tag to an address.
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000601Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
602 Value *PtrLong, Value *Tag) {
Evgeniy Stepanov80ccda22018-02-09 00:59:10 +0000603 Value *TaggedPtrLong;
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000604 if (CompileKernel) {
Evgeniy Stepanov80ccda22018-02-09 00:59:10 +0000605 // Kernel addresses have 0xFF in the most significant byte.
606 Value *ShiftedTag = IRB.CreateOr(
607 IRB.CreateShl(Tag, kPointerTagShift),
608 ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
609 TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
610 } else {
611 // Userspace can simply do OR (tag << 56);
612 Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
613 TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
614 }
615 return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
616}
617
Evgeniy Stepanov43271b12018-02-21 19:52:23 +0000618// Remove tag from an address.
619Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
620 Value *UntaggedPtrLong;
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000621 if (CompileKernel) {
Evgeniy Stepanov43271b12018-02-21 19:52:23 +0000622 // Kernel addresses have 0xFF in the most significant byte.
623 UntaggedPtrLong = IRB.CreateOr(PtrLong,
624 ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
625 } else {
626 // Userspace addresses have 0x00.
627 UntaggedPtrLong = IRB.CreateAnd(PtrLong,
628 ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
629 }
630 return UntaggedPtrLong;
631}
632
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000633bool HWAddressSanitizer::instrumentStack(
634 SmallVectorImpl<AllocaInst *> &Allocas,
635 SmallVectorImpl<Instruction *> &RetVec) {
636 Function *F = Allocas[0]->getParent()->getParent();
637 Instruction *InsertPt = &*F->getEntryBlock().begin();
638 IRBuilder<> IRB(InsertPt);
639
640 Value *StackTag = getStackBaseTag(IRB);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000641
642 // Ideally, we want to calculate tagged stack base pointer, and rewrite all
643 // alloca addresses using that. Unfortunately, offsets are not known yet
644 // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
645 // temp, shift-OR it into each alloca address and xor with the retag mask.
646 // This generates one extra instruction per alloca use.
647 for (unsigned N = 0; N < Allocas.size(); ++N) {
648 auto *AI = Allocas[N];
649 IRB.SetInsertPoint(AI->getNextNode());
650
651 // Replace uses of the alloca with tagged address.
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000652 Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
653 Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
Evgeniy Stepanov80ccda22018-02-09 00:59:10 +0000654 Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000655 std::string Name =
656 AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
Evgeniy Stepanov80ccda22018-02-09 00:59:10 +0000657 Replacement->setName(Name + ".hwasan");
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000658
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000659 for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000660 Use &U = *UI++;
661 if (U.getUser() != AILong)
662 U.set(Replacement);
663 }
664
665 tagAlloca(IRB, AI, Tag);
666
667 for (auto RI : RetVec) {
668 IRB.SetInsertPoint(RI);
669
670 // Re-tag alloca memory with the special UAR tag.
Evgeniy Stepanov080e0d42018-01-13 01:32:15 +0000671 Value *Tag = getUARTag(IRB, StackTag);
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000672 tagAlloca(IRB, AI, Tag);
673 }
674 }
675
676 return true;
677}
678
679bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
680 return (AI.getAllocatedType()->isSized() &&
681 // FIXME: instrument dynamic allocas, too
682 AI.isStaticAlloca() &&
683 // alloca() may be called with 0 size, ignore it.
684 getAllocaSizeInBytes(AI) > 0 &&
685 // We are only interested in allocas not promotable to registers.
686 // Promotable allocas are common under -O0.
687 !isAllocaPromotable(&AI) &&
688 // inalloca allocas are not treated as static, and we don't want
689 // dynamic alloca instrumentation for them as well.
690 !AI.isUsedWithInAlloca() &&
691 // swifterror allocas are register promoted by ISel
692 !AI.isSwiftError());
693}
694
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000695bool HWAddressSanitizer::runOnFunction(Function &F) {
696 if (&F == HwasanCtorFunction)
697 return false;
698
699 if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
700 return false;
701
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000702 LLVM_DEBUG(dbgs() << "Function: " << F.getName() << "\n");
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000703
704 initializeCallbacks(*F.getParent());
705
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000706 assert(!LocalDynamicShadow);
707 maybeInsertDynamicShadowAtFunctionEntry(F);
708
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000709 bool Changed = false;
710 SmallVector<Instruction*, 16> ToInstrument;
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000711 SmallVector<AllocaInst*, 8> AllocasToInstrument;
712 SmallVector<Instruction*, 8> RetVec;
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000713 for (auto &BB : F) {
714 for (auto &Inst : BB) {
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000715 if (ClInstrumentStack)
716 if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
717 // Realign all allocas. We don't want small uninteresting allocas to
718 // hide in instrumented alloca's padding.
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000719 if (AI->getAlignment() < Mapping.getAllocaAlignment())
720 AI->setAlignment(Mapping.getAllocaAlignment());
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000721 // Instrument some of them.
722 if (isInterestingAlloca(*AI))
723 AllocasToInstrument.push_back(AI);
724 continue;
725 }
726
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000727 if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
728 isa<CleanupReturnInst>(Inst))
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000729 RetVec.push_back(&Inst);
730
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000731 Value *MaybeMask = nullptr;
732 bool IsWrite;
733 unsigned Alignment;
734 uint64_t TypeSize;
735 Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
736 &Alignment, &MaybeMask);
737 if (Addr || isa<MemIntrinsic>(Inst))
738 ToInstrument.push_back(&Inst);
739 }
740 }
741
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000742 if (!AllocasToInstrument.empty())
743 Changed |= instrumentStack(AllocasToInstrument, RetVec);
744
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000745 for (auto Inst : ToInstrument)
746 Changed |= instrumentMemAccess(Inst);
747
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000748 LocalDynamicShadow = nullptr;
749
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000750 return Changed;
751}
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000752
753void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
754 const bool IsAndroid = TargetTriple.isAndroid();
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000755 const bool IsAndroidWithIfuncSupport =
756 IsAndroid && !TargetTriple.isAndroidVersionLT(21);
757
758 Scale = kDefaultShadowScale;
Evgeniy Stepanov453e7ac2018-08-10 16:21:37 +0000759 const bool WithIfunc = ClWithIfunc.getNumOccurrences() > 0
760 ? ClWithIfunc
761 : IsAndroidWithIfuncSupport;
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000762
Evgeniy Stepanov453e7ac2018-08-10 16:21:37 +0000763 if (ClMappingOffset.getNumOccurrences() > 0) {
764 InGlobal = false;
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000765 Offset = ClMappingOffset;
Evgeniy Stepanov453e7ac2018-08-10 16:21:37 +0000766 } else if (ClEnableKhwasan || ClInstrumentWithCalls) {
767 InGlobal = false;
768 Offset = 0;
769 } else if (WithIfunc) {
770 InGlobal = true;
771 Offset = kDynamicShadowSentinel;
772 } else {
773 InGlobal = false;
774 Offset = kDynamicShadowSentinel;
775 }
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000776}