//===- HWAddressSanitizer.cpp - detector of tagged-pointer memory bugs ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file is a part of HWAddressSanitizer, an address sanity checker
/// based on tagged addressing.
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"

using namespace llvm;

#define DEBUG_TYPE "hwasan"

static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
static const char *const kHwasanInitName = "__hwasan_init";

static const char *const kHwasanShadowMemoryDynamicAddress =
    "__hwasan_shadow_memory_dynamic_address";

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const size_t kDefaultShadowScale = 4;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
static const unsigned kPointerTagShift = 56;
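// Note: the pointer tag occupies the top byte of a 64-bit pointer
// (bits 56..63). On AArch64, top-byte-ignore (TBI) lets tagged pointers be
// dereferenced directly; on other targets untagPointerOperand() strips the
// tag before the access.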

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "hwasan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__hwasan_"));

static cl::opt<bool>
    ClInstrumentWithCalls("hwasan-instrument-with-calls",
                          cl::desc("instrument reads and writes with callbacks"),
                          cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "hwasan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClRecover(
    "hwasan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClUARRetagToZero(
    "hwasan-uar-retag-to-zero",
    cl::desc("Clear alloca tags before returning from the function to allow "
             "mixing instrumented and non-instrumented function calls. When "
             "set to false, allocas are retagged before returning from the "
             "function to detect use after return."),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool> ClEnableKhwasan(
    "hwasan-kernel",
    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

// These flags allow changing the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//    Shadow = (Mem >> scale) + offset
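//
// For example, with the default scale of 4 each shadow byte describes a
// 16-byte granule of application memory, so tagging an aligned 64-byte
// alloca writes 4 shadow bytes.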

static cl::opt<unsigned long long> ClMappingOffset(
    "hwasan-mapping-offset",
    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"), cl::Hidden,
    cl::init(0));

static cl::opt<bool>
    ClWithIfunc("hwasan-with-ifunc",
                cl::desc("Access dynamic shadow through an ifunc global on "
                         "platforms that support this"),
                cl::Hidden, cl::init(false));

namespace {

/// An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  explicit HWAddressSanitizer(bool CompileKernel = false, bool Recover = false)
      : FunctionPass(ID) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
        ClEnableKhwasan : CompileKernel;
  }

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool runOnFunction(Function &F) override;
  bool doInitialization(Module &M) override;

  void initializeCallbacks(Module &M);

  void maybeInsertDynamicShadowAtFunctionEntry(Function &F);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB);
  void instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  bool instrumentMemAccess(Instruction *I);
  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
                                   uint64_t *TypeSize, unsigned *Alignment,
                                   Value **MaybeMask);

  bool isInterestingAlloca(const AllocaInst &AI);
  bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  bool instrumentStack(SmallVectorImpl<AllocaInst *> &Allocas,
                       SmallVectorImpl<Instruction *> &RetVec);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

private:
  LLVMContext *C;
  Triple TargetTriple;

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;

    void init(Triple &TargetTriple);
    unsigned getAllocaAlignment() const { return 1U << Scale; }
  };
  ShadowMapping Mapping;

  Type *IntptrTy;
  Type *Int8PtrTy;
  Type *Int8Ty;

  bool CompileKernel;
  bool Recover;

  Function *HwasanCtorFunction;

  Function *HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  Function *HwasanMemoryAccessCallbackSized[2];

  Function *HwasanTagMemoryFunc;
  Function *HwasanGenerateTagFunc;

  Constant *ShadowGlobal;

  Value *LocalDynamicShadow = nullptr;
};

} // end anonymous namespace

char HWAddressSanitizer::ID = 0;

INITIALIZE_PASS_BEGIN(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_END(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)

FunctionPass *llvm::createHWAddressSanitizerPass(bool CompileKernel,
                                                 bool Recover) {
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizer(CompileKernel, Recover);
}

/// Module-level initialization.
///
/// Inserts a call to __hwasan_init into the module's constructor list.
| 243 | bool HWAddressSanitizer::doInitialization(Module &M) { |
Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 244 | LLVM_DEBUG(dbgs() << "Init " << M.getName() << "\n"); |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 245 | auto &DL = M.getDataLayout(); |
| 246 | |
Alex Shlyapnikov | 83e7841 | 2018-03-23 17:57:54 +0000 | [diff] [blame] | 247 | TargetTriple = Triple(M.getTargetTriple()); |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 248 | |
Alex Shlyapnikov | 99cf54b | 2018-04-20 20:04:04 +0000 | [diff] [blame] | 249 | Mapping.init(TargetTriple); |
| 250 | |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 251 | C = &(M.getContext()); |
| 252 | IRBuilder<> IRB(*C); |
| 253 | IntptrTy = IRB.getIntPtrTy(DL); |
Evgeniy Stepanov | a265a13 | 2018-08-15 00:39:35 +0000 | [diff] [blame^] | 254 | Int8PtrTy = IRB.getInt8PtrTy(); |
Evgeniy Stepanov | 99fa3e7 | 2018-01-11 22:53:30 +0000 | [diff] [blame] | 255 | Int8Ty = IRB.getInt8Ty(); |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 256 | |
Benjamin Kramer | bfc1d97 | 2018-01-18 14:19:04 +0000 | [diff] [blame] | 257 | HwasanCtorFunction = nullptr; |
Andrey Konovalov | 1ba9d9c | 2018-04-13 18:05:21 +0000 | [diff] [blame] | 258 | if (!CompileKernel) { |
Evgeniy Stepanov | 5bd669d | 2018-01-17 23:24:38 +0000 | [diff] [blame] | 259 | std::tie(HwasanCtorFunction, std::ignore) = |
| 260 | createSanitizerCtorAndInitFunctions(M, kHwasanModuleCtorName, |
| 261 | kHwasanInitName, |
| 262 | /*InitArgTypes=*/{}, |
| 263 | /*InitArgs=*/{}); |
| 264 | appendToGlobalCtors(M, HwasanCtorFunction, 0); |
| 265 | } |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 266 | return true; |
| 267 | } |
| 268 | |
| 269 | void HWAddressSanitizer::initializeCallbacks(Module &M) { |
| 270 | IRBuilder<> IRB(*C); |
| 271 | for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) { |
| 272 | const std::string TypeStr = AccessIsWrite ? "store" : "load"; |
Evgeniy Stepanov | 3fd1b1a | 2017-12-20 19:05:44 +0000 | [diff] [blame] | 273 | const std::string EndingStr = Recover ? "_noabort" : ""; |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 274 | |
| 275 | HwasanMemoryAccessCallbackSized[AccessIsWrite] = |
| 276 | checkSanitizerInterfaceFunction(M.getOrInsertFunction( |
Evgeniy Stepanov | c07e0bd | 2018-01-16 23:15:08 +0000 | [diff] [blame] | 277 | ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr, |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 278 | FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false))); |
| 279 | |
| 280 | for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes; |
| 281 | AccessSizeIndex++) { |
| 282 | HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] = |
| 283 | checkSanitizerInterfaceFunction(M.getOrInsertFunction( |
| 284 | ClMemoryAccessCallbackPrefix + TypeStr + |
Evgeniy Stepanov | 3fd1b1a | 2017-12-20 19:05:44 +0000 | [diff] [blame] | 285 | itostr(1ULL << AccessSizeIndex) + EndingStr, |
Evgeniy Stepanov | c667c1f | 2017-12-09 00:21:41 +0000 | [diff] [blame] | 286 | FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false))); |
| 287 | } |
| 288 | } |
Evgeniy Stepanov | 99fa3e7 | 2018-01-11 22:53:30 +0000 | [diff] [blame] | 289 | |
| 290 | HwasanTagMemoryFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction( |
Evgeniy Stepanov | a265a13 | 2018-08-15 00:39:35 +0000 | [diff] [blame^] | 291 | "__hwasan_tag_memory", IRB.getVoidTy(), Int8PtrTy, Int8Ty, IntptrTy)); |
Evgeniy Stepanov | 080e0d4 | 2018-01-13 01:32:15 +0000 | [diff] [blame] | 292 | HwasanGenerateTagFunc = checkSanitizerInterfaceFunction( |
| 293 | M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty)); |
Alex Shlyapnikov | 99cf54b | 2018-04-20 20:04:04 +0000 | [diff] [blame] | 294 | |
| 295 | if (Mapping.InGlobal) |
| 296 | ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow", |
| 297 | ArrayType::get(IRB.getInt8Ty(), 0)); |
| 298 | } |
| 299 | |
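// When the shadow base is not a compile-time constant (Offset ==
// kDynamicShadowSentinel), emit code at function entry that computes it:
// either an opaque use of the __hwasan_shadow ifunc global, or a load of
// __hwasan_shadow_memory_dynamic_address (expected to be set up by the
// runtime).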
void HWAddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
  // Generate code only when dynamic addressing is needed.
  if (Mapping.Offset != kDynamicShadowSentinel)
    return;

  IRBuilder<> IRB(&F.front().front());
  if (Mapping.InGlobal) {
    // An empty inline asm with input reg == output reg.
    // An opaque pointer-to-int cast, basically.
    InlineAsm *Asm = InlineAsm::get(
        FunctionType::get(IntptrTy, {ShadowGlobal->getType()}, false),
        StringRef(""), StringRef("=r,0"),
        /*hasSideEffects=*/false);
    LocalDynamicShadow = IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
  } else {
    Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
        kHwasanShadowMemoryDynamicAddress, IntptrTy);
    LocalDynamicShadow = IRB.CreateLoad(GlobalDynamicAddress);
  }
}

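// Returns the pointer operand of a memory access that should be instrumented,
// or nullptr if the access is not interesting. On success, the out-parameters
// describe the access; MaybeMask is never set here (masked accesses are not
// handled yet, see the FIXME in instrumentMemAccess).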
Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
                                                     bool *IsWrite,
                                                     uint64_t *TypeSize,
                                                     unsigned *Alignment,
                                                     Value **MaybeMask) {
  // Skip memory accesses inserted by another instrumentation.
  if (I->getMetadata("nosanitize")) return nullptr;

  // Do not instrument the load fetching the dynamic shadow address.
  if (LocalDynamicShadow == I)
    return nullptr;

  Value *PtrOperand = nullptr;
  const DataLayout &DL = I->getModule()->getDataLayout();
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return nullptr;
    *IsWrite = false;
    *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
    *Alignment = LI->getAlignment();
    PtrOperand = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
    *Alignment = SI->getAlignment();
    PtrOperand = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
    *Alignment = 0;
    PtrOperand = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
    *Alignment = 0;
    PtrOperand = XCHG->getPointerOperand();
  }

  if (PtrOperand) {
    // Do not instrument accesses from different address spaces; we cannot deal
    // with them.
    Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
    if (PtrTy->getPointerAddressSpace() != 0)
      return nullptr;

    // Ignore swifterror addresses.
    // swifterror memory addresses are mem2reg promoted by instruction
    // selection. As such they cannot have regular uses like an instrumentation
    // function and it makes no sense to track them as memory.
    if (PtrOperand->isSwiftError())
      return nullptr;
  }

  return PtrOperand;
}

static unsigned getPointerOperandIndex(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperandIndex();
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperandIndex();
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
    return RMW->getPointerOperandIndex();
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
    return XCHG->getPointerOperandIndex();
  report_fatal_error("Unexpected instruction");
  return -1;
}

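// Map an access size in bits to an index into the per-size callback arrays:
// 8 -> 0, 16 -> 1, 32 -> 2, 64 -> 3, 128 -> 4.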
static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
  if (TargetTriple.isAArch64())
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}

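// Compute the address of the shadow byte corresponding to Mem:
// (Mem >> Scale) plus either the constant Offset or the per-function dynamic
// shadow base.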
Value *HWAddressSanitizer::memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB) {
  // Mem >> Scale
  Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
  if (Mapping.Offset == 0)
    return Shadow;
  // (Mem >> Scale) + Offset
  Value *ShadowBase;
  if (LocalDynamicShadow)
    ShadowBase = LocalDynamicShadow;
  else
    ShadowBase = ConstantInt::get(Ty, Mapping.Offset);
  return IRB.CreateAdd(Shadow, ShadowBase);
}

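// Emit an inline tag check for a single memory access: compare the pointer's
// top-byte tag with the shadow byte of the untagged address and, on mismatch,
// branch to a trapping inline-asm sequence whose immediate encodes the access
// type and size (AccessInfo) for the runtime's signal handler.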
void HWAddressSanitizer::instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
  IRBuilder<> IRB(InsertBefore);
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *ShadowLong = memToShadow(AddrLong, PtrLong->getType(), IRB);
  Value *MemTag = IRB.CreateLoad(IRB.CreateIntToPtr(ShadowLong, Int8PtrTy));
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
      ClMatchAllTag : (CompileKernel ? 0xFF : -1);
  if (matchAllTag != -1) {
    Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
        ConstantInt::get(PtrTag->getType(), matchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  TerminatorInst *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
    case Triple::x86_64:
      // The signal handler will find the data address in rdi.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
          "{rdi}",
          /*hasSideEffects=*/true);
      break;
    case Triple::aarch64:
    case Triple::aarch64_be:
      // The signal handler will find the data address in x0.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "brk #" + itostr(0x900 + AccessInfo),
          "{x0}",
          /*hasSideEffects=*/true);
      break;
    default:
      report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
}

bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
  LLVM_DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
  bool IsWrite = false;
  unsigned Alignment = 0;
  uint64_t TypeSize = 0;
  Value *MaybeMask = nullptr;
  Value *Addr =
      isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);

  if (!Addr)
    return false;

  if (MaybeMask)
    return false; // FIXME

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  if (isPowerOf2_64(TypeSize) &&
      (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
      (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
       Alignment >= TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
    if (ClInstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                     AddrLong);
    } else {
      instrumentMemAccessInline(AddrLong, IsWrite, AccessSizeIndex, I);
    }
  } else {
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
                   {AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8)});
  }
  untagPointerOperand(I, Addr);

  return true;
}

static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  Type *Ty = AI.getAllocatedType();
  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  return SizeInBytes * ArraySize;
}

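// Store Tag into every shadow byte covering AI, with the size rounded up to
// the alloca alignment (one shadow byte per 2^Scale bytes of memory).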
bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
                                   Value *Tag) {
  size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
                ~(Mapping.getAllocaAlignment() - 1);

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (ClInstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, Int8PtrTy), JustTag,
                    ConstantInt::get(IntptrTy, Size)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = IRB.CreateIntToPtr(
        memToShadow(IRB.CreatePointerCast(AI, IntptrTy), AI->getType(), IRB),
        Int8PtrTy);
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks if
    // the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
  }
  return true;
}

static unsigned RetagMask(unsigned AllocaNo) {
  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for these
  // masks.
  // The list does not include the value 255, which is used for UAR.
  static unsigned FastMasks[] = {
      0,   1,   2,   3,   4,   6,   7,   8,   12,  14,  15,  16,  24,
      28,  30,  31,  32,  48,  56,  60,  62,  63,  64,  96,  112, 120,
      124, 126, 127, 128, 192, 224, 240, 248, 252, 254};
  return FastMasks[AllocaNo % (sizeof(FastMasks) / sizeof(FastMasks[0]))];
}

Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
  return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
}

Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return nullptr;
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn =
      Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
                    "hwasan.stack.base.tag");
  return StackTag;
}

Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
                                        AllocaInst *AI, unsigned AllocaNo) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag,
                       ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
}

Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
  if (ClUARRetagToZero)
    return ConstantInt::get(IntptrTy, 0);
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
}

// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    Value *ShiftedTag = IRB.CreateOr(
        IRB.CreateShl(Tag, kPointerTagShift),
        ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace can simply do OR (tag << 56).
    Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}

// Remove tag from an address.
Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
  Value *UntaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    UntaggedPtrLong = IRB.CreateOr(PtrLong,
        ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
  } else {
    // Userspace addresses have 0x00.
    UntaggedPtrLong = IRB.CreateAnd(PtrLong,
        ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
  }
  return UntaggedPtrLong;
}

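// For each interesting alloca: derive a tag from the per-function stack base
// tag, rewrite the alloca's users to go through the tagged address, tag the
// corresponding shadow, and re-tag it before every return (with the UAR tag,
// or with zero under -hwasan-uar-retag-to-zero) to catch use-after-return.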
bool HWAddressSanitizer::instrumentStack(
    SmallVectorImpl<AllocaInst *> &Allocas,
    SmallVectorImpl<Instruction *> &RetVec) {
  Function *F = Allocas[0]->getParent()->getParent();
  Instruction *InsertPt = &*F->getEntryBlock().begin();
  IRBuilder<> IRB(InsertPt);

  Value *StackTag = getStackBaseTag(IRB);

  // Ideally, we want to calculate tagged stack base pointer, and rewrite all
  // alloca addresses using that. Unfortunately, offsets are not known yet
  // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
  // temp, shift-OR it into each alloca address and xor with the retag mask.
  // This generates one extra instruction per alloca use.
  for (unsigned N = 0; N < Allocas.size(); ++N) {
    auto *AI = Allocas[N];
    IRB.SetInsertPoint(AI->getNextNode());

    // Replace uses of the alloca with tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
      Use &U = *UI++;
      if (U.getUser() != AILong)
        U.set(Replacement);
    }

    tagAlloca(IRB, AI, Tag);

    for (auto RI : RetVec) {
      IRB.SetInsertPoint(RI);

      // Re-tag alloca memory with the special UAR tag.
      Value *Tag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, Tag);
    }
  }

  return true;
}

bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  return (AI.getAllocatedType()->isSized() &&
          // FIXME: instrument dynamic allocas, too
          AI.isStaticAlloca() &&
          // alloca() may be called with 0 size, ignore it.
          getAllocaSizeInBytes(AI) > 0 &&
          // We are only interested in allocas not promotable to registers.
          // Promotable allocas are common under -O0.
          !isAllocaPromotable(&AI) &&
          // inalloca allocas are not treated as static, and we don't want
          // dynamic alloca instrumentation for them as well.
          !AI.isUsedWithInAlloca() &&
          // swifterror allocas are register promoted by ISel
          !AI.isSwiftError());
}

bool HWAddressSanitizer::runOnFunction(Function &F) {
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  LLVM_DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  initializeCallbacks(*F.getParent());

  assert(!LocalDynamicShadow);
  maybeInsertDynamicShadowAtFunctionEntry(F);

  bool Changed = false;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<AllocaInst*, 8> AllocasToInstrument;
  SmallVector<Instruction*, 8> RetVec;
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (ClInstrumentStack)
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          // Realign all allocas. We don't want small uninteresting allocas to
          // hide in instrumented alloca's padding.
          if (AI->getAlignment() < Mapping.getAllocaAlignment())
            AI->setAlignment(Mapping.getAllocaAlignment());
          // Instrument some of them.
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.push_back(AI);
          continue;
        }

      if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
          isa<CleanupReturnInst>(Inst))
        RetVec.push_back(&Inst);

      Value *MaybeMask = nullptr;
      bool IsWrite;
      unsigned Alignment;
      uint64_t TypeSize;
      Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
                                              &Alignment, &MaybeMask);
      if (Addr || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  if (!AllocasToInstrument.empty())
    Changed |= instrumentStack(AllocasToInstrument, RetVec);

  for (auto Inst : ToInstrument)
    Changed |= instrumentMemAccess(Inst);

  LocalDynamicShadow = nullptr;

  return Changed;
}

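// Select the shadow mapping scheme: a fixed offset if -hwasan-mapping-offset
// is given; offset 0 for the kernel or for callback-based instrumentation;
// the __hwasan_shadow ifunc global on Android >= 21 (or when forced with
// -hwasan-with-ifunc); otherwise a dynamic base loaded from
// __hwasan_shadow_memory_dynamic_address.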
void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
  const bool IsAndroid = TargetTriple.isAndroid();
  const bool IsAndroidWithIfuncSupport =
      IsAndroid && !TargetTriple.isAndroidVersionLT(21);

  Scale = kDefaultShadowScale;
  const bool WithIfunc = ClWithIfunc.getNumOccurrences() > 0
                             ? ClWithIfunc
                             : IsAndroidWithIfuncSupport;

  if (ClMappingOffset.getNumOccurrences() > 0) {
    InGlobal = false;
    Offset = ClMappingOffset;
  } else if (ClEnableKhwasan || ClInstrumentWithCalls) {
    InGlobal = false;
    Offset = 0;
  } else if (WithIfunc) {
    InGlobal = true;
    Offset = kDynamicShadowSentinel;
  } else {
    InGlobal = false;
    Offset = kDynamicShadowSentinel;
  }
}