//===- HWAddressSanitizer.cpp - memory access error detector --------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file is a part of HWAddressSanitizer, an address sanity checker
/// based on tagged addressing.
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"

using namespace llvm;

#define DEBUG_TYPE "hwasan"

static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
static const char *const kHwasanInitName = "__hwasan_init";

static const char *const kHwasanShadowMemoryDynamicAddress =
    "__hwasan_shadow_memory_dynamic_address";

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const size_t kDefaultShadowScale = 4;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
static const unsigned kPointerTagShift = 56;

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "hwasan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__hwasan_"));

static cl::opt<bool>
    ClInstrumentWithCalls("hwasan-instrument-with-calls",
                          cl::desc("instrument reads and writes with callbacks"),
                          cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "hwasan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClRecover(
    "hwasan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool> ClEnableKhwasan(
    "hwasan-kernel",
    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

// These flags allow changing the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//    Shadow = (Mem >> scale) + offset

static cl::opt<unsigned long long> ClMappingOffset(
    "hwasan-mapping-offset",
    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"), cl::Hidden,
    cl::init(0));

namespace {

/// \brief An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  explicit HWAddressSanitizer(bool CompileKernel = false, bool Recover = false)
      : FunctionPass(ID) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
        ClEnableKhwasan : CompileKernel;
  }

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool runOnFunction(Function &F) override;
  bool doInitialization(Module &M) override;

  void initializeCallbacks(Module &M);

  void maybeInsertDynamicShadowAtFunctionEntry(Function &F);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB);
  void instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  bool instrumentMemAccess(Instruction *I);
  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
                                   uint64_t *TypeSize, unsigned *Alignment,
                                   Value **MaybeMask);

  bool isInterestingAlloca(const AllocaInst &AI);
  bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  bool instrumentStack(SmallVectorImpl<AllocaInst *> &Allocas,
                       SmallVectorImpl<Instruction *> &RetVec);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

private:
  LLVMContext *C;
  Triple TargetTriple;

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;

    void init(Triple &TargetTriple);
    unsigned getAllocaAlignment() const { return 1U << Scale; }
  };
  ShadowMapping Mapping;

  Type *IntptrTy;
  Type *Int8Ty;

  bool CompileKernel;
  bool Recover;

  Function *HwasanCtorFunction;

  Function *HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  Function *HwasanMemoryAccessCallbackSized[2];

  Function *HwasanTagMemoryFunc;
  Function *HwasanGenerateTagFunc;

  Constant *ShadowGlobal;

  Value *LocalDynamicShadow = nullptr;
};

} // end anonymous namespace

char HWAddressSanitizer::ID = 0;

INITIALIZE_PASS_BEGIN(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_END(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)

FunctionPass *llvm::createHWAddressSanitizerPass(bool CompileKernel,
                                                 bool Recover) {
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizer(CompileKernel, Recover);
}

/// \brief Module-level initialization.
///
/// Inserts a call to __hwasan_init into the module's constructor list.
bool HWAddressSanitizer::doInitialization(Module &M) {
  DEBUG(dbgs() << "Init " << M.getName() << "\n");
  auto &DL = M.getDataLayout();

  TargetTriple = Triple(M.getTargetTriple());

  Mapping.init(TargetTriple);

  C = &(M.getContext());
  IRBuilder<> IRB(*C);
  IntptrTy = IRB.getIntPtrTy(DL);
  Int8Ty = IRB.getInt8Ty();

  HwasanCtorFunction = nullptr;
  if (!CompileKernel) {
    std::tie(HwasanCtorFunction, std::ignore) =
        createSanitizerCtorAndInitFunctions(M, kHwasanModuleCtorName,
                                            kHwasanInitName,
                                            /*InitArgTypes=*/{},
                                            /*InitArgs=*/{});
    appendToGlobalCtors(M, HwasanCtorFunction, 0);
  }
  return true;
}

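// Declare the runtime functions used by the instrumented code: the
// memory-access check callbacks (by default __hwasan_{load,store}{1,2,4,8,16,N},
// with a _noabort suffix in recovery mode), __hwasan_tag_memory,
// __hwasan_generate_tag and, for the ifunc-based mapping, __hwasan_shadow.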
void HWAddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";
    const std::string EndingStr = Recover ? "_noabort" : "";

    HwasanMemoryAccessCallbackSized[AccessIsWrite] =
        checkSanitizerInterfaceFunction(M.getOrInsertFunction(
            ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
            FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false)));

    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          checkSanitizerInterfaceFunction(M.getOrInsertFunction(
              ClMemoryAccessCallbackPrefix + TypeStr +
                  itostr(1ULL << AccessSizeIndex) + EndingStr,
              FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false)));
    }
  }

  HwasanTagMemoryFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
      "__hwasan_tag_memory", IRB.getVoidTy(), IntptrTy, Int8Ty, IntptrTy));
  HwasanGenerateTagFunc = checkSanitizerInterfaceFunction(
      M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty));

  if (Mapping.InGlobal)
    ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
                                       ArrayType::get(IRB.getInt8Ty(), 0));
}

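// If the shadow base is not a compile-time constant, materialize it at
// function entry: either through an opaque use of the __hwasan_shadow global
// (ifunc-based mapping) or by loading __hwasan_shadow_memory_dynamic_address.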
void HWAddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
  // Generate code only when dynamic addressing is needed.
  if (Mapping.Offset != kDynamicShadowSentinel)
    return;

  IRBuilder<> IRB(&F.front().front());
  if (Mapping.InGlobal) {
    // An empty inline asm with input reg == output reg.
    // An opaque pointer-to-int cast, basically.
    InlineAsm *Asm = InlineAsm::get(
        FunctionType::get(IntptrTy, {ShadowGlobal->getType()}, false),
        StringRef(""), StringRef("=r,0"),
        /*hasSideEffects=*/false);
    LocalDynamicShadow = IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
  } else {
    Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
        kHwasanShadowMemoryDynamicAddress, IntptrTy);
    LocalDynamicShadow = IRB.CreateLoad(GlobalDynamicAddress);
  }
}

Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
                                                     bool *IsWrite,
                                                     uint64_t *TypeSize,
                                                     unsigned *Alignment,
                                                     Value **MaybeMask) {
  // Skip memory accesses inserted by other instrumentation.
  if (I->getMetadata("nosanitize")) return nullptr;

  // Do not instrument the load fetching the dynamic shadow address.
  if (LocalDynamicShadow == I)
    return nullptr;

  Value *PtrOperand = nullptr;
  const DataLayout &DL = I->getModule()->getDataLayout();
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return nullptr;
    *IsWrite = false;
    *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
    *Alignment = LI->getAlignment();
    PtrOperand = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
    *Alignment = SI->getAlignment();
    PtrOperand = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
    *Alignment = 0;
    PtrOperand = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
    *Alignment = 0;
    PtrOperand = XCHG->getPointerOperand();
  }

  if (PtrOperand) {
    // Do not instrument accesses from different address spaces; we cannot deal
    // with them.
    Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
    if (PtrTy->getPointerAddressSpace() != 0)
      return nullptr;

    // Ignore swifterror addresses.
    // swifterror memory addresses are mem2reg promoted by instruction
    // selection. As such they cannot have regular uses like an instrumentation
    // function and it makes no sense to track them as memory.
    if (PtrOperand->isSwiftError())
      return nullptr;
  }

  return PtrOperand;
}

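// Return the operand index of the accessed pointer for the memory
// instructions handled by this pass.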
static unsigned getPointerOperandIndex(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperandIndex();
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperandIndex();
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
    return RMW->getPointerOperandIndex();
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
    return XCHG->getPointerOperandIndex();
  report_fatal_error("Unexpected instruction");
  return -1;
}

static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

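// On targets without hardware top-byte-ignore (i.e. everything but AArch64),
// the actual memory access must use an untagged pointer, so rewrite the
// instruction's pointer operand to its untagged form.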
void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
  if (TargetTriple.isAArch64())
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}

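// Translate an application address into a shadow address:
//   Shadow = (Mem >> Scale) + the static offset or the dynamically loaded base.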
Value *HWAddressSanitizer::memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB) {
  // Mem >> Scale
  Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
  if (Mapping.Offset == 0)
    return Shadow;
  // (Mem >> Scale) + Offset
  Value *ShadowBase;
  if (LocalDynamicShadow)
    ShadowBase = LocalDynamicShadow;
  else
    ShadowBase = ConstantInt::get(Ty, Mapping.Offset);
  return IRB.CreateAdd(Shadow, ShadowBase);
}

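// Emit the inline check sequence: extract the tag from the pointer, load the
// corresponding shadow tag, and on a mismatch (optionally ignoring the
// match-all tag) branch to an architecture-specific trapping inline-asm
// sequence whose immediate encodes the access type and size for the runtime.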
void HWAddressSanitizer::instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
  IRBuilder<> IRB(InsertBefore);
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *ShadowLong = memToShadow(AddrLong, PtrLong->getType(), IRB);
  Value *MemTag =
      IRB.CreateLoad(IRB.CreateIntToPtr(ShadowLong, IRB.getInt8PtrTy()));
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
      ClMatchAllTag : (CompileKernel ? 0xFF : -1);
  if (matchAllTag != -1) {
    Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
        ConstantInt::get(PtrTag->getType(), matchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  TerminatorInst *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
    case Triple::x86_64:
      // The signal handler will find the data address in rdi.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
          "{rdi}",
          /*hasSideEffects=*/true);
      break;
    case Triple::aarch64:
    case Triple::aarch64_be:
      // The signal handler will find the data address in x0.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "brk #" + itostr(0x900 + AccessInfo),
          "{x0}",
          /*hasSideEffects=*/true);
      break;
    default:
      report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
}

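// Instrument a single memory access: small, sufficiently aligned power-of-two
// sized accesses get the inline check (or a fixed-size callback under
// -hwasan-instrument-with-calls); everything else goes through the generic
// sized callback. Finally, untag the pointer operand where required.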
bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
  DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
  bool IsWrite = false;
  unsigned Alignment = 0;
  uint64_t TypeSize = 0;
  Value *MaybeMask = nullptr;
  Value *Addr =
      isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);

  if (!Addr)
    return false;

  if (MaybeMask)
    return false; // FIXME

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  if (isPowerOf2_64(TypeSize) &&
      (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
      (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
       Alignment >= TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
    if (ClInstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                     AddrLong);
    } else {
      instrumentMemAccessInline(AddrLong, IsWrite, AccessSizeIndex, I);
    }
  } else {
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
                   {AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8)});
  }
  untagPointerOperand(I, Addr);

  return true;
}

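// Compute the static size of an alloca in bytes (element size times the
// constant array size).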
static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  Type *Ty = AI.getAllocatedType();
  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  return SizeInBytes * ArraySize;
}

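// Set the shadow memory of an alloca to the given tag, rounding the size up
// to the alloca alignment. Uses __hwasan_tag_memory under
// -hwasan-instrument-with-calls and an inline memset of the shadow otherwise.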
bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
                                   Value *Tag) {
  size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
                ~(Mapping.getAllocaAlignment() - 1);

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (ClInstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, IntptrTy), JustTag,
                    ConstantInt::get(IntptrTy, Size)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = IRB.CreateIntToPtr(
        memToShadow(IRB.CreatePointerCast(AI, IntptrTy), AI->getType(), IRB),
        IRB.getInt8PtrTy());
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks if
    // the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
  }
  return true;
}

static unsigned RetagMask(unsigned AllocaNo) {
  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for these
  // masks.
  // The list does not include the value 255, which is used for UAR.
  static unsigned FastMasks[] = {
      0,   1,   2,   3,   4,   6,   7,   8,   12,  14,  15,  16,  24,
      28,  30,  31,  32,  48,  56,  60,  62,  63,  64,  96,  112, 120,
      124, 126, 127, 128, 192, 224, 240, 248, 252, 254};
  return FastMasks[AllocaNo % (sizeof(FastMasks) / sizeof(FastMasks[0]))];
}

Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
  return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
}

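// Compute the per-function base tag for stack allocas by mixing bits of the
// frame address; returns null when tags are generated with runtime calls
// instead.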
Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return nullptr;
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn =
      Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
                    "hwasan.stack.base.tag");
  return StackTag;
}

Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
                                        AllocaInst *AI, unsigned AllocaNo) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag,
                       ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
}

Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
}

// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    Value *ShiftedTag = IRB.CreateOr(
        IRB.CreateShl(Tag, kPointerTagShift),
        ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace can simply OR in the shifted tag: (tag << kPointerTagShift).
    Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}

// Remove tag from an address.
Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
  Value *UntaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    UntaggedPtrLong = IRB.CreateOr(PtrLong,
        ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
  } else {
    // Userspace addresses have 0x00.
    UntaggedPtrLong = IRB.CreateAnd(PtrLong,
        ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
  }
  return UntaggedPtrLong;
}

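// Instrument the stack: give each interesting alloca a distinct tag, rewrite
// its uses to the tagged address, tag its shadow, and re-tag the shadow with
// the use-after-return (UAR) tag before every return-like instruction.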
bool HWAddressSanitizer::instrumentStack(
    SmallVectorImpl<AllocaInst *> &Allocas,
    SmallVectorImpl<Instruction *> &RetVec) {
  Function *F = Allocas[0]->getParent()->getParent();
  Instruction *InsertPt = &*F->getEntryBlock().begin();
  IRBuilder<> IRB(InsertPt);

  Value *StackTag = getStackBaseTag(IRB);

  // Ideally, we would calculate the tagged stack base pointer and rewrite all
  // alloca addresses using that. Unfortunately, the offsets are not known yet
  // (unless we use an ASan-style mega-alloca). Instead we keep the base tag in
  // a temp, xor it with the per-alloca retag mask, and shift-OR the result
  // into each alloca address. This generates one extra instruction per alloca
  // use.
  for (unsigned N = 0; N < Allocas.size(); ++N) {
    auto *AI = Allocas[N];
    IRB.SetInsertPoint(AI->getNextNode());

    // Replace uses of the alloca with tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
      Use &U = *UI++;
      if (U.getUser() != AILong)
        U.set(Replacement);
    }

    tagAlloca(IRB, AI, Tag);

    for (auto RI : RetVec) {
      IRB.SetInsertPoint(RI);

      // Re-tag alloca memory with the special UAR tag.
      Value *Tag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, Tag);
    }
  }

  return true;
}

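// Only static, non-zero-sized allocas that are not promotable to registers
// (and are neither inalloca nor swifterror) are instrumented.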
bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  return (AI.getAllocatedType()->isSized() &&
          // FIXME: instrument dynamic allocas, too
          AI.isStaticAlloca() &&
          // alloca() may be called with 0 size, ignore it.
          getAllocaSizeInBytes(AI) > 0 &&
          // We are only interested in allocas not promotable to registers.
          // Promotable allocas are common under -O0.
          !isAllocaPromotable(&AI) &&
          // inalloca allocas are not treated as static, and we don't want
          // dynamic alloca instrumentation for them as well.
          !AI.isUsedWithInAlloca() &&
          // swifterror allocas are register promoted by ISel
          !AI.isSwiftError());
}

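// Per-function driver: collect interesting allocas, return-like instructions
// and memory accesses in a single scan over the function, then instrument the
// stack and the collected accesses.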
bool HWAddressSanitizer::runOnFunction(Function &F) {
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  initializeCallbacks(*F.getParent());

  assert(!LocalDynamicShadow);
  maybeInsertDynamicShadowAtFunctionEntry(F);

  bool Changed = false;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<AllocaInst*, 8> AllocasToInstrument;
  SmallVector<Instruction*, 8> RetVec;
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (ClInstrumentStack)
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          // Realign all allocas. We don't want small uninteresting allocas to
          // hide in an instrumented alloca's padding.
          if (AI->getAlignment() < Mapping.getAllocaAlignment())
            AI->setAlignment(Mapping.getAllocaAlignment());
          // Instrument some of them.
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.push_back(AI);
          continue;
        }

      if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
          isa<CleanupReturnInst>(Inst))
        RetVec.push_back(&Inst);

      Value *MaybeMask = nullptr;
      bool IsWrite;
      unsigned Alignment;
      uint64_t TypeSize;
      Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
                                              &Alignment, &MaybeMask);
      if (Addr || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  if (!AllocasToInstrument.empty())
    Changed |= instrumentStack(AllocasToInstrument, RetVec);

  for (auto Inst : ToInstrument)
    Changed |= instrumentMemAccess(Inst);

  LocalDynamicShadow = nullptr;

  return Changed;
}

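// Pick the shadow mapping parameters for the target: the default scale, and
// either a dynamically located shadow base (through the __hwasan_shadow
// global on Android API 21+) or a constant offset, which can be overridden
// with -hwasan-mapping-offset.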
void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
  const bool IsAndroid = TargetTriple.isAndroid();
  const bool IsAndroidWithIfuncSupport =
      IsAndroid && !TargetTriple.isAndroidVersionLT(21);

  Scale = kDefaultShadowScale;

  if (ClEnableKhwasan || ClInstrumentWithCalls || !IsAndroidWithIfuncSupport)
    Offset = 0;
  else
    Offset = kDynamicShadowSentinel;
  if (ClMappingOffset.getNumOccurrences() > 0)
    Offset = ClMappingOffset;

  InGlobal = IsAndroidWithIfuncSupport;
}