//===-- ThreadSanitizer.cpp - race detector -------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer, a race detector.
//
// The tool is under development; for details about previous versions see
// http://code.google.com/p/data-race-test
//
// The instrumentation phase is quite simple:
//   - Insert calls to the run-time library before every memory access.
//   - Optimizations may apply to avoid instrumenting some of the accesses.
//   - Insert calls at function entry/exit.
// The rest is handled by the run-time library.
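//
// As a rough illustration only (a sketch, not the exact IR this pass emits),
// a 4-byte load such as
//   %v = load i32* %p
// ends up preceded by a run-time callback,
//   call void @__tsan_read4(i8* %p.i8)   ; %p.i8 is %p casted to i8*
//   %v = load i32* %p
// and instrumented functions additionally receive __tsan_func_entry /
// __tsan_func_exit calls at their entry and return points.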
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "tsan"

static cl::opt<bool> ClInstrumentMemoryAccesses(
    "tsan-instrument-memory-accesses", cl::init(true),
    cl::desc("Instrument memory accesses"), cl::Hidden);
static cl::opt<bool> ClInstrumentFuncEntryExit(
    "tsan-instrument-func-entry-exit", cl::init(true),
    cl::desc("Instrument function entry and exit"), cl::Hidden);
static cl::opt<bool> ClInstrumentAtomics(
    "tsan-instrument-atomics", cl::init(true),
    cl::desc("Instrument atomics"), cl::Hidden);
static cl::opt<bool> ClInstrumentMemIntrinsics(
    "tsan-instrument-memintrinsics", cl::init(true),
    cl::desc("Instrument memintrinsics (memset/memcpy/memmove)"), cl::Hidden);

STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumOmittedReadsBeforeWrite,
          "Number of reads ignored due to following writes");
STATISTIC(NumAccessesWithBadSize, "Number of accesses with bad size");
STATISTIC(NumInstrumentedVtableWrites, "Number of vtable ptr writes");
STATISTIC(NumInstrumentedVtableReads, "Number of vtable ptr reads");
STATISTIC(NumOmittedReadsFromConstantGlobals,
          "Number of reads from constant globals");
STATISTIC(NumOmittedReadsFromVtable, "Number of vtable reads");

namespace {

/// ThreadSanitizer: instrument the code in the module to find races.
struct ThreadSanitizer : public FunctionPass {
  ThreadSanitizer() : FunctionPass(ID), DL(nullptr) {}
  const char *getPassName() const override;
  bool runOnFunction(Function &F) override;
  bool doInitialization(Module &M) override;
  static char ID;  // Pass identification, replacement for typeid.

 private:
  void initializeCallbacks(Module &M);
  bool instrumentLoadOrStore(Instruction *I);
  bool instrumentAtomic(Instruction *I);
  bool instrumentMemIntrinsic(Instruction *I);
  void chooseInstructionsToInstrument(SmallVectorImpl<Instruction*> &Local,
                                      SmallVectorImpl<Instruction*> &All);
  bool addrPointsToConstantData(Value *Addr);
  int getMemoryAccessFuncIndex(Value *Addr);

  const DataLayout *DL;
  Type *IntptrTy;
  IntegerType *OrdTy;
  // Callbacks to the run-time library are computed in initializeCallbacks.
  Function *TsanFuncEntry;
  Function *TsanFuncExit;
  // Access sizes are powers of two: 1, 2, 4, 8, 16.
  static const size_t kNumberOfAccessSizes = 5;
  Function *TsanRead[kNumberOfAccessSizes];
  Function *TsanWrite[kNumberOfAccessSizes];
  Function *TsanAtomicLoad[kNumberOfAccessSizes];
  Function *TsanAtomicStore[kNumberOfAccessSizes];
  Function *TsanAtomicRMW[AtomicRMWInst::LAST_BINOP + 1][kNumberOfAccessSizes];
  Function *TsanAtomicCAS[kNumberOfAccessSizes];
  Function *TsanAtomicThreadFence;
  Function *TsanAtomicSignalFence;
  Function *TsanVptrUpdate;
  Function *TsanVptrLoad;
  Function *MemmoveFn, *MemcpyFn, *MemsetFn;
};
}  // namespace

char ThreadSanitizer::ID = 0;
INITIALIZE_PASS(ThreadSanitizer, "tsan",
    "ThreadSanitizer: detects data races.",
    false, false)

const char *ThreadSanitizer::getPassName() const {
  return "ThreadSanitizer";
}

FunctionPass *llvm::createThreadSanitizerPass() {
  return new ThreadSanitizer();
}

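// getOrInsertFunction hands back a bitcast Constant rather than a Function
// when a symbol with the same name but a different type already exists in
// the module; treat that as a broken TSan interface and bail out.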
static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (Function *F = dyn_cast<Function>(FuncOrBitcast))
    return F;
  FuncOrBitcast->dump();
  report_fatal_error("ThreadSanitizer interface function redefined");
}

void ThreadSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(M.getContext());
  // Initialize the callbacks.
  TsanFuncEntry = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_entry", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
  TsanFuncExit = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_exit", IRB.getVoidTy(), NULL));
  OrdTy = IRB.getInt32Ty();
  for (size_t i = 0; i < kNumberOfAccessSizes; ++i) {
    const size_t ByteSize = 1 << i;
    const size_t BitSize = ByteSize * 8;
    SmallString<32> ReadName("__tsan_read" + itostr(ByteSize));
    TsanRead[i] = checkInterfaceFunction(M.getOrInsertFunction(
        ReadName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    SmallString<32> WriteName("__tsan_write" + itostr(ByteSize));
    TsanWrite[i] = checkInterfaceFunction(M.getOrInsertFunction(
        WriteName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    Type *Ty = Type::getIntNTy(M.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    SmallString<32> AtomicLoadName("__tsan_atomic" + itostr(BitSize) +
                                   "_load");
    TsanAtomicLoad[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicLoadName, Ty, PtrTy, OrdTy, NULL));

    SmallString<32> AtomicStoreName("__tsan_atomic" + itostr(BitSize) +
                                    "_store");
    TsanAtomicStore[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicStoreName, IRB.getVoidTy(), PtrTy, Ty, OrdTy,
        NULL));

    for (int op = AtomicRMWInst::FIRST_BINOP;
         op <= AtomicRMWInst::LAST_BINOP; ++op) {
      TsanAtomicRMW[op][i] = nullptr;
      const char *NamePart = nullptr;
      if (op == AtomicRMWInst::Xchg)
        NamePart = "_exchange";
      else if (op == AtomicRMWInst::Add)
        NamePart = "_fetch_add";
      else if (op == AtomicRMWInst::Sub)
        NamePart = "_fetch_sub";
      else if (op == AtomicRMWInst::And)
        NamePart = "_fetch_and";
      else if (op == AtomicRMWInst::Or)
        NamePart = "_fetch_or";
      else if (op == AtomicRMWInst::Xor)
        NamePart = "_fetch_xor";
      else if (op == AtomicRMWInst::Nand)
        NamePart = "_fetch_nand";
      else
        continue;
      SmallString<32> RMWName("__tsan_atomic" + itostr(BitSize) + NamePart);
      TsanAtomicRMW[op][i] = checkInterfaceFunction(M.getOrInsertFunction(
          RMWName, Ty, PtrTy, Ty, OrdTy, NULL));
    }

    SmallString<32> AtomicCASName("__tsan_atomic" + itostr(BitSize) +
                                  "_compare_exchange_val");
    TsanAtomicCAS[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicCASName, Ty, PtrTy, Ty, Ty, OrdTy, OrdTy, NULL));
  }
  TsanVptrUpdate = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_vptr_update", IRB.getVoidTy(), IRB.getInt8PtrTy(),
      IRB.getInt8PtrTy(), NULL));
  TsanVptrLoad = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_vptr_read", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
  TsanAtomicThreadFence = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_atomic_thread_fence", IRB.getVoidTy(), OrdTy, NULL));
  TsanAtomicSignalFence = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_atomic_signal_fence", IRB.getVoidTy(), OrdTy, NULL));

  MemmoveFn = checkInterfaceFunction(M.getOrInsertFunction(
      "memmove", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
      IRB.getInt8PtrTy(), IntptrTy, NULL));
  MemcpyFn = checkInterfaceFunction(M.getOrInsertFunction(
      "memcpy", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
      IntptrTy, NULL));
  MemsetFn = checkInterfaceFunction(M.getOrInsertFunction(
      "memset", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt32Ty(),
      IntptrTy, NULL));
}

bool ThreadSanitizer::doInitialization(Module &M) {
  DataLayoutPass *DLP = getAnalysisIfAvailable<DataLayoutPass>();
  if (!DLP)
    report_fatal_error("data layout missing");
  DL = &DLP->getDataLayout();

  // Always insert a call to __tsan_init into the module's CTORs.
  IRBuilder<> IRB(M.getContext());
  IntptrTy = IRB.getIntPtrTy(DL);
  Value *TsanInit = M.getOrInsertFunction("__tsan_init",
                                          IRB.getVoidTy(), NULL);
  appendToGlobalCtors(M, cast<Function>(TsanInit), 0);

  return true;
}

static bool isVtableAccess(Instruction *I) {
  if (MDNode *Tag = I->getMetadata(LLVMContext::MD_tbaa))
    return Tag->isTBAAVtableAccess();
  return false;
}

bool ThreadSanitizer::addrPointsToConstantData(Value *Addr) {
  // If this is a GEP, just analyze its pointer operand.
  if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Addr))
    Addr = GEP->getPointerOperand();

  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Addr)) {
    if (GV->isConstant()) {
      // Reads from constant globals cannot race with any writes.
      NumOmittedReadsFromConstantGlobals++;
      return true;
    }
  } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {
    if (isVtableAccess(L)) {
      // Reads from a vtable pointer cannot race with any writes.
      NumOmittedReadsFromVtable++;
      return true;
    }
  }
  return false;
}

// Instrumenting some of the accesses can be proven redundant.
// Currently handled:
//  - read-before-write (within same BB, no calls between); see the example
//    sketched below.
//
// We do not handle some of the patterns that should not survive
// the classic compiler optimizations anyway.
// E.g. two reads from the same temp should be eliminated by CSE,
// two writes should be eliminated by DSE, etc.
//
// 'Local' is a vector of insns within the same BB (no calls between).
// 'All' is a vector of insns that will be instrumented.
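//
// As an illustrative example of the read-before-write case (a sketch, not
// taken from any particular test), in a block like
//   %t   = load i32* %p
//   %inc = add i32 %t, 1
//   store i32 %inc, i32* %p
// the load from %p is skipped and only the store is instrumented, since a
// racy access to %p would still be flagged on the write.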
void ThreadSanitizer::chooseInstructionsToInstrument(
    SmallVectorImpl<Instruction*> &Local,
    SmallVectorImpl<Instruction*> &All) {
  SmallSet<Value*, 8> WriteTargets;
  // Iterate from the end.
  for (SmallVectorImpl<Instruction*>::reverse_iterator It = Local.rbegin(),
       E = Local.rend(); It != E; ++It) {
    Instruction *I = *It;
    if (StoreInst *Store = dyn_cast<StoreInst>(I)) {
      WriteTargets.insert(Store->getPointerOperand());
    } else {
      LoadInst *Load = cast<LoadInst>(I);
      Value *Addr = Load->getPointerOperand();
      if (WriteTargets.count(Addr)) {
        // We will write to this temp, so no reason to analyze the read.
        NumOmittedReadsBeforeWrite++;
        continue;
      }
      if (addrPointsToConstantData(Addr)) {
        // Addr points to some constant data -- it cannot race with any
        // writes.
        continue;
      }
    }
    All.push_back(I);
  }
  Local.clear();
}

static bool isAtomic(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->isAtomic() && LI->getSynchScope() == CrossThread;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isAtomic() && SI->getSynchScope() == CrossThread;
  if (isa<AtomicRMWInst>(I))
    return true;
  if (isa<AtomicCmpXchgInst>(I))
    return true;
  if (isa<FenceInst>(I))
    return true;
  return false;
}

bool ThreadSanitizer::runOnFunction(Function &F) {
  if (!DL) return false;
  initializeCallbacks(*F.getParent());
  SmallVector<Instruction*, 8> RetVec;
  SmallVector<Instruction*, 8> AllLoadsAndStores;
  SmallVector<Instruction*, 8> LocalLoadsAndStores;
  SmallVector<Instruction*, 8> AtomicAccesses;
  SmallVector<Instruction*, 8> MemIntrinCalls;
  bool Res = false;
  bool HasCalls = false;
  bool SanitizeFunction = F.hasFnAttribute(Attribute::SanitizeThread);

  // Traverse all instructions, collect loads/stores/returns, check for calls.
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (isAtomic(&Inst))
        AtomicAccesses.push_back(&Inst);
      else if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
        LocalLoadsAndStores.push_back(&Inst);
      else if (isa<ReturnInst>(Inst))
        RetVec.push_back(&Inst);
      else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
        if (isa<MemIntrinsic>(Inst))
          MemIntrinCalls.push_back(&Inst);
        HasCalls = true;
        chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
      }
    }
    chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
  }

  // We have collected all loads and stores.
  // FIXME: many of these accesses do not need to be checked for races
  // (e.g. variables that do not escape, etc).

  // Instrument memory accesses only if we want to report bugs in the function.
  if (ClInstrumentMemoryAccesses && SanitizeFunction)
    for (auto Inst : AllLoadsAndStores) {
      Res |= instrumentLoadOrStore(Inst);
    }

  // Instrument atomic memory accesses in any case (they can be used to
  // implement synchronization).
  if (ClInstrumentAtomics)
    for (auto Inst : AtomicAccesses) {
      Res |= instrumentAtomic(Inst);
    }

  if (ClInstrumentMemIntrinsics && SanitizeFunction)
    for (auto Inst : MemIntrinCalls) {
      Res |= instrumentMemIntrinsic(Inst);
    }

  // Instrument function entry/exit points if there were instrumented accesses.
  if ((Res || HasCalls) && ClInstrumentFuncEntryExit) {
    IRBuilder<> IRB(F.getEntryBlock().getFirstNonPHI());
    Value *ReturnAddress = IRB.CreateCall(
        Intrinsic::getDeclaration(F.getParent(), Intrinsic::returnaddress),
        IRB.getInt32(0));
    IRB.CreateCall(TsanFuncEntry, ReturnAddress);
    for (auto RetInst : RetVec) {
      IRBuilder<> IRBRet(RetInst);
      IRBRet.CreateCall(TsanFuncExit);
    }
    Res = true;
  }
  return Res;
}

bool ThreadSanitizer::instrumentLoadOrStore(Instruction *I) {
  IRBuilder<> IRB(I);
  bool IsWrite = isa<StoreInst>(*I);
  Value *Addr = IsWrite
      ? cast<StoreInst>(I)->getPointerOperand()
      : cast<LoadInst>(I)->getPointerOperand();
  int Idx = getMemoryAccessFuncIndex(Addr);
  if (Idx < 0)
    return false;
  if (IsWrite && isVtableAccess(I)) {
    DEBUG(dbgs() << "  VPTR : " << *I << "\n");
    Value *StoredValue = cast<StoreInst>(I)->getValueOperand();
    // StoredValue may be a vector type if we are storing several vptrs at
    // once. In this case, just take the first element of the vector since
    // this is enough to find vptr races.
    if (isa<VectorType>(StoredValue->getType()))
      StoredValue = IRB.CreateExtractElement(
          StoredValue, ConstantInt::get(IRB.getInt32Ty(), 0));
    if (StoredValue->getType()->isIntegerTy())
      StoredValue = IRB.CreateIntToPtr(StoredValue, IRB.getInt8PtrTy());
    // Call TsanVptrUpdate.
    IRB.CreateCall2(TsanVptrUpdate,
                    IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()),
                    IRB.CreatePointerCast(StoredValue, IRB.getInt8PtrTy()));
    NumInstrumentedVtableWrites++;
    return true;
  }
  if (!IsWrite && isVtableAccess(I)) {
    IRB.CreateCall(TsanVptrLoad,
                   IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
    NumInstrumentedVtableReads++;
    return true;
  }
  Value *OnAccessFunc = IsWrite ? TsanWrite[Idx] : TsanRead[Idx];
  IRB.CreateCall(OnAccessFunc, IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
  if (IsWrite) NumInstrumentedWrites++;
  else         NumInstrumentedReads++;
  return true;
}

static ConstantInt *createOrdering(IRBuilder<> *IRB, AtomicOrdering ord) {
  uint32_t v = 0;
  switch (ord) {
    case NotAtomic:              assert(false);
    case Unordered:              // Fall-through.
    case Monotonic:              v = 0; break;
    // case Consume:             v = 1; break;  // Not specified yet.
    case Acquire:                v = 2; break;
    case Release:                v = 3; break;
    case AcquireRelease:         v = 4; break;
    case SequentiallyConsistent: v = 5; break;
  }
  return IRB->getInt32(v);
}

// If a memset intrinsic gets inlined by the code gen, we will miss races on
// it.  So, we either need to ensure the intrinsic is not inlined, or
// instrument it.  We do not instrument memset/memmove/memcpy intrinsics (too
// complicated); instead we simply replace them with regular function calls,
// which are then intercepted by the run-time.
// Since tsan is running after everyone else, the calls should not be
// replaced back with intrinsics. If that becomes wrong at some point,
// we will need to call e.g. __tsan_memset to avoid the intrinsics.
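//
// For illustration (a sketch of the intended rewrite, not copied from real
// output), a call to the intrinsic
//   call void @llvm.memcpy.p0i8.p0i8.i64(i8* %dst, i8* %src, i64 %n, ...)
// is turned into a plain libc call
//   call i8* @memcpy(i8* %dst, i8* %src, i64 %n)
// which the TSan run-time intercepts like any other memcpy.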
bool ThreadSanitizer::instrumentMemIntrinsic(Instruction *I) {
  IRBuilder<> IRB(I);
  if (MemSetInst *M = dyn_cast<MemSetInst>(I)) {
    IRB.CreateCall3(MemsetFn,
        IRB.CreatePointerCast(M->getArgOperand(0), IRB.getInt8PtrTy()),
        IRB.CreateIntCast(M->getArgOperand(1), IRB.getInt32Ty(), false),
        IRB.CreateIntCast(M->getArgOperand(2), IntptrTy, false));
    I->eraseFromParent();
  } else if (MemTransferInst *M = dyn_cast<MemTransferInst>(I)) {
    IRB.CreateCall3(isa<MemCpyInst>(M) ? MemcpyFn : MemmoveFn,
        IRB.CreatePointerCast(M->getArgOperand(0), IRB.getInt8PtrTy()),
        IRB.CreatePointerCast(M->getArgOperand(1), IRB.getInt8PtrTy()),
        IRB.CreateIntCast(M->getArgOperand(2), IntptrTy, false));
    I->eraseFromParent();
  }
  return false;
}

// Both llvm and ThreadSanitizer atomic operations are based on the C++11/C1x
// standards.  For background, see the C++11 standard.  A slightly older,
// publicly available draft of the standard (not entirely up-to-date, but
// close enough for casual browsing) is available here:
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3242.pdf
// The following page contains more background information:
// http://www.hpl.hp.com/personal/Hans_Boehm/c++mm/

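// As a rough sketch of the mapping (illustrative only), an acquire load
//   %v = load atomic i32* %p acquire, align 4
// becomes a call into the run-time,
//   %v = call i32 @__tsan_atomic32_load(i32* %p, i32 2)
// where the trailing constant is the memory-order value produced by
// createOrdering() above (2 corresponds to acquire).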
bool ThreadSanitizer::instrumentAtomic(Instruction *I) {
  IRBuilder<> IRB(I);
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Addr = LI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     createOrdering(&IRB, LI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicLoad[Idx], Args);
    ReplaceInstWithInst(I, C);

  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    Value *Addr = SI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(SI->getValueOperand(), Ty, false),
                     createOrdering(&IRB, SI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicStore[Idx], Args);
    ReplaceInstWithInst(I, C);
  } else if (AtomicRMWInst *RMWI = dyn_cast<AtomicRMWInst>(I)) {
    Value *Addr = RMWI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    Function *F = TsanAtomicRMW[RMWI->getOperation()][Idx];
    if (!F)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(RMWI->getValOperand(), Ty, false),
                     createOrdering(&IRB, RMWI->getOrdering())};
    CallInst *C = CallInst::Create(F, Args);
    ReplaceInstWithInst(I, C);
  } else if (AtomicCmpXchgInst *CASI = dyn_cast<AtomicCmpXchgInst>(I)) {
    Value *Addr = CASI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(CASI->getCompareOperand(), Ty, false),
                     IRB.CreateIntCast(CASI->getNewValOperand(), Ty, false),
                     createOrdering(&IRB, CASI->getSuccessOrdering()),
                     createOrdering(&IRB, CASI->getFailureOrdering())};
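    // The run-time CAS callback returns only the old value, while the
    // cmpxchg instruction produces a { old value, success flag } pair, so
    // rebuild that pair around the call before replacing all uses of I.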
    CallInst *C = IRB.CreateCall(TsanAtomicCAS[Idx], Args);
    Value *Success = IRB.CreateICmpEQ(C, CASI->getCompareOperand());

    Value *Res = IRB.CreateInsertValue(UndefValue::get(CASI->getType()), C, 0);
    Res = IRB.CreateInsertValue(Res, Success, 1);

    I->replaceAllUsesWith(Res);
    I->eraseFromParent();
  } else if (FenceInst *FI = dyn_cast<FenceInst>(I)) {
    Value *Args[] = {createOrdering(&IRB, FI->getOrdering())};
    Function *F = FI->getSynchScope() == SingleThread ?
        TsanAtomicSignalFence : TsanAtomicThreadFence;
    CallInst *C = CallInst::Create(F, Args);
    ReplaceInstWithInst(I, C);
  }
  return true;
}

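// Returns the index into the per-size callback arrays (TsanRead, TsanWrite,
// TsanAtomicLoad, ...) for the access width of *Addr: 0 for 1-byte accesses,
// 1 for 2-byte, 2 for 4-byte, 3 for 8-byte, 4 for 16-byte, and -1 for any
// other (unsupported) size.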
int ThreadSanitizer::getMemoryAccessFuncIndex(Value *Addr) {
  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();
  assert(OrigTy->isSized());
  uint32_t TypeSize = DL->getTypeStoreSizeInBits(OrigTy);
  if (TypeSize != 8  && TypeSize != 16 &&
      TypeSize != 32 && TypeSize != 64 && TypeSize != 128) {
    NumAccessesWithBadSize++;
    // Ignore all unusual sizes.
    return -1;
  }
  size_t Idx = countTrailingZeros(TypeSize / 8);
  assert(Idx < kNumberOfAccessSizes);
  return Idx;
}