//===-- ThreadSanitizer.cpp - race detector -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer, a race detector.
//
// The tool is under development; for details about previous versions see
// http://code.google.com/p/data-race-test
//
// The instrumentation phase is quite simple:
//  - Insert calls to the run-time library before every memory access.
//   - Optimizations may apply to avoid instrumenting some of the accesses.
//  - Insert calls at function entry/exit.
// The rest is handled by the run-time library.
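//
// For illustration only (the conceptual effect of the pass, not emitted
// verbatim): a plain 4-byte load such as
//   int v = *p;
// is roughly preceded by a call into the run-time library,
//   __tsan_read4((void*)p);
//   int v = *p;
// and every instrumented function is bracketed by
// __tsan_func_entry(<return address>) at entry and __tsan_func_exit()
// before each return.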
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "tsan"

#include "llvm/Transforms/Instrumentation.h"
#include "BlackList.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/DataLayout.h"
#include "llvm/Function.h"
#include "llvm/IRBuilder.h"
#include "llvm/Intrinsics.h"
#include "llvm/LLVMContext.h"
#include "llvm/Metadata.h"
#include "llvm/Module.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Type.h"

using namespace llvm;

static cl::opt<std::string>  ClBlacklistFile("tsan-blacklist",
       cl::desc("Blacklist file"), cl::Hidden);
static cl::opt<bool>  ClInstrumentMemoryAccesses(
    "tsan-instrument-memory-accesses", cl::init(true),
    cl::desc("Instrument memory accesses"), cl::Hidden);
static cl::opt<bool>  ClInstrumentFuncEntryExit(
    "tsan-instrument-func-entry-exit", cl::init(true),
    cl::desc("Instrument function entry and exit"), cl::Hidden);
static cl::opt<bool>  ClInstrumentAtomics(
    "tsan-instrument-atomics", cl::init(true),
    cl::desc("Instrument atomics"), cl::Hidden);

STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumOmittedReadsBeforeWrite,
          "Number of reads ignored due to following writes");
STATISTIC(NumAccessesWithBadSize, "Number of accesses with bad size");
STATISTIC(NumInstrumentedVtableWrites, "Number of vtable ptr writes");
STATISTIC(NumOmittedReadsFromConstantGlobals,
          "Number of reads from constant globals");
STATISTIC(NumOmittedReadsFromVtable, "Number of vtable reads");

namespace {

/// ThreadSanitizer: instrument the code in the module to find races.
struct ThreadSanitizer : public FunctionPass {
  ThreadSanitizer(StringRef BlacklistFile = StringRef())
      : FunctionPass(ID),
        TD(0),
        BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
                                            : BlacklistFile) { }
  const char *getPassName() const;
  bool runOnFunction(Function &F);
  bool doInitialization(Module &M);
  static char ID;  // Pass identification, replacement for typeid.

 private:
  void initializeCallbacks(Module &M);
  bool instrumentLoadOrStore(Instruction *I);
  bool instrumentAtomic(Instruction *I);
  void chooseInstructionsToInstrument(SmallVectorImpl<Instruction*> &Local,
                                      SmallVectorImpl<Instruction*> &All);
  bool addrPointsToConstantData(Value *Addr);
  int getMemoryAccessFuncIndex(Value *Addr);

  DataLayout *TD;
  SmallString<64> BlacklistFile;
  OwningPtr<BlackList> BL;
  IntegerType *OrdTy;
  // Callbacks to the run-time library are computed in initializeCallbacks.
  Function *TsanFuncEntry;
  Function *TsanFuncExit;
  // Access sizes are powers of two: 1, 2, 4, 8, 16.
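  // Index i in the callback arrays below corresponds to an access of
  // (1 << i) bytes, e.g. TsanRead[2] is the callback for 4-byte reads
  // (__tsan_read4); see getMemoryAccessFuncIndex().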
  static const size_t kNumberOfAccessSizes = 5;
  Function *TsanRead[kNumberOfAccessSizes];
  Function *TsanWrite[kNumberOfAccessSizes];
  Function *TsanAtomicLoad[kNumberOfAccessSizes];
  Function *TsanAtomicStore[kNumberOfAccessSizes];
  Function *TsanAtomicRMW[AtomicRMWInst::LAST_BINOP + 1][kNumberOfAccessSizes];
  Function *TsanAtomicCAS[kNumberOfAccessSizes];
  Function *TsanAtomicThreadFence;
  Function *TsanAtomicSignalFence;
  Function *TsanVptrUpdate;
};
}  // namespace

char ThreadSanitizer::ID = 0;
INITIALIZE_PASS(ThreadSanitizer, "tsan",
    "ThreadSanitizer: detects data races.",
    false, false)

const char *ThreadSanitizer::getPassName() const {
  return "ThreadSanitizer";
}

FunctionPass *llvm::createThreadSanitizerPass(StringRef BlacklistFile) {
  return new ThreadSanitizer(BlacklistFile);
}

static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (Function *F = dyn_cast<Function>(FuncOrBitcast))
    return F;
  FuncOrBitcast->dump();
  report_fatal_error("ThreadSanitizer interface function redefined");
}

void ThreadSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(M.getContext());
  // Initialize the callbacks.
  TsanFuncEntry = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_entry", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
  TsanFuncExit = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_exit", IRB.getVoidTy(), NULL));
  OrdTy = IRB.getInt32Ty();
  for (size_t i = 0; i < kNumberOfAccessSizes; ++i) {
    const size_t ByteSize = 1 << i;
    const size_t BitSize = ByteSize * 8;
    SmallString<32> ReadName("__tsan_read" + itostr(ByteSize));
    TsanRead[i] = checkInterfaceFunction(M.getOrInsertFunction(
        ReadName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    SmallString<32> WriteName("__tsan_write" + itostr(ByteSize));
    TsanWrite[i] = checkInterfaceFunction(M.getOrInsertFunction(
        WriteName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    Type *Ty = Type::getIntNTy(M.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    SmallString<32> AtomicLoadName("__tsan_atomic" + itostr(BitSize) +
                                   "_load");
    TsanAtomicLoad[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicLoadName, Ty, PtrTy, OrdTy, NULL));

    SmallString<32> AtomicStoreName("__tsan_atomic" + itostr(BitSize) +
                                    "_store");
    TsanAtomicStore[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicStoreName, IRB.getVoidTy(), PtrTy, Ty, OrdTy,
        NULL));

    for (int op = AtomicRMWInst::FIRST_BINOP;
        op <= AtomicRMWInst::LAST_BINOP; ++op) {
      TsanAtomicRMW[op][i] = NULL;
      const char *NamePart = NULL;
      if (op == AtomicRMWInst::Xchg)
        NamePart = "_exchange";
      else if (op == AtomicRMWInst::Add)
        NamePart = "_fetch_add";
      else if (op == AtomicRMWInst::Sub)
        NamePart = "_fetch_sub";
      else if (op == AtomicRMWInst::And)
        NamePart = "_fetch_and";
      else if (op == AtomicRMWInst::Or)
        NamePart = "_fetch_or";
      else if (op == AtomicRMWInst::Xor)
        NamePart = "_fetch_xor";
      else if (op == AtomicRMWInst::Nand)
        NamePart = "_fetch_nand";
      else
        continue;
      SmallString<32> RMWName("__tsan_atomic" + itostr(BitSize) + NamePart);
      TsanAtomicRMW[op][i] = checkInterfaceFunction(M.getOrInsertFunction(
          RMWName, Ty, PtrTy, Ty, OrdTy, NULL));
    }

    SmallString<32> AtomicCASName("__tsan_atomic" + itostr(BitSize) +
                                  "_compare_exchange_val");
    TsanAtomicCAS[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicCASName, Ty, PtrTy, Ty, Ty, OrdTy, OrdTy, NULL));
  }
  TsanVptrUpdate = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_vptr_update", IRB.getVoidTy(), IRB.getInt8PtrTy(),
      IRB.getInt8PtrTy(), NULL));
  TsanAtomicThreadFence = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_atomic_thread_fence", IRB.getVoidTy(), OrdTy, NULL));
  TsanAtomicSignalFence = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_atomic_signal_fence", IRB.getVoidTy(), OrdTy, NULL));
}

bool ThreadSanitizer::doInitialization(Module &M) {
  TD = getAnalysisIfAvailable<DataLayout>();
  if (!TD)
    return false;
  BL.reset(new BlackList(BlacklistFile));

  // Always insert a call to __tsan_init into the module's CTORs.
  IRBuilder<> IRB(M.getContext());
  Value *TsanInit = M.getOrInsertFunction("__tsan_init",
                                          IRB.getVoidTy(), NULL);
  appendToGlobalCtors(M, cast<Function>(TsanInit), 0);

  return true;
}

static bool isVtableAccess(Instruction *I) {
  if (MDNode *Tag = I->getMetadata(LLVMContext::MD_tbaa)) {
    if (Tag->getNumOperands() < 1) return false;
    if (MDString *Tag1 = dyn_cast<MDString>(Tag->getOperand(0))) {
      if (Tag1->getString() == "vtable pointer") return true;
    }
  }
  return false;
}

bool ThreadSanitizer::addrPointsToConstantData(Value *Addr) {
  // If this is a GEP, just analyze its pointer operand.
  if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Addr))
    Addr = GEP->getPointerOperand();

  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Addr)) {
    if (GV->isConstant()) {
      // Reads from constant globals can not race with any writes.
      NumOmittedReadsFromConstantGlobals++;
      return true;
    }
  } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {
    if (isVtableAccess(L)) {
      // Reads from a vtable pointer can not race with any writes.
      NumOmittedReadsFromVtable++;
      return true;
    }
  }
  return false;
}

// Instrumenting some of the accesses can be proven redundant.
// Currently handled:
//  - read-before-write (within the same BB, with no calls in between)
//
// We do not handle some patterns that should not survive
// the classic compiler optimizations anyway.
// E.g. two reads from the same temp should be eliminated by CSE,
// two writes should be eliminated by DSE, etc.
//
// 'Local' is a vector of insns within the same BB (no calls between them).
// 'All' is a vector of insns that will be instrumented.
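//
// A small sketch of the read-before-write case that is dropped (here %x is
// a hypothetical local temporary, in pre-3.3 typed-pointer IR syntax):
//   %1 = load i32* %x          ; not instrumented: the later store to %x
//   ...                        ;   in the same BB (no calls in between)
//   store i32 %2, i32* %x      ;   is instrumented, and any race on the
//                              ;   read is also a race on that store.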
void ThreadSanitizer::chooseInstructionsToInstrument(
    SmallVectorImpl<Instruction*> &Local,
    SmallVectorImpl<Instruction*> &All) {
  SmallSet<Value*, 8> WriteTargets;
  // Iterate from the end.
  for (SmallVectorImpl<Instruction*>::reverse_iterator It = Local.rbegin(),
       E = Local.rend(); It != E; ++It) {
    Instruction *I = *It;
    if (StoreInst *Store = dyn_cast<StoreInst>(I)) {
      WriteTargets.insert(Store->getPointerOperand());
    } else {
      LoadInst *Load = cast<LoadInst>(I);
      Value *Addr = Load->getPointerOperand();
      if (WriteTargets.count(Addr)) {
        // We will write to this temp, so no reason to analyze the read.
        NumOmittedReadsBeforeWrite++;
        continue;
      }
      if (addrPointsToConstantData(Addr)) {
        // Addr points to some constant data -- it can not race with any writes.
        continue;
      }
    }
    All.push_back(I);
  }
  Local.clear();
}

static bool isAtomic(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->isAtomic() && LI->getSynchScope() == CrossThread;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isAtomic() && SI->getSynchScope() == CrossThread;
  if (isa<AtomicRMWInst>(I))
    return true;
  if (isa<AtomicCmpXchgInst>(I))
    return true;
  if (isa<FenceInst>(I))
    return true;
  return false;
}

bool ThreadSanitizer::runOnFunction(Function &F) {
  if (!TD) return false;
  if (BL->isIn(F)) return false;
  initializeCallbacks(*F.getParent());
  SmallVector<Instruction*, 8> RetVec;
  SmallVector<Instruction*, 8> AllLoadsAndStores;
  SmallVector<Instruction*, 8> LocalLoadsAndStores;
  SmallVector<Instruction*, 8> AtomicAccesses;
  bool Res = false;
  bool HasCalls = false;

  // Traverse all instructions, collect loads/stores/returns, check for calls.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    BasicBlock &BB = *FI;
    for (BasicBlock::iterator BI = BB.begin(), BE = BB.end();
         BI != BE; ++BI) {
      if (isAtomic(BI))
        AtomicAccesses.push_back(BI);
      else if (isa<LoadInst>(BI) || isa<StoreInst>(BI))
        LocalLoadsAndStores.push_back(BI);
      else if (isa<ReturnInst>(BI))
        RetVec.push_back(BI);
      else if (isa<CallInst>(BI) || isa<InvokeInst>(BI)) {
        HasCalls = true;
        chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
      }
    }
    chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
  }

  // We have collected all loads and stores.
  // FIXME: many of these accesses do not need to be checked for races
  // (e.g. variables that do not escape, etc).

  // Instrument memory accesses.
  if (ClInstrumentMemoryAccesses)
    for (size_t i = 0, n = AllLoadsAndStores.size(); i < n; ++i) {
      Res |= instrumentLoadOrStore(AllLoadsAndStores[i]);
    }

  // Instrument atomic memory accesses.
  if (ClInstrumentAtomics)
    for (size_t i = 0, n = AtomicAccesses.size(); i < n; ++i) {
      Res |= instrumentAtomic(AtomicAccesses[i]);
    }

  // Instrument function entry/exit points if there were instrumented accesses.
  if ((Res || HasCalls) && ClInstrumentFuncEntryExit) {
    IRBuilder<> IRB(F.getEntryBlock().getFirstNonPHI());
    Value *ReturnAddress = IRB.CreateCall(
        Intrinsic::getDeclaration(F.getParent(), Intrinsic::returnaddress),
        IRB.getInt32(0));
    IRB.CreateCall(TsanFuncEntry, ReturnAddress);
    for (size_t i = 0, n = RetVec.size(); i < n; ++i) {
      IRBuilder<> IRBRet(RetVec[i]);
      IRBRet.CreateCall(TsanFuncExit);
    }
    Res = true;
  }
  return Res;
}

bool ThreadSanitizer::instrumentLoadOrStore(Instruction *I) {
  IRBuilder<> IRB(I);
  bool IsWrite = isa<StoreInst>(*I);
  Value *Addr = IsWrite
      ? cast<StoreInst>(I)->getPointerOperand()
      : cast<LoadInst>(I)->getPointerOperand();
  int Idx = getMemoryAccessFuncIndex(Addr);
  if (Idx < 0)
    return false;
  if (IsWrite && isVtableAccess(I)) {
    DEBUG(dbgs() << "  VPTR : " << *I << "\n");
    Value *StoredValue = cast<StoreInst>(I)->getValueOperand();
    // StoredValue does not necessarily have a pointer type.
    if (isa<IntegerType>(StoredValue->getType()))
      StoredValue = IRB.CreateIntToPtr(StoredValue, IRB.getInt8PtrTy());
    // Call TsanVptrUpdate.
    IRB.CreateCall2(TsanVptrUpdate,
                    IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()),
                    IRB.CreatePointerCast(StoredValue, IRB.getInt8PtrTy()));
    NumInstrumentedVtableWrites++;
    return true;
  }
  Value *OnAccessFunc = IsWrite ? TsanWrite[Idx] : TsanRead[Idx];
  IRB.CreateCall(OnAccessFunc, IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
  if (IsWrite) NumInstrumentedWrites++;
  else         NumInstrumentedReads++;
  return true;
}

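// Encodes an LLVM atomic ordering as the integer constant that the
// __tsan_atomic* run-time callbacks expect (the C/C++11 memory_order
// values: 0 = relaxed ... 5 = seq_cst).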
static ConstantInt *createOrdering(IRBuilder<> *IRB, AtomicOrdering ord) {
  uint32_t v = 0;
  switch (ord) {
    case NotAtomic:              assert(false);
    case Unordered:              // Fall-through.
    case Monotonic:              v = 0; break;
    // case Consume:             v = 1; break;  // Not specified yet.
    case Acquire:                v = 2; break;
    case Release:                v = 3; break;
    case AcquireRelease:         v = 4; break;
    case SequentiallyConsistent: v = 5; break;
  }
  return IRB->getInt32(v);
}

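// Same as createOrdering, but for the failure ordering of a cmpxchg: the
// failure case may not carry release semantics, so Release maps to relaxed
// (0) and AcquireRelease maps to acquire (2), mirroring the C++11
// compare_exchange rules.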
static ConstantInt *createFailOrdering(IRBuilder<> *IRB, AtomicOrdering ord) {
  uint32_t v = 0;
  switch (ord) {
    case NotAtomic:              assert(false);
    case Unordered:              // Fall-through.
    case Monotonic:              v = 0; break;
    // case Consume:             v = 1; break;  // Not specified yet.
    case Acquire:                v = 2; break;
    case Release:                v = 0; break;
    case AcquireRelease:         v = 2; break;
    case SequentiallyConsistent: v = 5; break;
  }
  return IRB->getInt32(v);
}

// Both llvm and ThreadSanitizer atomic operations are based on the C++11/C1x
// standards. For background, see the C++11 standard. A slightly older,
// publicly available draft of the standard (not entirely up-to-date, but
// close enough for casual browsing) is available here:
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3242.pdf
// The following page contains more background information:
// http://www.hpl.hp.com/personal/Hans_Boehm/c++mm/

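// For example (illustrative IR in pre-3.3 typed-pointer syntax, not emitted
// verbatim), an acquire load
//   %v = load atomic i32* %p acquire, align 4
// is replaced with a call into the run-time library,
//   %v = call i32 @__tsan_atomic32_load(i32* %p, i32 2)
// where 2 is the encoding of memory_order_acquire produced by
// createOrdering() above.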
bool ThreadSanitizer::instrumentAtomic(Instruction *I) {
  IRBuilder<> IRB(I);
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Addr = LI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     createOrdering(&IRB, LI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicLoad[Idx],
                                   ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);

  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    Value *Addr = SI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(SI->getValueOperand(), Ty, false),
                     createOrdering(&IRB, SI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicStore[Idx],
                                   ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);
  } else if (AtomicRMWInst *RMWI = dyn_cast<AtomicRMWInst>(I)) {
    Value *Addr = RMWI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    Function *F = TsanAtomicRMW[RMWI->getOperation()][Idx];
    if (F == NULL)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(RMWI->getValOperand(), Ty, false),
                     createOrdering(&IRB, RMWI->getOrdering())};
    CallInst *C = CallInst::Create(F, ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);
  } else if (AtomicCmpXchgInst *CASI = dyn_cast<AtomicCmpXchgInst>(I)) {
    Value *Addr = CASI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(CASI->getCompareOperand(), Ty, false),
                     IRB.CreateIntCast(CASI->getNewValOperand(), Ty, false),
                     createOrdering(&IRB, CASI->getOrdering()),
                     createFailOrdering(&IRB, CASI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicCAS[Idx], ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);
  } else if (FenceInst *FI = dyn_cast<FenceInst>(I)) {
    Value *Args[] = {createOrdering(&IRB, FI->getOrdering())};
    Function *F = FI->getSynchScope() == SingleThread ?
        TsanAtomicSignalFence : TsanAtomicThreadFence;
    CallInst *C = CallInst::Create(F, ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);
  }
  return true;
}

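// Returns log2 of the access size in bytes for the supported sizes
// (1, 2, 4, 8 or 16 bytes), e.g. a 32-bit access maps to index 2;
// returns -1 for any other size, which the callers treat as
// "do not instrument".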
int ThreadSanitizer::getMemoryAccessFuncIndex(Value *Addr) {
  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();
  assert(OrigTy->isSized());
  uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy);
  if (TypeSize != 8  && TypeSize != 16 &&
      TypeSize != 32 && TypeSize != 64 && TypeSize != 128) {
    NumAccessesWithBadSize++;
    // Ignore all unusual sizes.
    return -1;
  }
  size_t Idx = CountTrailingZeros_32(TypeSize / 8);
  assert(Idx < kNumberOfAccessSizes);
  return Idx;
}