//===-- ThreadSanitizer.cpp - race detector -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer, a race detector.
//
// The tool is under development; for details about previous versions see
// http://code.google.com/p/data-race-test
//
// The instrumentation phase is quite simple:
//   - Insert calls to the run-time library before every memory access.
//      - Optimizations may apply to avoid instrumenting some of the accesses.
//   - Insert calls at function entry/exit.
// The rest is handled by the run-time library.
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "tsan"

#include "BlackList.h"
#include "llvm/Function.h"
#include "llvm/IRBuilder.h"
#include "llvm/Intrinsics.h"
#include "llvm/LLVMContext.h"
#include "llvm/Metadata.h"
#include "llvm/Module.h"
#include "llvm/Type.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

static cl::opt<std::string> ClBlackListFile("tsan-blacklist",
       cl::desc("Blacklist file"), cl::Hidden);
static cl::opt<bool> ClInstrumentMemoryAccesses(
    "tsan-instrument-memory-accesses", cl::init(true),
    cl::desc("Instrument memory accesses"), cl::Hidden);
static cl::opt<bool> ClInstrumentFuncEntryExit(
    "tsan-instrument-func-entry-exit", cl::init(true),
    cl::desc("Instrument function entry and exit"), cl::Hidden);
static cl::opt<bool> ClInstrumentAtomics(
    "tsan-instrument-atomics", cl::init(true),
    cl::desc("Instrument atomics"), cl::Hidden);

STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumOmittedReadsBeforeWrite,
          "Number of reads ignored due to following writes");
STATISTIC(NumAccessesWithBadSize, "Number of accesses with bad size");
STATISTIC(NumInstrumentedVtableWrites, "Number of vtable ptr writes");
STATISTIC(NumOmittedReadsFromConstantGlobals,
          "Number of reads from constant globals");
STATISTIC(NumOmittedReadsFromVtable, "Number of vtable reads");

namespace {

/// ThreadSanitizer: instrument the code in the module to find races.
struct ThreadSanitizer : public FunctionPass {
  ThreadSanitizer();
  const char *getPassName() const;
  bool runOnFunction(Function &F);
  bool doInitialization(Module &M);
  static char ID;  // Pass identification, replacement for typeid.

 private:
  bool instrumentLoadOrStore(Instruction *I);
  bool instrumentAtomic(Instruction *I);
  void chooseInstructionsToInstrument(SmallVectorImpl<Instruction*> &Local,
                                      SmallVectorImpl<Instruction*> &All);
  bool addrPointsToConstantData(Value *Addr);
  int getMemoryAccessFuncIndex(Value *Addr);

  TargetData *TD;
  OwningPtr<BlackList> BL;
  IntegerType *OrdTy;
  // Callbacks to the run-time library are computed in doInitialization.
  Function *TsanFuncEntry;
  Function *TsanFuncExit;
  // Access sizes are powers of two: 1, 2, 4, 8, 16.
  static const size_t kNumberOfAccessSizes = 5;
  Function *TsanRead[kNumberOfAccessSizes];
  Function *TsanWrite[kNumberOfAccessSizes];
  Function *TsanAtomicLoad[kNumberOfAccessSizes];
  Function *TsanAtomicStore[kNumberOfAccessSizes];
  Function *TsanVptrUpdate;
};
}  // namespace

char ThreadSanitizer::ID = 0;
INITIALIZE_PASS(ThreadSanitizer, "tsan",
    "ThreadSanitizer: detects data races.",
    false, false)

const char *ThreadSanitizer::getPassName() const {
  return "ThreadSanitizer";
}

ThreadSanitizer::ThreadSanitizer()
  : FunctionPass(ID),
  TD(NULL) {
}

FunctionPass *llvm::createThreadSanitizerPass() {
  return new ThreadSanitizer();
}

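// Returns FuncOrBitcast as a Function if it has the expected type; otherwise
// the run-time interface function has been redefined with a different
// signature, which is reported as a fatal error.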
static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (Function *F = dyn_cast<Function>(FuncOrBitcast))
    return F;
  FuncOrBitcast->dump();
  report_fatal_error("ThreadSanitizer interface function redefined");
}

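// Module initialization: read the blacklist, register __tsan_init as a module
// constructor, and declare the run-time callbacks (__tsan_func_entry/exit,
// the per-size __tsan_read*/__tsan_write* and __tsan_atomic* functions, and
// __tsan_vptr_update). Does nothing if TargetData is unavailable.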
bool ThreadSanitizer::doInitialization(Module &M) {
  TD = getAnalysisIfAvailable<TargetData>();
  if (!TD)
    return false;
  BL.reset(new BlackList(ClBlackListFile));

  // Always insert a call to __tsan_init into the module's CTORs.
  IRBuilder<> IRB(M.getContext());
  Value *TsanInit = M.getOrInsertFunction("__tsan_init",
                                          IRB.getVoidTy(), NULL);
  appendToGlobalCtors(M, cast<Function>(TsanInit), 0);

  // Initialize the callbacks.
  TsanFuncEntry = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_entry", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
  TsanFuncExit = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_exit", IRB.getVoidTy(), NULL));
  OrdTy = IRB.getInt32Ty();
  for (size_t i = 0; i < kNumberOfAccessSizes; ++i) {
    const size_t ByteSize = 1 << i;
    const size_t BitSize = ByteSize * 8;
    SmallString<32> ReadName("__tsan_read" + itostr(ByteSize));
    TsanRead[i] = checkInterfaceFunction(M.getOrInsertFunction(
        ReadName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    SmallString<32> WriteName("__tsan_write" + itostr(ByteSize));
    TsanWrite[i] = checkInterfaceFunction(M.getOrInsertFunction(
        WriteName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    Type *Ty = Type::getIntNTy(M.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    SmallString<32> AtomicLoadName("__tsan_atomic" + itostr(BitSize) +
                                   "_load");
    TsanAtomicLoad[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicLoadName, Ty, PtrTy, OrdTy, NULL));

    SmallString<32> AtomicStoreName("__tsan_atomic" + itostr(BitSize) +
                                    "_store");
    TsanAtomicStore[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicStoreName, IRB.getVoidTy(), PtrTy, Ty, OrdTy,
        NULL));
  }
  TsanVptrUpdate = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_vptr_update", IRB.getVoidTy(), IRB.getInt8PtrTy(),
      IRB.getInt8PtrTy(), NULL));
  return true;
}

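// Returns true if I accesses a vtable pointer, identified by the
// "vtable pointer" TBAA metadata tag attached to the instruction.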
static bool isVtableAccess(Instruction *I) {
  if (MDNode *Tag = I->getMetadata(LLVMContext::MD_tbaa)) {
    if (Tag->getNumOperands() < 1) return false;
    if (MDString *Tag1 = dyn_cast<MDString>(Tag->getOperand(0))) {
      if (Tag1->getString() == "vtable pointer") return true;
    }
  }
  return false;
}

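// Returns true if Addr is known to point to constant data (a constant global
// or a vtable pointer), in which case a read through it cannot race with any
// write.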
bool ThreadSanitizer::addrPointsToConstantData(Value *Addr) {
  // If this is a GEP, just analyze its pointer operand.
  if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Addr))
    Addr = GEP->getPointerOperand();

  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Addr)) {
    if (GV->isConstant()) {
      // Reads from constant globals can not race with any writes.
      NumOmittedReadsFromConstantGlobals++;
      return true;
    }
  } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {
    if (isVtableAccess(L)) {
      // Reads from a vtable pointer can not race with any writes.
      NumOmittedReadsFromVtable++;
      return true;
    }
  }
  return false;
}

// Instrumenting some of the accesses may be proven redundant.
// Currently handled:
//  - read-before-write (within the same BB, no calls in between)
//
// We do not handle some patterns that should not survive
// the classic compiler optimizations anyway,
// e.g. two reads from the same temp should be eliminated by CSE,
// two writes should be eliminated by DSE, etc.
//
// 'Local' is a vector of instructions within the same BB (no calls in between).
// 'All' is a vector of instructions that will be instrumented.
void ThreadSanitizer::chooseInstructionsToInstrument(
    SmallVectorImpl<Instruction*> &Local,
    SmallVectorImpl<Instruction*> &All) {
  SmallSet<Value*, 8> WriteTargets;
  // Iterate from the end.
  for (SmallVectorImpl<Instruction*>::reverse_iterator It = Local.rbegin(),
       E = Local.rend(); It != E; ++It) {
    Instruction *I = *It;
    if (StoreInst *Store = dyn_cast<StoreInst>(I)) {
      WriteTargets.insert(Store->getPointerOperand());
    } else {
      LoadInst *Load = cast<LoadInst>(I);
      Value *Addr = Load->getPointerOperand();
      if (WriteTargets.count(Addr)) {
        // We will write to this temp, so no reason to analyze the read.
        NumOmittedReadsBeforeWrite++;
        continue;
      }
      if (addrPointsToConstantData(Addr)) {
        // Addr points to some constant data -- it can not race with any writes.
        continue;
      }
    }
    All.push_back(I);
  }
  Local.clear();
}

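// Returns true if I is an atomic operation that instrumentAtomic must handle.
// Loads, stores and fences with SingleThread synchronization scope are ignored.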
static bool isAtomic(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->isAtomic() && LI->getSynchScope() == CrossThread;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isAtomic() && SI->getSynchScope() == CrossThread;
  if (isa<AtomicRMWInst>(I))
    return true;
  if (isa<AtomicCmpXchgInst>(I))
    return true;
  if (FenceInst *FI = dyn_cast<FenceInst>(I))
    return FI->getSynchScope() == CrossThread;
  return false;
}

bool ThreadSanitizer::runOnFunction(Function &F) {
  if (!TD) return false;
  if (BL->isIn(F)) return false;
  SmallVector<Instruction*, 8> RetVec;
  SmallVector<Instruction*, 8> AllLoadsAndStores;
  SmallVector<Instruction*, 8> LocalLoadsAndStores;
  SmallVector<Instruction*, 8> AtomicAccesses;
  bool Res = false;
  bool HasCalls = false;

  // Traverse all instructions, collect loads/stores/returns, check for calls.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    BasicBlock &BB = *FI;
    for (BasicBlock::iterator BI = BB.begin(), BE = BB.end();
         BI != BE; ++BI) {
      if (isAtomic(BI))
        AtomicAccesses.push_back(BI);
      else if (isa<LoadInst>(BI) || isa<StoreInst>(BI))
        LocalLoadsAndStores.push_back(BI);
      else if (isa<ReturnInst>(BI))
        RetVec.push_back(BI);
      else if (isa<CallInst>(BI) || isa<InvokeInst>(BI)) {
        HasCalls = true;
        chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
      }
    }
    chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
  }

  // We have collected all loads and stores.
  // FIXME: many of these accesses do not need to be checked for races
  // (e.g. variables that do not escape, etc).

  // Instrument memory accesses.
  if (ClInstrumentMemoryAccesses)
    for (size_t i = 0, n = AllLoadsAndStores.size(); i < n; ++i) {
      Res |= instrumentLoadOrStore(AllLoadsAndStores[i]);
    }

  // Instrument atomic memory accesses.
  if (ClInstrumentAtomics)
    for (size_t i = 0, n = AtomicAccesses.size(); i < n; ++i) {
      Res |= instrumentAtomic(AtomicAccesses[i]);
    }

  // Instrument function entry/exit points if there were instrumented accesses.
  if ((Res || HasCalls) && ClInstrumentFuncEntryExit) {
    IRBuilder<> IRB(F.getEntryBlock().getFirstNonPHI());
    Value *ReturnAddress = IRB.CreateCall(
        Intrinsic::getDeclaration(F.getParent(), Intrinsic::returnaddress),
        IRB.getInt32(0));
    IRB.CreateCall(TsanFuncEntry, ReturnAddress);
    for (size_t i = 0, n = RetVec.size(); i < n; ++i) {
      IRBuilder<> IRBRet(RetVec[i]);
      IRBRet.CreateCall(TsanFuncExit);
    }
    Res = true;
  }
  return Res;
}

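// Inserts a call to the matching __tsan_readN/__tsan_writeN callback right
// before the access; stores to a vtable pointer are reported through
// __tsan_vptr_update instead.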
bool ThreadSanitizer::instrumentLoadOrStore(Instruction *I) {
  IRBuilder<> IRB(I);
  bool IsWrite = isa<StoreInst>(*I);
  Value *Addr = IsWrite
      ? cast<StoreInst>(I)->getPointerOperand()
      : cast<LoadInst>(I)->getPointerOperand();
  int Idx = getMemoryAccessFuncIndex(Addr);
  if (Idx < 0)
    return false;
  if (IsWrite && isVtableAccess(I)) {
    DEBUG(dbgs() << "  VPTR : " << *I << "\n");
    Value *StoredValue = cast<StoreInst>(I)->getValueOperand();
    // StoredValue does not necessarily have a pointer type.
    if (isa<IntegerType>(StoredValue->getType()))
      StoredValue = IRB.CreateIntToPtr(StoredValue, IRB.getInt8PtrTy());
    // Call TsanVptrUpdate.
    IRB.CreateCall2(TsanVptrUpdate,
                    IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()),
                    IRB.CreatePointerCast(StoredValue, IRB.getInt8PtrTy()));
    NumInstrumentedVtableWrites++;
    return true;
  }
  Value *OnAccessFunc = IsWrite ? TsanWrite[Idx] : TsanRead[Idx];
  IRB.CreateCall(OnAccessFunc, IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
  if (IsWrite) NumInstrumentedWrites++;
  else         NumInstrumentedReads++;
  return true;
}

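// Maps an LLVM AtomicOrdering to the integer constant expected by the
// __tsan_atomic* run-time callbacks.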
static ConstantInt *createOrdering(IRBuilder<> *IRB, AtomicOrdering ord) {
  uint32_t v = 0;
  switch (ord) {
    case NotAtomic:              assert(false);
    case Unordered:              // Fall-through.
    case Monotonic:              v = 1 << 0; break;
    // case Consume:             v = 1 << 1; break;  // Not specified yet.
    case Acquire:                v = 1 << 2; break;
    case Release:                v = 1 << 3; break;
    case AcquireRelease:         v = 1 << 4; break;
    case SequentiallyConsistent: v = 1 << 5; break;
  }
  // The +100500 offset is temporary, used while migrating to new enum values.
  return IRB->getInt32(v + 100500);
}

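// Rewrites atomic loads and stores into calls to the corresponding
// __tsan_atomicN_load/__tsan_atomicN_store callback with an explicit memory
// ordering argument. atomicrmw, cmpxchg and fences are not handled yet.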
bool ThreadSanitizer::instrumentAtomic(Instruction *I) {
  IRBuilder<> IRB(I);
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Addr = LI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     createOrdering(&IRB, LI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicLoad[Idx],
                                   ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);

  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    Value *Addr = SI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(SI->getValueOperand(), Ty, false),
                     createOrdering(&IRB, SI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicStore[Idx],
                                   ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);
  } else if (isa<AtomicRMWInst>(I)) {
    // FIXME: Not yet supported.
  } else if (isa<AtomicCmpXchgInst>(I)) {
    // FIXME: Not yet supported.
  } else if (isa<FenceInst>(I)) {
    // FIXME: Not yet supported.
  }
  return true;
}

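// Returns log2 of the access size in bytes, used to index the per-size
// callback arrays, or -1 for access sizes the run-time does not support.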
int ThreadSanitizer::getMemoryAccessFuncIndex(Value *Addr) {
  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();
  assert(OrigTy->isSized());
  uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy);
  if (TypeSize != 8  && TypeSize != 16 &&
      TypeSize != 32 && TypeSize != 64 && TypeSize != 128) {
    NumAccessesWithBadSize++;
    // Ignore all unusual sizes.
    return -1;
  }
  size_t Idx = CountTrailingZeros_32(TypeSize / 8);
  assert(Idx < kNumberOfAccessSizes);
  return Idx;
}