//===-- ThreadSanitizer.cpp - race detector -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer, a race detector.
//
// The tool is under development; for details about previous versions see
// http://code.google.com/p/data-race-test
//
// The instrumentation phase is quite simple:
//   - Insert calls to the run-time library before every memory access.
//      - Optimizations may apply to avoid instrumenting some of the accesses.
//   - Insert calls at function entry/exit.
// The rest is handled by the run-time library.
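//
// For example (a sketch, given the run-time interface declared below): a
// non-atomic 4-byte load is preceded by a call to __tsan_read4(addr), an
// atomic load/store is replaced by a call to __tsan_atomicN_load/_store, and
// every instrumented function calls __tsan_func_entry(return_address) at its
// entry and __tsan_func_exit() before each return.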
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "tsan"

#include "FunctionBlackList.h"
#include "llvm/Function.h"
#include "llvm/IRBuilder.h"
#include "llvm/Intrinsics.h"
#include "llvm/LLVMContext.h"
#include "llvm/Metadata.h"
#include "llvm/Module.h"
#include "llvm/Type.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

static cl::opt<std::string> ClBlackListFile("tsan-blacklist",
       cl::desc("Blacklist file"), cl::Hidden);

STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumOmittedReadsBeforeWrite,
          "Number of reads ignored due to following writes");
STATISTIC(NumAccessesWithBadSize, "Number of accesses with bad size");
STATISTIC(NumInstrumentedVtableWrites, "Number of vtable ptr writes");
STATISTIC(NumOmittedReadsFromConstantGlobals,
          "Number of reads from constant globals");
STATISTIC(NumOmittedReadsFromVtable, "Number of vtable reads");

namespace {

/// ThreadSanitizer: instrument the code in module to find races.
struct ThreadSanitizer : public FunctionPass {
  ThreadSanitizer();
  const char *getPassName() const;
  bool runOnFunction(Function &F);
  bool doInitialization(Module &M);
  static char ID;  // Pass identification, replacement for typeid.

 private:
  bool instrumentLoadOrStore(Instruction *I);
  bool instrumentAtomic(Instruction *I);
  void chooseInstructionsToInstrument(SmallVectorImpl<Instruction*> &Local,
                                      SmallVectorImpl<Instruction*> &All);
  bool addrPointsToConstantData(Value *Addr);
  int getMemoryAccessFuncIndex(Value *Addr);

  TargetData *TD;
  OwningPtr<FunctionBlackList> BL;
  IntegerType *OrdTy;
  // Callbacks to run-time library are computed in doInitialization.
  Function *TsanFuncEntry;
  Function *TsanFuncExit;
  // Accesses sizes are powers of two: 1, 2, 4, 8, 16.
  static const size_t kNumberOfAccessSizes = 5;
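  // Callback at index i handles accesses of (1 << i) bytes,
  // e.g. TsanRead[2] holds the declaration of __tsan_read4.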
  Function *TsanRead[kNumberOfAccessSizes];
  Function *TsanWrite[kNumberOfAccessSizes];
  Function *TsanAtomicLoad[kNumberOfAccessSizes];
  Function *TsanAtomicStore[kNumberOfAccessSizes];
  Function *TsanVptrUpdate;
};
}  // namespace

char ThreadSanitizer::ID = 0;
INITIALIZE_PASS(ThreadSanitizer, "tsan",
    "ThreadSanitizer: detects data races.",
    false, false)

const char *ThreadSanitizer::getPassName() const {
  return "ThreadSanitizer";
}

ThreadSanitizer::ThreadSanitizer()
  : FunctionPass(ID),
  TD(NULL) {
}

FunctionPass *llvm::createThreadSanitizerPass() {
  return new ThreadSanitizer();
}

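// getOrInsertFunction returns a plain Function only if the name is not
// already declared with a different type; any conflicting declaration comes
// back as a bitcast constant, which we treat as a fatal redefinition of the
// tsan interface.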
static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (Function *F = dyn_cast<Function>(FuncOrBitcast))
    return F;
  FuncOrBitcast->dump();
  report_fatal_error("ThreadSanitizer interface function redefined");
}

bool ThreadSanitizer::doInitialization(Module &M) {
  TD = getAnalysisIfAvailable<TargetData>();
  if (!TD)
    return false;
  BL.reset(new FunctionBlackList(ClBlackListFile));

  // Always insert a call to __tsan_init into the module's CTORs.
  IRBuilder<> IRB(M.getContext());
  Value *TsanInit = M.getOrInsertFunction("__tsan_init",
                                          IRB.getVoidTy(), NULL);
  appendToGlobalCtors(M, cast<Function>(TsanInit), 0);

  // Initialize the callbacks.
  TsanFuncEntry = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_entry", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
  TsanFuncExit = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_func_exit", IRB.getVoidTy(), NULL));
  OrdTy = IRB.getInt32Ty();
  for (size_t i = 0; i < kNumberOfAccessSizes; ++i) {
    const size_t ByteSize = 1 << i;
    const size_t BitSize = ByteSize * 8;
    SmallString<32> ReadName("__tsan_read" + itostr(ByteSize));
    TsanRead[i] = checkInterfaceFunction(M.getOrInsertFunction(
        ReadName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    SmallString<32> WriteName("__tsan_write" + itostr(ByteSize));
    TsanWrite[i] = checkInterfaceFunction(M.getOrInsertFunction(
        WriteName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));

    Type *Ty = Type::getIntNTy(M.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    SmallString<32> AtomicLoadName("__tsan_atomic" + itostr(BitSize) +
                                   "_load");
    TsanAtomicLoad[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicLoadName, Ty, PtrTy, OrdTy, NULL));

    SmallString<32> AtomicStoreName("__tsan_atomic" + itostr(BitSize) +
                                    "_store");
    TsanAtomicStore[i] = checkInterfaceFunction(M.getOrInsertFunction(
        AtomicStoreName, IRB.getVoidTy(), PtrTy, Ty, OrdTy,
        NULL));
  }
  TsanVptrUpdate = checkInterfaceFunction(M.getOrInsertFunction(
      "__tsan_vptr_update", IRB.getVoidTy(), IRB.getInt8PtrTy(),
      IRB.getInt8PtrTy(), NULL));
  return true;
}

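// Returns true if the instruction carries TBAA metadata marking it as a
// vtable pointer access (Clang tags such accesses with the "vtable pointer"
// TBAA string).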
static bool isVtableAccess(Instruction *I) {
  if (MDNode *Tag = I->getMetadata(LLVMContext::MD_tbaa)) {
    if (Tag->getNumOperands() < 1) return false;
    if (MDString *Tag1 = dyn_cast<MDString>(Tag->getOperand(0))) {
      if (Tag1->getString() == "vtable pointer") return true;
    }
  }
  return false;
}

bool ThreadSanitizer::addrPointsToConstantData(Value *Addr) {
  // If this is a GEP, just analyze its pointer operand.
  if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Addr))
    Addr = GEP->getPointerOperand();

  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Addr)) {
    if (GV->isConstant()) {
      // Reads from constant globals can not race with any writes.
      NumOmittedReadsFromConstantGlobals++;
      return true;
    }
  } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {
    if (isVtableAccess(L)) {
      // Reads from a vtable pointer can not race with any writes.
      NumOmittedReadsFromVtable++;
      return true;
    }
  }
  return false;
}

// Instrumenting some of the accesses may be proven redundant.
// Currently handled:
//  - read-before-write (within same BB, no calls between)
//
// We do not handle some of the patterns that should not survive
// after the classic compiler optimizations.
// E.g. two reads from the same temp should be eliminated by CSE,
// two writes should be eliminated by DSE, etc.
//
// 'Local' is a vector of insns within the same BB (no calls between).
// 'All' is a vector of insns that will be instrumented.
void ThreadSanitizer::chooseInstructionsToInstrument(
    SmallVectorImpl<Instruction*> &Local,
    SmallVectorImpl<Instruction*> &All) {
  SmallSet<Value*, 8> WriteTargets;
  // Iterate from the end.
  for (SmallVectorImpl<Instruction*>::reverse_iterator It = Local.rbegin(),
       E = Local.rend(); It != E; ++It) {
    Instruction *I = *It;
    if (StoreInst *Store = dyn_cast<StoreInst>(I)) {
      WriteTargets.insert(Store->getPointerOperand());
    } else {
      LoadInst *Load = cast<LoadInst>(I);
      Value *Addr = Load->getPointerOperand();
      if (WriteTargets.count(Addr)) {
        // We will write to this temp, so no reason to analyze the read.
        NumOmittedReadsBeforeWrite++;
        continue;
      }
      if (addrPointsToConstantData(Addr)) {
        // Addr points to some constant data -- it can not race with any writes.
        continue;
      }
    }
    All.push_back(I);
  }
  Local.clear();
}

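// Returns true for the atomic operations this pass must handle separately:
// cross-thread atomic loads, stores and fences, plus all atomicrmw/cmpxchg.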
static bool isAtomic(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->isAtomic() && LI->getSynchScope() == CrossThread;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isAtomic() && SI->getSynchScope() == CrossThread;
  if (isa<AtomicRMWInst>(I))
    return true;
  if (isa<AtomicCmpXchgInst>(I))
    return true;
  if (FenceInst *FI = dyn_cast<FenceInst>(I))
    return FI->getSynchScope() == CrossThread;
  return false;
}

bool ThreadSanitizer::runOnFunction(Function &F) {
  if (!TD) return false;
  if (BL->isIn(F)) return false;
  SmallVector<Instruction*, 8> RetVec;
  SmallVector<Instruction*, 8> AllLoadsAndStores;
  SmallVector<Instruction*, 8> LocalLoadsAndStores;
  SmallVector<Instruction*, 8> AtomicAccesses;
  bool Res = false;
  bool HasCalls = false;

  // Traverse all instructions, collect loads/stores/returns, check for calls.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    BasicBlock &BB = *FI;
    for (BasicBlock::iterator BI = BB.begin(), BE = BB.end();
         BI != BE; ++BI) {
      if (isAtomic(BI))
        AtomicAccesses.push_back(BI);
      else if (isa<LoadInst>(BI) || isa<StoreInst>(BI))
        LocalLoadsAndStores.push_back(BI);
      else if (isa<ReturnInst>(BI))
        RetVec.push_back(BI);
      else if (isa<CallInst>(BI) || isa<InvokeInst>(BI)) {
        HasCalls = true;
        chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
      }
    }
    chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
  }

  // We have collected all loads and stores.
  // FIXME: many of these accesses do not need to be checked for races
  // (e.g. variables that do not escape, etc).

  // Instrument memory accesses.
  for (size_t i = 0, n = AllLoadsAndStores.size(); i < n; ++i) {
    Res |= instrumentLoadOrStore(AllLoadsAndStores[i]);
  }

  // Instrument atomic memory accesses.
  for (size_t i = 0, n = AtomicAccesses.size(); i < n; ++i) {
    Res |= instrumentAtomic(AtomicAccesses[i]);
  }

  // Instrument function entry/exit points if there were instrumented accesses.
  if (Res || HasCalls) {
    IRBuilder<> IRB(F.getEntryBlock().getFirstNonPHI());
    Value *ReturnAddress = IRB.CreateCall(
        Intrinsic::getDeclaration(F.getParent(), Intrinsic::returnaddress),
        IRB.getInt32(0));
    IRB.CreateCall(TsanFuncEntry, ReturnAddress);
    for (size_t i = 0, n = RetVec.size(); i < n; ++i) {
      IRBuilder<> IRBRet(RetVec[i]);
      IRBRet.CreateCall(TsanFuncExit);
    }
    Res = true;
  }
  return Res;
}

bool ThreadSanitizer::instrumentLoadOrStore(Instruction *I) {
  IRBuilder<> IRB(I);
  bool IsWrite = isa<StoreInst>(*I);
  Value *Addr = IsWrite
      ? cast<StoreInst>(I)->getPointerOperand()
      : cast<LoadInst>(I)->getPointerOperand();
  int Idx = getMemoryAccessFuncIndex(Addr);
  if (Idx < 0)
    return false;
  if (IsWrite && isVtableAccess(I)) {
    Value *StoredValue = cast<StoreInst>(I)->getValueOperand();
    IRB.CreateCall2(TsanVptrUpdate,
                    IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()),
                    IRB.CreatePointerCast(StoredValue, IRB.getInt8PtrTy()));
    NumInstrumentedVtableWrites++;
    return true;
  }
  Value *OnAccessFunc = IsWrite ? TsanWrite[Idx] : TsanRead[Idx];
  IRB.CreateCall(OnAccessFunc, IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
  if (IsWrite) NumInstrumentedWrites++;
  else         NumInstrumentedReads++;
  return true;
}

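// Encodes an LLVM AtomicOrdering as the i32 constant passed to the
// __tsan_atomic* callbacks (one bit per ordering level).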
static ConstantInt *createOrdering(IRBuilder<> *IRB, AtomicOrdering ord) {
  uint32_t v = 0;
  switch (ord) {
    case NotAtomic:              assert(false);
    case Unordered:              // Fall-through.
    case Monotonic:              v = 1 << 0; break;
    // case Consume:             v = 1 << 1; break;  // Not specified yet.
    case Acquire:                v = 1 << 2; break;
    case Release:                v = 1 << 3; break;
    case AcquireRelease:         v = 1 << 4; break;
    case SequentiallyConsistent: v = 1 << 5; break;
  }
  return IRB->getInt32(v);
}

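// Atomic loads and stores are not merely annotated: the original instruction
// is replaced with a call to the matching __tsan_atomicN_load/_store
// callback, so the run-time library is expected to perform the access itself.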
bool ThreadSanitizer::instrumentAtomic(Instruction *I) {
  IRBuilder<> IRB(I);
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Addr = LI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     createOrdering(&IRB, LI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicLoad[Idx],
                                   ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);

  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    Value *Addr = SI->getPointerOperand();
    int Idx = getMemoryAccessFuncIndex(Addr);
    if (Idx < 0)
      return false;
    const size_t ByteSize = 1 << Idx;
    const size_t BitSize = ByteSize * 8;
    Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
    Type *PtrTy = Ty->getPointerTo();
    Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                     IRB.CreateIntCast(SI->getValueOperand(), Ty, false),
                     createOrdering(&IRB, SI->getOrdering())};
    CallInst *C = CallInst::Create(TsanAtomicStore[Idx],
                                   ArrayRef<Value*>(Args));
    ReplaceInstWithInst(I, C);
  } else if (isa<AtomicRMWInst>(I)) {
    // FIXME: Not yet supported.
  } else if (isa<AtomicCmpXchgInst>(I)) {
    // FIXME: Not yet supported.
  } else if (isa<FenceInst>(I)) {
    // FIXME: Not yet supported.
  }
  return true;
}

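// Returns log2 of the access size in bytes (the index into the callback
// arrays), or -1 if the accessed type is not 1, 2, 4, 8 or 16 bytes wide.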
int ThreadSanitizer::getMemoryAccessFuncIndex(Value *Addr) {
  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();
  assert(OrigTy->isSized());
  uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy);
  if (TypeSize != 8  && TypeSize != 16 &&
      TypeSize != 32 && TypeSize != 64 && TypeSize != 128) {
    NumAccessesWithBadSize++;
    // Ignore all unusual sizes.
    return -1;
  }
  size_t Idx = CountTrailingZeros_32(TypeSize / 8);
  assert(Idx < kNumberOfAccessSizes);
  return Idx;
}