//===-- tsan_sync.cc ------------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
Alexey Samsonov8bd90982012-06-07 09:50:16 +000013#include "sanitizer_common/sanitizer_placement_new.h"
Kostya Serebryany4ad375f2012-05-10 13:48:04 +000014#include "tsan_sync.h"
Kostya Serebryany4ad375f2012-05-10 13:48:04 +000015#include "tsan_rtl.h"
16#include "tsan_mman.h"
17
18namespace __tsan {
19
// Constructs a descriptor for one user synchronization object (e.g. a mutex)
// living at 'addr'. 'uid' is the id assigned at creation time (monotonically
// increasing, see SyncTab::Create). All state flags and the lock-tracking
// fields start zero/empty; no owner thread is recorded yet.
SyncVar::SyncVar(uptr addr, u64 uid)
  : mtx(MutexTypeSyncVar, StatMtxSyncVar)
  , addr(addr)
  , uid(uid)
  , owner_tid(kInvalidTid)  // kInvalidTid == "currently unowned"
  , last_lock()
  , recursion()
  , is_rw()
  , is_recursive()
  , is_broken()
  , is_linker_init() {
}
32
// One shard of the sync-object hash table: its own mutex plus the head of
// a singly-linked list of SyncVars ('val' starts null).
SyncTab::Part::Part()
  : mtx(MutexTypeSyncTab, StatMtxSyncTab)
  , val() {
}
37
// All parts are default-constructed (empty lists); nothing else to set up.
SyncTab::SyncTab() {
}
40
41SyncTab::~SyncTab() {
42 for (int i = 0; i < kPartCount; i++) {
43 while (tab_[i].val) {
44 SyncVar *tmp = tab_[i].val;
45 tab_[i].val = tmp->next;
46 DestroyAndFree(tmp);
47 }
48 }
49}
50
// Returns the SyncVar for 'addr', creating it if it does not exist yet.
// The returned object's mutex is held: exclusively if 'write_lock',
// shared otherwise.
SyncVar* SyncTab::GetOrCreateAndLock(ThreadState *thr, uptr pc,
                                     uptr addr, bool write_lock) {
  return GetAndLock(thr, pc, addr, write_lock, true);
}
55
// Returns the locked SyncVar for 'addr', or 0 if none exists.
// thr/pc are passed as 0 because with create==false no SyncVar (and thus
// no creation stack id) is ever allocated.
SyncVar* SyncTab::GetIfExistsAndLock(uptr addr, bool write_lock) {
  return GetAndLock(0, 0, addr, write_lock, false);
}
59
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000060SyncVar* SyncTab::Create(ThreadState *thr, uptr pc, uptr addr) {
61 StatInc(thr, StatSyncCreated);
62 void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
63 const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
64 SyncVar *res = new(mem) SyncVar(addr, uid);
65#ifndef TSAN_GO
Dmitry Vyukova2216202013-03-18 08:27:47 +000066 res->creation_stack_id = CurrentStackId(thr, pc);
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000067#endif
68 return res;
69}
70
// Core lookup: finds (and optionally creates) the SyncVar for 'addr' and
// returns it with its mutex held (exclusive if 'write_lock', else shared).
// Returns 0 only when create==false and no SyncVar exists.
SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
                             uptr addr, bool write_lock, bool create) {
#ifndef TSAN_GO
  { // NOLINT
    // Java-heap addresses are handled by a dedicated table.
    SyncVar *res = GetJavaSync(thr, pc, addr, write_lock, create);
    if (res)
      return res;
  }

  // Here we ask only PrimaryAllocator, because
  // SecondaryAllocator::PointerIsMine() is slow and we have fallback on
  // the hashmap anyway.
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    // Fast path: the SyncVar list hangs directly off the heap block that
    // contains 'addr', guarded by the block's own lock.
    MBlock *b = user_mblock(thr, (void*)addr);
    CHECK_NE(b, 0);
    MBlock::ScopedLock l(b);
    SyncVar *res = 0;
    for (res = b->ListHead(); res; res = res->next) {
      if (res->addr == addr)
        break;
    }
    if (res == 0) {
      if (!create)
        return 0;
      res = Create(thr, pc, addr);
      b->ListPush(res);
    }
    // res->mtx is acquired while still holding the block lock, so the
    // SyncVar cannot be unlinked underneath us.
    if (write_lock)
      res->mtx.Lock();
    else
      res->mtx.ReadLock();
    return res;
  }
#endif

  // Fallback path: sharded hash table keyed by address.
  Part *p = &tab_[PartIdx(addr)];
  {
    // Optimistic probe under a shared lock — the common case when the
    // SyncVar already exists.
    ReadLock l(&p->mtx);
    for (SyncVar *res = p->val; res; res = res->next) {
      if (res->addr == addr) {
        if (write_lock)
          res->mtx.Lock();
        else
          res->mtx.ReadLock();
        return res;
      }
    }
  }
  if (!create)
    return 0;
  {
    // Insert path under the exclusive lock. The list must be re-scanned:
    // another thread may have inserted the SyncVar between releasing the
    // read lock above and acquiring the write lock here.
    Lock l(&p->mtx);
    SyncVar *res = p->val;
    for (; res; res = res->next) {
      if (res->addr == addr)
        break;
    }
    if (res == 0) {
      res = Create(thr, pc, addr);
      res->next = p->val;  // push at head of the part's list
      p->val = res;
    }
    if (write_lock)
      res->mtx.Lock();
    else
      res->mtx.ReadLock();
    return res;
  }
}
140
// Unlinks and returns the SyncVar for 'addr' (e.g. when the underlying
// memory is freed), or 0 if there is none or it is linker-initialized
// (those are never removed). The returned object is unlocked; ownership
// passes to the caller.
SyncVar* SyncTab::GetAndRemove(ThreadState *thr, uptr pc, uptr addr) {
#ifndef TSAN_GO
  { // NOLINT
    // Java-heap addresses are handled by a dedicated table.
    SyncVar *res = GetAndRemoveJavaSync(thr, pc, addr);
    if (res)
      return res;
  }
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    // Fast path: the SyncVar is linked off the owning heap block.
    MBlock *b = user_mblock(thr, (void*)addr);
    CHECK_NE(b, 0);
    SyncVar *res = 0;
    {
      MBlock::ScopedLock l(b);
      res = b->ListHead();
      if (res) {
        if (res->addr == addr) {
          if (res->is_linker_init)
            return 0;
          b->ListPop();  // head element matches
        } else {
          // Scan the rest of the list, keeping a pointer to the previous
          // link so the match can be spliced out.
          SyncVar **prev = &res->next;
          res = *prev;
          while (res) {
            if (res->addr == addr) {
              if (res->is_linker_init)
                return 0;
              *prev = res->next;
              break;
            }
            prev = &res->next;
            res = *prev;
          }
        }
        if (res) {
          StatInc(thr, StatSyncDestroyed);
          // Lock/Unlock cycle on the now-unlinked SyncVar —
          // NOTE(review): presumably this drains any thread still holding
          // the mutex before the caller destroys the object; confirm.
          res->mtx.Lock();
          res->mtx.Unlock();
        }
      }
    }
    return res;
  }
#endif

  // Fallback path: sharded hash table keyed by address.
  Part *p = &tab_[PartIdx(addr)];
  SyncVar *res = 0;
  {
    Lock l(&p->mtx);
    SyncVar **prev = &p->val;
    res = *prev;
    while (res) {
      if (res->addr == addr) {
        if (res->is_linker_init)
          return 0;
        *prev = res->next;  // splice out of the part's list
        break;
      }
      prev = &res->next;
      res = *prev;
    }
  }
  if (res) {
    StatInc(thr, StatSyncDestroyed);
    // Same drain-before-return cycle as in the MBlock path above.
    res->mtx.Lock();
    res->mtx.Unlock();
  }
  return res;
}
209
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000210int SyncTab::PartIdx(uptr addr) {
211 return (addr >> 3) % kPartCount;
212}
213
// Creates an empty trace with no backing buffer. With c_ == 0 the object
// owns its storage: Init()/ObtainCurrent() heap-allocate and Reset() frees.
StackTrace::StackTrace()
  : n_()
  , s_()
  , c_() {
}
219
// Creates a trace backed by the caller-provided buffer 'buf' holding up to
// 'cnt' entries. Because c_ != 0, Reset() never frees this buffer — its
// lifetime is the caller's responsibility.
StackTrace::StackTrace(uptr *buf, uptr cnt)
  : n_()
  , s_(buf)
  , c_(cnt) {
  CHECK_NE(buf, 0);
  CHECK_NE(cnt, 0);
}
227
// Releases heap-owned storage (if any) via Reset().
StackTrace::~StackTrace() {
  Reset();
}
231
232void StackTrace::Reset() {
Dmitry Vyukovde1fd1c2012-06-22 11:08:55 +0000233 if (s_ && !c_) {
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000234 CHECK_NE(n_, 0);
235 internal_free(s_);
236 s_ = 0;
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000237 }
Dmitry Vyukovde1fd1c2012-06-22 11:08:55 +0000238 n_ = 0;
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000239}
240
241void StackTrace::Init(const uptr *pcs, uptr cnt) {
242 Reset();
243 if (cnt == 0)
244 return;
Dmitry Vyukovde1fd1c2012-06-22 11:08:55 +0000245 if (c_) {
246 CHECK_NE(s_, 0);
247 CHECK_LE(cnt, c_);
248 } else {
249 s_ = (uptr*)internal_alloc(MBlockStackTrace, cnt * sizeof(s_[0]));
250 }
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000251 n_ = cnt;
Dmitry Vyukovde1fd1c2012-06-22 11:08:55 +0000252 internal_memcpy(s_, pcs, cnt * sizeof(s_[0]));
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000253}
254
// Captures the current thread's shadow stack into this trace, optionally
// appending 'toppc' as the innermost frame (when toppc != 0).
// If a fixed buffer (c_ != 0) is too small, the OLDEST frames are dropped
// ('start' skips them) so the innermost frames are kept.
void StackTrace::ObtainCurrent(ThreadState *thr, uptr toppc) {
  Reset();
  n_ = thr->shadow_stack_pos - thr->shadow_stack;
  // !!toppc is 1 iff an extra top frame will be appended.
  if (n_ + !!toppc == 0)
    return;
  uptr start = 0;
  if (c_) {
    CHECK_NE(s_, 0);
    if (n_ + !!toppc > c_) {
      // Truncate: skip the first (outermost) frames, reserve one slot
      // for toppc if present.
      start = n_ - c_ + !!toppc;
      n_ = c_ - !!toppc;
    }
  } else {
    // Object-owned mode: allocate room for the stack plus toppc.
    s_ = (uptr*)internal_alloc(MBlockStackTrace,
                               (n_ + !!toppc) * sizeof(s_[0]));
  }
  for (uptr i = 0; i < n_; i++)
    s_[i] = thr->shadow_stack[start + i];
  if (toppc) {
    s_[n_] = toppc;
    n_++;
  }
}
278
// Replaces this trace with a copy of 'other'.
// Note: Init() begins with its own Reset(), so the explicit Reset() here
// is redundant but harmless.
void StackTrace::CopyFrom(const StackTrace& other) {
  Reset();
  Init(other.Begin(), other.Size());
}
283
// True iff the trace contains no frames.
bool StackTrace::IsEmpty() const {
  return n_ == 0;
}
287
// Number of frames currently stored.
uptr StackTrace::Size() const {
  return n_;
}
291
// Returns frame i; asserts i is in range.
uptr StackTrace::Get(uptr i) const {
  CHECK_LT(i, n_);
  return s_[i];
}
296
// Raw pointer to the first frame (0 when no storage is attached).
const uptr *StackTrace::Begin() const {
  return s_;
}
300
301} // namespace __tsan