blob: d0c003ea74355f6f91825b0ba007ad4b44403933 [file] [log] [blame]
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +00001//===-- tsan_interface_java.cc --------------------------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of ThreadSanitizer (TSan), a race detector.
11//
12//===----------------------------------------------------------------------===//
13
14#include "tsan_interface_java.h"
15#include "tsan_rtl.h"
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000016#include "tsan_mutex.h"
17#include "sanitizer_common/sanitizer_internal_defs.h"
18#include "sanitizer_common/sanitizer_common.h"
19#include "sanitizer_common/sanitizer_placement_new.h"
Dmitry Vyukov5cf581a2013-06-17 19:57:03 +000020#include "sanitizer_common/sanitizer_stacktrace.h"
Dmitry Vyukov150406b2013-09-21 23:06:00 +000021#include "sanitizer_common/sanitizer_procmaps.h"
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +000022
23using namespace __tsan; // NOLINT
24
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000025namespace __tsan {
26
// Fixed address at which the BlockDesc shadow array for the Java heap is
// mapped (see JavaContext ctor, which Die()s if mmap lands elsewhere).
const uptr kHeapShadow = 0x300000000000ull;
// Java heap granularity: one BlockDesc shadow cell per this many bytes.
const uptr kHeapAlignment = 8;
29
// Shadow descriptor for one kHeapAlignment-sized granule of the Java heap.
// A granule at which a Java allocation starts has begin == true and owns
// the list of SyncVar's for all addresses inside that allocation.
struct BlockDesc {
  bool begin;
  Mutex mtx;      // protects 'head'
  SyncVar *head;  // singly-linked list of sync objects within this block

  BlockDesc()
      : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
      , head() {
    // Relies on the shadow being zero-initialized memory (fresh mmap),
    // so a granule can be checked as not-yet-begun before construction.
    CHECK_EQ(begin, false);
    begin = true;
  }

  // Destroys the descriptor and frees all sync objects hanging off it.
  ~BlockDesc() {
    CHECK_EQ(begin, true);
    begin = false;
    ThreadState *thr = cur_thread();
    SyncVar *s = head;
    while (s) {
      SyncVar *s1 = s->next;
      StatInc(thr, StatSyncDestroyed);
      // Lock/unlock pair serializes with any thread still holding the
      // sync's mutex before the object is freed.
      s->mtx.Lock();
      s->mtx.Unlock();
      thr->mset.Remove(s->GetId());
      DestroyAndFree(s);
      s = s1;
    }
  }
};
58
// Global descriptor of the registered Java heap: its bounds plus the
// BlockDesc shadow array covering it (one cell per kHeapAlignment bytes).
struct JavaContext {
  const uptr heap_begin;
  const uptr heap_size;
  BlockDesc *heap_shadow;

  JavaContext(jptr heap_begin, jptr heap_size)
      : heap_begin(heap_begin)
      , heap_size(heap_size) {
    // One BlockDesc per granule of the heap.
    uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
    heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
    // The shadow must land exactly at kHeapShadow; anything else is fatal.
    if ((uptr)heap_shadow != kHeapShadow) {
      Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
      Die();
    }
  }
};
75
// RAII scope for a __tsan_java_* entry point: lazily initializes the
// runtime and brackets the call with FuncEntry/FuncExit on the current
// thread's shadow stack.
class ScopedJavaFunc {
 public:
  ScopedJavaFunc(ThreadState *thr, uptr pc)
      : thr_(thr) {
    Initialize(thr_);
    FuncEntry(thr, pc);
  }

  ~ScopedJavaFunc() {
    FuncExit(thr_);
    // FIXME(dvyukov): process pending signals.
  }

 private:
  ThreadState *thr_;
};
92
// Static storage for the JavaContext singleton (placement-new'ed in
// __tsan_java_init); declared as a u64 array to guarantee alignment
// without dynamic allocation.
static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
// Null until __tsan_java_init() runs; all entry points CHECK it.
static JavaContext *jctx;
95
96static BlockDesc *getblock(uptr addr) {
97 uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
98 return &jctx->heap_shadow[i];
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +000099}
100
Dmitry Vyukova33bf272012-12-21 13:23:48 +0000101static uptr USED getmem(BlockDesc *b) {
102 uptr i = b - jctx->heap_shadow;
103 uptr p = jctx->heap_begin + i * kHeapAlignment;
104 CHECK_GE(p, jctx->heap_begin);
105 CHECK_LT(p, jctx->heap_begin + jctx->heap_size);
106 return p;
107}
108
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000109static BlockDesc *getblockbegin(uptr addr) {
110 for (BlockDesc *b = getblock(addr);; b--) {
111 CHECK_GE(b, jctx->heap_shadow);
112 if (b->begin)
113 return b;
114 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000115 return 0;
116}
117
// Returns the SyncVar for the Java object at 'addr', or 0 if addr lies
// outside the registered Java heap, or if no sync exists yet and
// create == false. On success the sync's mutex is returned locked
// (write- or read-locked per 'write_lock'); the caller must unlock it.
SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
                     bool write_lock, bool create) {
  if (jctx == 0 || addr < jctx->heap_begin
      || addr >= jctx->heap_begin + jctx->heap_size)
    return 0;
  BlockDesc *b = getblockbegin(addr);
  DPrintf("#%d: GetJavaSync %p->%p\n", thr->tid, addr, b);
  Lock l(&b->mtx);  // protects the block's sync list
  // Linear search of the block's sync list for an exact address match.
  SyncVar *s = b->head;
  for (; s; s = s->next) {
    if (s->addr == addr) {
      DPrintf("#%d: found existing sync for %p\n", thr->tid, addr);
      break;
    }
  }
  if (s == 0 && create) {
    DPrintf("#%d: creating new sync for %p\n", thr->tid, addr);
    s = ctx->synctab.Create(thr, pc, addr);
    // Push onto the head of the block's list.
    s->next = b->head;
    b->head = s;
  }
  if (s) {
    // Acquire the sync's mutex while still holding the block mutex, so
    // the sync cannot be destroyed (via __tsan_java_free) in between.
    if (write_lock)
      s->mtx.Lock();
    else
      s->mtx.ReadLock();
  }
  return s;
}
147
// Intentionally a no-op: Java syncs are destroyed only in bulk when their
// block is freed (__tsan_java_free / BlockDesc dtor), never individually.
SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
  // We do not destroy Java mutexes other than in __tsan_java_free().
  return 0;
}
152
Alexey Samsonov49a32c12013-01-30 07:45:58 +0000153} // namespace __tsan
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000154
// Common prologue for every __tsan_java_* entry point: introduces 'thr'
// and 'pc' locals used by the function body, and a ScopedJavaFunc whose
// ctor/dtor bracket the call with FuncEntry/FuncExit. (No comments inside
// the macro body: '//' before a trailing backslash would splice away the
// continuation line.)
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = __sanitizer::StackTrace::GetCurrentPc(); \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
/**/
162
// Registers the Java heap [heap_begin, heap_begin + heap_size) with the
// runtime. Must be called exactly once before any other __tsan_java_*
// call; both arguments must be non-zero and kHeapAlignment-aligned.
void __tsan_java_init(jptr heap_begin, jptr heap_size) {
  SCOPED_JAVA_FUNC(__tsan_java_init);
  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
  CHECK_EQ(jctx, 0);  // double initialization is a usage error
  CHECK_GT(heap_begin, 0);
  CHECK_GT(heap_size, 0);
  CHECK_EQ(heap_begin % kHeapAlignment, 0);
  CHECK_EQ(heap_size % kHeapAlignment, 0);
  CHECK_LT(heap_begin, heap_begin + heap_size);  // no address-space wrap
  // Placement-new into static storage; never destroyed.
  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}
174
// Finalizes the runtime (report accounting etc. via Finalize) and returns
// the process exit status the embedder should use.
int __tsan_java_fini() {
  SCOPED_JAVA_FUNC(__tsan_java_fini);
  DPrintf("#%d: java_fini()\n", thr->tid);
  CHECK_NE(jctx, 0);
  // FIXME(dvyukov): this does not call atexit() callbacks.
  int status = Finalize(thr);
  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
  return status;
}
184
// Registers a freshly allocated Java region [ptr, ptr + size): constructs
// a BlockDesc at the region's first shadow granule, marking the start of
// the block. Both ptr and size must be kHeapAlignment-aligned and the
// region must lie within the registered heap.
void __tsan_java_alloc(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_alloc);
  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *b = getblock(ptr);
  new(b) BlockDesc();
}
198
199void __tsan_java_free(jptr ptr, jptr size) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000200 SCOPED_JAVA_FUNC(__tsan_java_free);
201 DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
202 CHECK_NE(jctx, 0);
203 CHECK_NE(size, 0);
Dmitry Vyukova5b57102012-12-21 11:16:40 +0000204 CHECK_EQ(ptr % kHeapAlignment, 0);
205 CHECK_EQ(size % kHeapAlignment, 0);
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000206 CHECK_GE(ptr, jctx->heap_begin);
207 CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
208
209 BlockDesc *beg = getblock(ptr);
210 BlockDesc *end = getblock(ptr + size);
211 for (BlockDesc *b = beg; b != end; b++) {
212 if (b->begin)
213 b->~BlockDesc();
214 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000215}
216
// Notifies the detector that the GC moved 'size' bytes of objects from
// 'src' to 'dst' (non-overlapping ranges). Moves both the BlockDesc
// shadow (including sync lists, rebasing each sync's address) and the
// race-detection shadow cells. Must run during stop-the-world.
void __tsan_java_move(jptr src, jptr dst, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_move);
  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(src % kHeapAlignment, 0);
  CHECK_EQ(dst % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(src, jctx->heap_begin);
  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
  CHECK_GE(dst, jctx->heap_begin);
  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
  CHECK(dst >= src + size || src >= dst + size);  // ranges must not overlap

  // Assuming it's not running concurrently with threads that do
  // memory accesses and mutex operations (stop-the-world phase).
  { // NOLINT
    // Phase 1: move the BlockDesc shadow. Re-create each live descriptor
    // at the destination, transfer its sync list, and destroy the source.
    BlockDesc *s = getblock(src);
    BlockDesc *d = getblock(dst);
    BlockDesc *send = getblock(src + size);
    for (; s != send; s++, d++) {
      CHECK_EQ(d->begin, false);  // destination granules must be free
      if (s->begin) {
        DPrintf("#%d: moving block %p->%p\n", thr->tid, getmem(s), getmem(d));
        new(d) BlockDesc;
        d->head = s->head;
        // Rebase every sync's address from the src range to the dst range.
        for (SyncVar *sync = d->head; sync; sync = sync->next) {
          uptr newaddr = sync->addr - src + dst;
          DPrintf("#%d: moving sync %p->%p\n", thr->tid, sync->addr, newaddr);
          sync->addr = newaddr;
        }
        // Detach before destruction so the dtor does not free the syncs
        // that were just transferred to 'd'.
        s->head = 0;
        s->~BlockDesc();
      }
    }
  }

  { // NOLINT
    // Phase 2: move the race-detection shadow cells, clearing the source.
    u64 *s = (u64*)MemToShadow(src);
    u64 *d = (u64*)MemToShadow(dst);
    u64 *send = (u64*)MemToShadow(src + size);
    for (; s != send; s++, d++) {
      *d = *s;
      *s = 0;
    }
  }
}
264
// Java monitor enter: lazily registers 'addr' as a mutex and write-locks
// it. NOTE(review): the three 'true' flags to MutexCreate presumably mark
// it rw/recursive/linker-initialized — confirm against MutexCreate's
// declaration in tsan_rtl.h.
void __tsan_java_mutex_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexLock(thr, pc, addr);
}
275
// Java monitor exit: releases the write lock on the monitor at 'addr'.
// No MutexCreate here — the monitor must have been registered by a prior
// lock call.
void __tsan_java_mutex_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexUnlock(thr, pc, addr);
}
285
// Read (shared) acquisition of the Java monitor at 'addr'; registers the
// monitor lazily with the same creation flags as the write-lock path.
void __tsan_java_mutex_read_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexReadLock(thr, pc, addr);
}
296
// Releases a read (shared) lock on the Java monitor at 'addr'.
void __tsan_java_mutex_read_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadUnlock(thr, pc, addr);
}
Dmitry Vyukovc9af8182013-05-17 12:03:46 +0000306
// Acquires the Java monitor at 'addr' with recursion count 'rec' (> 0) in
// one call. NOTE(review): presumably used to restore a recursively-held
// monitor after Object.wait() — pairs with __tsan_java_mutex_unlock_rec's
// return value; confirm against the embedder.
void __tsan_java_mutex_lock_rec(jptr addr, int rec) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock_rec);
  DPrintf("#%d: java_mutex_lock_rec(%p, %d)\n", thr->tid, addr, rec);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
  CHECK_GT(rec, 0);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexLock(thr, pc, addr, rec);
}
318
// Fully releases a (possibly recursively held) Java monitor and returns
// the released recursion count, which the embedder later feeds back to
// __tsan_java_mutex_lock_rec. NOTE(review): 'true' presumably means
// "release all recursion levels" — confirm against MutexUnlock's
// declaration in tsan_rtl.h.
int __tsan_java_mutex_unlock_rec(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock_rec);
  DPrintf("#%d: java_mutex_unlock_rec(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  return MutexUnlock(thr, pc, addr, true);
}