//===-- tsan_interface_java.cc --------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//

#include "tsan_interface_java.h"
#include "tsan_rtl.h"
#include "tsan_mutex.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_procmaps.h"

using namespace __tsan;  // NOLINT

namespace __tsan {

const uptr kHeapShadow = 0x300000000000ull;
const uptr kHeapAlignment = 8;

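// Descriptor for an 8-byte (kHeapAlignment) slot of the Java heap.
// The descriptor of the first slot of an allocated block has begin == true
// and owns the list of sync objects (SyncVar) created for addresses inside
// that block; the destructor flushes and frees them.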
struct BlockDesc {
  bool begin;
  Mutex mtx;
  SyncVar *head;

  BlockDesc()
      : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
      , head() {
    CHECK_EQ(begin, false);
    begin = true;
  }

  ~BlockDesc() {
    CHECK_EQ(begin, true);
    begin = false;
    ThreadState *thr = cur_thread();
    SyncVar *s = head;
    while (s) {
      SyncVar *s1 = s->next;
      StatInc(thr, StatSyncDestroyed);
      s->mtx.Lock();
      s->mtx.Unlock();
      thr->mset.Remove(s->GetId());
      DestroyAndFree(s);
      s = s1;
    }
  }
};

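// Global state for the registered Java heap: its bounds and the shadow array
// of BlockDesc (one descriptor per kHeapAlignment bytes), mapped at the fixed
// address kHeapShadow.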
struct JavaContext {
  const uptr heap_begin;
  const uptr heap_size;
  BlockDesc *heap_shadow;

  JavaContext(jptr heap_begin, jptr heap_size)
      : heap_begin(heap_begin)
      , heap_size(heap_size) {
    uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
    heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
    if ((uptr)heap_shadow != kHeapShadow) {
      Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
      Die();
    }
  }
};

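// RAII guard for the __tsan_java_* entry points: initializes the runtime if
// needed, emits function entry/exit events and marks the thread as being
// inside the runtime (in_rtl) for the duration of the call.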
class ScopedJavaFunc {
 public:
  ScopedJavaFunc(ThreadState *thr, uptr pc)
      : thr_(thr) {
    Initialize(thr_);
    FuncEntry(thr, pc);
    CHECK_EQ(thr_->in_rtl, 0);
    thr_->in_rtl++;
  }

  ~ScopedJavaFunc() {
    thr_->in_rtl--;
    CHECK_EQ(thr_->in_rtl, 0);
    FuncExit(thr_);
    // FIXME(dvyukov): process pending signals.
  }

 private:
  ThreadState *thr_;
};

static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
static JavaContext *jctx;

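// Address <-> block descriptor mapping. getblock() returns the descriptor of
// the 8-byte slot containing addr, getmem() is the reverse mapping, and
// getblockbegin() scans backwards to the descriptor that starts the block.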
static BlockDesc *getblock(uptr addr) {
  uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
  return &jctx->heap_shadow[i];
}

static uptr USED getmem(BlockDesc *b) {
  uptr i = b - jctx->heap_shadow;
  uptr p = jctx->heap_begin + i * kHeapAlignment;
  CHECK_GE(p, jctx->heap_begin);
  CHECK_LT(p, jctx->heap_begin + jctx->heap_size);
  return p;
}

static BlockDesc *getblockbegin(uptr addr) {
  for (BlockDesc *b = getblock(addr);; b--) {
    CHECK_GE(b, jctx->heap_shadow);
    if (b->begin)
      return b;
  }
  return 0;
}

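// Finds (or, if create is set, creates) the sync object associated with addr
// and returns it with its mutex acquired for reading or writing; returns 0
// for addresses outside the registered Java heap.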
SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
                     bool write_lock, bool create) {
  if (jctx == 0 || addr < jctx->heap_begin
      || addr >= jctx->heap_begin + jctx->heap_size)
    return 0;
  BlockDesc *b = getblockbegin(addr);
  DPrintf("#%d: GetJavaSync %p->%p\n", thr->tid, addr, b);
  Lock l(&b->mtx);
  SyncVar *s = b->head;
  for (; s; s = s->next) {
    if (s->addr == addr) {
      DPrintf("#%d: found existing sync for %p\n", thr->tid, addr);
      break;
    }
  }
  if (s == 0 && create) {
    DPrintf("#%d: creating new sync for %p\n", thr->tid, addr);
    s = CTX()->synctab.Create(thr, pc, addr);
    s->next = b->head;
    b->head = s;
  }
  if (s) {
    if (write_lock)
      s->mtx.Lock();
    else
      s->mtx.ReadLock();
  }
  return s;
}

SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
  // We do not destroy Java mutexes other than in __tsan_java_free().
  return 0;
}

}  // namespace __tsan

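// Common prologue for the interface functions below: defines thr, caller_pc
// and pc, and keeps a ScopedJavaFunc guard alive for the duration of the call.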
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = __sanitizer::StackTrace::GetCurrentPc(); \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
/**/

void __tsan_java_init(jptr heap_begin, jptr heap_size) {
  SCOPED_JAVA_FUNC(__tsan_java_init);
  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
  CHECK_EQ(jctx, 0);
  CHECK_GT(heap_begin, 0);
  CHECK_GT(heap_size, 0);
  CHECK_EQ(heap_begin % kHeapAlignment, 0);
  CHECK_EQ(heap_size % kHeapAlignment, 0);
  CHECK_LT(heap_begin, heap_begin + heap_size);
  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}

int __tsan_java_fini() {
  SCOPED_JAVA_FUNC(__tsan_java_fini);
  DPrintf("#%d: java_fini()\n", thr->tid);
  CHECK_NE(jctx, 0);
  // FIXME(dvyukov): this does not call atexit() callbacks.
  int status = Finalize(thr);
  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
  return status;
}

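// Registers an allocated heap block of the given size at ptr: constructs a
// BlockDesc in the shadow marking the start of the block. __tsan_java_free()
// below undoes this for every block that starts inside the freed range.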
void __tsan_java_alloc(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_alloc);
  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *b = getblock(ptr);
  new(b) BlockDesc();
}

void __tsan_java_free(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_free);
  DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *beg = getblock(ptr);
  BlockDesc *end = getblock(ptr + size);
  for (BlockDesc *b = beg; b != end; b++) {
    if (b->begin)
      b->~BlockDesc();
  }
}

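// Handles relocation of heap blocks (e.g. by a compacting collector):
// transfers block descriptors, their sync objects and the regular shadow
// memory from [src, src + size) to [dst, dst + size). The ranges must not
// overlap and, as noted below, the caller must guarantee a stop-the-world
// phase for the duration of the move.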
void __tsan_java_move(jptr src, jptr dst, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_move);
  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(src % kHeapAlignment, 0);
  CHECK_EQ(dst % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(src, jctx->heap_begin);
  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
  CHECK_GE(dst, jctx->heap_begin);
  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
  CHECK(dst >= src + size || src >= dst + size);

  // Assuming it's not running concurrently with threads that do
  // memory accesses and mutex operations (stop-the-world phase).
  { // NOLINT
    BlockDesc *s = getblock(src);
    BlockDesc *d = getblock(dst);
    BlockDesc *send = getblock(src + size);
    for (; s != send; s++, d++) {
      CHECK_EQ(d->begin, false);
      if (s->begin) {
        DPrintf("#%d: moving block %p->%p\n", thr->tid, getmem(s), getmem(d));
        new(d) BlockDesc;
        d->head = s->head;
        for (SyncVar *sync = d->head; sync; sync = sync->next) {
          uptr newaddr = sync->addr - src + dst;
          DPrintf("#%d: moving sync %p->%p\n", thr->tid, sync->addr, newaddr);
          sync->addr = newaddr;
        }
        s->head = 0;
        s->~BlockDesc();
      }
    }
  }

  { // NOLINT
    u64 *s = (u64*)MemToShadow(src);
    u64 *d = (u64*)MemToShadow(dst);
    u64 *send = (u64*)MemToShadow(src + size);
    for (; s != send; s++, d++) {
      *d = *s;
      *s = 0;
    }
  }
}

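// Java monitor operations. Any address inside the registered heap can act as
// a lock; MutexCreate is called before each acquisition, so the sync object
// is registered lazily rather than at object allocation time.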
void __tsan_java_mutex_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexLock(thr, pc, addr);
}

void __tsan_java_mutex_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexUnlock(thr, pc, addr);
}

void __tsan_java_mutex_read_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexReadLock(thr, pc, addr);
}

void __tsan_java_mutex_read_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadUnlock(thr, pc, addr);
}

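// Recursive lock/unlock, used when a lock must be released and later
// reacquired with its full recursion count (e.g. around Object.wait()):
// unlock_rec releases all recursive acquisitions and returns the count,
// lock_rec reacquires the lock rec times.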
void __tsan_java_mutex_lock_rec(jptr addr, int rec) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock_rec);
  DPrintf("#%d: java_mutex_lock_rec(%p, %d)\n", thr->tid, addr, rec);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
  CHECK_GT(rec, 0);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexLock(thr, pc, addr, rec);
}

int __tsan_java_mutex_unlock_rec(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock_rec);
  DPrintf("#%d: java_mutex_unlock_rec(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  return MutexUnlock(thr, pc, addr, true);
}