blob: 71e0747c3646f30f7d62581407e35056dcbab412 [file] [log] [blame]
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +00001//===-- tsan_interface_java.cc --------------------------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of ThreadSanitizer (TSan), a race detector.
11//
12//===----------------------------------------------------------------------===//
13
14#include "tsan_interface_java.h"
15#include "tsan_rtl.h"
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000016#include "tsan_mutex.h"
17#include "sanitizer_common/sanitizer_internal_defs.h"
18#include "sanitizer_common/sanitizer_common.h"
19#include "sanitizer_common/sanitizer_placement_new.h"
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +000020
21using namespace __tsan; // NOLINT
22
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000023namespace __tsan {
24
// Fixed address at which the Java heap shadow (an array of BlockDesc,
// one per heap granule) is mapped; see JavaContext's constructor.
const uptr kHeapShadow = 0x300000000000ull;
// Granularity of the Java heap: one BlockDesc covers 8 bytes of heap,
// and all Java pointers/sizes must be 8-byte aligned.
const uptr kHeapAlignment = 8;
27
// Shadow descriptor for one kHeapAlignment-sized granule of the Java heap.
// A descriptor with begin == true marks the first granule of a live Java
// object (a "block") and owns the list of sync objects inside that block.
struct BlockDesc {
  bool begin;     // true iff this granule is the start of a live block
  Mutex mtx;      // protects 'head'
  SyncVar *head;  // singly-linked list of sync objects within the block

  // 'begin' is intentionally not in the initializer list: the shadow is
  // mapped with MmapFixedNoReserve (presumably zero-filled — the CHECK
  // relies on that), so CHECK_EQ detects double-construction of a block.
  BlockDesc()
      : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
      , head() {
    CHECK_EQ(begin, false);
    begin = true;
  }

  // Destroys every sync object owned by the block. The Lock/Unlock pair
  // on each sync's mutex drains any owner still inside it before the
  // object is freed.
  ~BlockDesc() {
    CHECK_EQ(begin, true);
    begin = false;
    ThreadState *thr = cur_thread();
    SyncVar *s = head;
    while (s) {
      SyncVar *s1 = s->next;  // save next before freeing s
      StatInc(thr, StatSyncDestroyed);
      s->mtx.Lock();
      s->mtx.Unlock();
      thr->mset.Remove(s->GetId());
      DestroyAndFree(s);
      s = s1;
    }
  }
};
56
// Global state for the Java interface: the registered heap range and its
// shadow. A single instance is placement-new'ed into jctx_buf by
// __tsan_java_init().
struct JavaContext {
  const uptr heap_begin;   // start of the Java heap (inclusive)
  const uptr heap_size;    // size of the Java heap in bytes
  BlockDesc *heap_shadow;  // one BlockDesc per kHeapAlignment bytes of heap

  JavaContext(jptr heap_begin, jptr heap_size)
      : heap_begin(heap_begin)
      , heap_size(heap_size) {
    // Shadow size: one descriptor per heap granule.
    uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
    heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
    // The shadow must land exactly at kHeapShadow (getblock/getmem
    // arithmetic depends on it); otherwise the run cannot continue.
    if ((uptr)heap_shadow != kHeapShadow) {
      Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
      Die();
    }
  }
};
73
// RAII guard used by every __tsan_java_* entry point: lazily initializes
// the runtime, records function entry/exit for stack traces, and bumps
// thr->in_rtl so nested interceptors know we are inside the runtime.
class ScopedJavaFunc {
 public:
  ScopedJavaFunc(ThreadState *thr, uptr pc)
      : thr_(thr) {
    Initialize(thr_);
    FuncEntry(thr, pc);
    // Java entry points must not be re-entered from within the runtime.
    CHECK_EQ(thr_->in_rtl, 0);
    thr_->in_rtl++;
  }

  ~ScopedJavaFunc() {
    thr_->in_rtl--;
    CHECK_EQ(thr_->in_rtl, 0);
    FuncExit(thr_);
    // FIXME(dvyukov): process pending signals.
  }

 private:
  ThreadState *thr_;
};
94
// Static storage for the singleton JavaContext (placement-new'ed in
// __tsan_java_init); declared as a u64 array to guarantee 8-byte alignment
// without any dynamic allocation.
static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
static JavaContext *jctx;  // null until __tsan_java_init() runs
97
98static BlockDesc *getblock(uptr addr) {
99 uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
100 return &jctx->heap_shadow[i];
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000101}
102
Dmitry Vyukova33bf272012-12-21 13:23:48 +0000103static uptr USED getmem(BlockDesc *b) {
104 uptr i = b - jctx->heap_shadow;
105 uptr p = jctx->heap_begin + i * kHeapAlignment;
106 CHECK_GE(p, jctx->heap_begin);
107 CHECK_LT(p, jctx->heap_begin + jctx->heap_size);
108 return p;
109}
110
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000111static BlockDesc *getblockbegin(uptr addr) {
112 for (BlockDesc *b = getblock(addr);; b--) {
113 CHECK_GE(b, jctx->heap_shadow);
114 if (b->begin)
115 return b;
116 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000117 return 0;
118}
119
// Looks up (and optionally creates) the sync object for a Java address.
// Returns 0 if the address is outside the registered Java heap or no sync
// exists and create == false. On success the sync's mutex is returned
// locked — for writing if write_lock, otherwise for reading; the caller
// is responsible for unlocking.
SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
                     bool write_lock, bool create) {
  if (jctx == 0 || addr < jctx->heap_begin
      || addr >= jctx->heap_begin + jctx->heap_size)
    return 0;
  // Find the block containing addr; its BlockDesc owns the sync list.
  BlockDesc *b = getblockbegin(addr);
  DPrintf("#%d: GetJavaSync %p->%p\n", thr->tid, addr, b);
  Lock l(&b->mtx);  // protects b->head for the lookup/insert below
  SyncVar *s = b->head;
  for (; s; s = s->next) {
    if (s->addr == addr) {
      DPrintf("#%d: found existing sync for %p\n", thr->tid, addr);
      break;
    }
  }
  if (s == 0 && create) {
    DPrintf("#%d: creating new sync for %p\n", thr->tid, addr);
    s = CTX()->synctab.Create(thr, pc, addr);
    // Push onto the block's list (still under b->mtx).
    s->next = b->head;
    b->head = s;
  }
  if (s) {
    // Acquire the sync's own mutex before releasing b->mtx (via ~Lock).
    if (write_lock)
      s->mtx.Lock();
    else
      s->mtx.ReadLock();
  }
  return s;
}
149
150SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
151 // We do not destroy Java mutexes other than in __tsan_java_free().
152 return 0;
153}
154
Alexey Samsonov49a32c12013-01-30 07:45:58 +0000155} // namespace __tsan
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000156
// Common prologue for every __tsan_java_* entry point: fetches the current
// ThreadState and caller PC, then enters the runtime via ScopedJavaFunc
// (which also lazily initializes tsan). Introduces the names 'thr',
// 'caller_pc' and 'pc' into the function body; 'pc' is the address of the
// interface function itself. (No comments inside the macro — they would
// break the backslash line continuations.)
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = (uptr)&func; \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
/**/
164
// Registers the Java heap [heap_begin, heap_begin + heap_size) and creates
// its shadow. Must be called exactly once, before any other __tsan_java_*
// function; both bounds must be kHeapAlignment-aligned.
void __tsan_java_init(jptr heap_begin, jptr heap_size) {
  SCOPED_JAVA_FUNC(__tsan_java_init);
  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
  CHECK_EQ(jctx, 0);  // single initialization only
  CHECK_GT(heap_begin, 0);
  CHECK_GT(heap_size, 0);
  CHECK_EQ(heap_begin % kHeapAlignment, 0);
  CHECK_EQ(heap_size % kHeapAlignment, 0);
  CHECK_LT(heap_begin, heap_begin + heap_size);  // also catches wraparound
  // Placement-new into static storage: no heap allocation at startup.
  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}
176
// Finalizes the runtime (flushes reports) and returns Finalize()'s status
// code to the caller.
int __tsan_java_fini() {
  SCOPED_JAVA_FUNC(__tsan_java_fini);
  DPrintf("#%d: java_fini()\n", thr->tid);
  CHECK_NE(jctx, 0);  // must have been initialized
  // FIXME(dvyukov): this does not call atexit() callbacks.
  int status = Finalize(thr);
  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
  return status;
}
186
// Marks [ptr, ptr + size) as a newly allocated Java object: constructs a
// BlockDesc at the object's first shadow granule, setting begin = true.
// ptr and size must be kHeapAlignment-aligned and inside the Java heap.
void __tsan_java_alloc(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_alloc);
  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *b = getblock(ptr);
  new(b) BlockDesc();  // placement construction in the pre-mapped shadow
}
200
// Frees every Java object that begins within [ptr, ptr + size): runs the
// BlockDesc destructor for each granule marked as a block start, which
// destroys the sync objects owned by those blocks.
void __tsan_java_free(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_free);
  DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *beg = getblock(ptr);
  BlockDesc *end = getblock(ptr + size);  // one past the last granule
  for (BlockDesc *b = beg; b != end; b++) {
    if (b->begin)  // only block starts were ever constructed
      b->~BlockDesc();
  }
}
218
// Relocates objects in [src, src + size) to [dst, dst + size) after the
// GC has moved them: transfers BlockDesc shadow state (rebasing the
// addresses of attached sync objects) and then the access shadow itself.
// The two ranges must not overlap. Assumes no concurrent memory accesses
// or mutex operations (stop-the-world phase) — nothing here is locked.
void __tsan_java_move(jptr src, jptr dst, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_move);
  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(src % kHeapAlignment, 0);
  CHECK_EQ(dst % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(src, jctx->heap_begin);
  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
  CHECK_GE(dst, jctx->heap_begin);
  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
  CHECK(dst >= src + size || src >= dst + size);  // ranges must not overlap

  // Assuming it's not running concurrently with threads that do
  // memory accesses and mutex operations (stop-the-world phase).
  { // NOLINT
    // Phase 1: move the BlockDesc shadow granule by granule.
    BlockDesc *s = getblock(src);
    BlockDesc *d = getblock(dst);
    BlockDesc *send = getblock(src + size);
    for (; s != send; s++, d++) {
      CHECK_EQ(d->begin, false);  // destination must be unallocated
      if (s->begin) {
        DPrintf("#%d: moving block %p->%p\n", thr->tid, getmem(s), getmem(d));
        new(d) BlockDesc;
        // Steal the sync list and rebase each sync's address into dst.
        d->head = s->head;
        for (SyncVar *sync = d->head; sync; sync = sync->next) {
          uptr newaddr = sync->addr - src + dst;
          DPrintf("#%d: moving sync %p->%p\n", thr->tid, sync->addr, newaddr);
          sync->addr = newaddr;
        }
        // Clear before destruction so ~BlockDesc does not free the syncs
        // we just transferred.
        s->head = 0;
        s->~BlockDesc();
      }
    }
  }

  { // NOLINT
    // Phase 2: move the access shadow, zeroing the source cells.
    u64 *s = (u64*)MemToShadow(src);
    u64 *d = (u64*)MemToShadow(dst);
    u64 *send = (u64*)MemToShadow(src + size);
    for (; s != send; s++, d++) {
      *d = *s;
      *s = 0;
    }
  }
}
266
// Write-lock acquisition on the Java monitor at addr. MutexCreate is
// called on every lock (the three 'true' flags presumably mark it
// rw/recursive/linker-init — confirm against MutexCreate's signature),
// then the lock event is recorded.
void __tsan_java_mutex_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexLock(thr, pc, addr);
}
277
// Records release of the write lock on the Java monitor at addr.
void __tsan_java_mutex_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexUnlock(thr, pc, addr);
}
287
// Read-lock acquisition on the Java monitor at addr; same creation flags
// as __tsan_java_mutex_lock, but records a read (shared) lock event.
void __tsan_java_mutex_read_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexReadLock(thr, pc, addr);
}
298
// Records release of the read (shared) lock on the Java monitor at addr.
void __tsan_java_mutex_read_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadUnlock(thr, pc, addr);
}
Dmitry Vyukovc9af8182013-05-17 12:03:46 +0000308
// Re-acquires the Java monitor at addr with recursion count rec (> 0) —
// used when a JVM re-locks a monitor it previously released with a saved
// recursion depth (e.g. around Object.wait()).
void __tsan_java_mutex_lock_rec(jptr addr, int rec) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock_rec);
  DPrintf("#%d: java_mutex_lock_rec(%p, %d)\n", thr->tid, addr, rec);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
  CHECK_GT(rec, 0);

  MutexCreate(thr, pc, addr, true, true, true);
  MutexLock(thr, pc, addr, rec);
}
320
// Fully releases the Java monitor at addr regardless of recursion depth
// (MutexUnlock's trailing 'true' flag) and returns the value MutexUnlock
// reports — presumably the released recursion count, to be passed back to
// __tsan_java_mutex_lock_rec later; confirm against MutexUnlock.
int __tsan_java_mutex_unlock_rec(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock_rec);
  DPrintf("#%d: java_mutex_unlock_rec(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  return MutexUnlock(thr, pc, addr, true);
}