blob: c49d58b3ad3b135ddba6452d4bb878693dc8bccd [file] [log] [blame]
//===-- tsan_interface_java.cc --------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
13
14#include "tsan_interface_java.h"
15#include "tsan_rtl.h"
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000016#include "tsan_mutex.h"
17#include "sanitizer_common/sanitizer_internal_defs.h"
18#include "sanitizer_common/sanitizer_common.h"
19#include "sanitizer_common/sanitizer_placement_new.h"
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +000020
21using namespace __tsan; // NOLINT
22
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000023namespace __tsan {
24
25const uptr kHeapShadow = 0x300000000000ull;
26const uptr kHeapAlignment = 8;
27
// Shadow descriptor for one kHeapAlignment-sized granule of the Java heap.
// Instances live in the mmap'ed (hence zero-initialized) heap_shadow array
// and are created with placement new; the CHECK_EQ(begin, false) in the
// constructors relies on the slot being zeroed (or reset by a prior dtor).
struct BlockDesc {
  bool begin;     // true iff a live Java object starts at this granule
  Mutex mtx;      // protects 'head'
  SyncVar *head;  // singly-linked list of sync objects inside the object

  // Constructs the descriptor for a freshly allocated object.
  BlockDesc()
      : mtx(MutexTypeJava, StatMtxJava)
      , head() {
    CHECK_EQ(begin, false);
    begin = true;
  }

  // Move-construction used by __tsan_java_move(): steals b's SyncVar list.
  explicit BlockDesc(BlockDesc *b)
      : mtx(MutexTypeJava, StatMtxJava)
      , head(b->head) {
    CHECK_EQ(begin, false);
    begin = true;
    b->head = 0;
  }

  // Destroys every SyncVar of the object and resets 'begin' so the slot
  // can be reused by a future placement new.
  ~BlockDesc() {
    CHECK_EQ(begin, true);
    begin = false;
    ThreadState *thr = cur_thread();
    SyncVar *s = head;
    while (s) {
      SyncVar *s1 = s->next;
      StatInc(thr, StatSyncDestroyed);
      // Lock/unlock pair waits out any thread still holding the mutex
      // before the SyncVar is freed.
      s->mtx.Lock();
      s->mtx.Unlock();
      thr->mset.Remove(s->GetId());
      DestroyAndFree(s);
      s = s1;
    }
  }
};
64
65struct JavaContext {
66 Mutex mtx;
67 const uptr heap_begin;
68 const uptr heap_size;
69 BlockDesc *heap_shadow;
70
71 JavaContext(jptr heap_begin, jptr heap_size)
72 : mtx(MutexTypeJava, StatMtxJava)
73 , heap_begin(heap_begin)
74 , heap_size(heap_size) {
75 uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
76 heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
77 if ((uptr)heap_shadow != kHeapShadow) {
78 Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
79 Die();
80 }
81 }
82};
83
84class ScopedJavaFunc {
85 public:
86 ScopedJavaFunc(ThreadState *thr, uptr pc)
87 : thr_(thr) {
88 Initialize(thr_);
89 FuncEntry(thr, pc);
90 CHECK_EQ(thr_->in_rtl, 0);
91 thr_->in_rtl++;
92 }
93
94 ~ScopedJavaFunc() {
95 thr_->in_rtl--;
96 CHECK_EQ(thr_->in_rtl, 0);
97 FuncExit(thr_);
98 // FIXME(dvyukov): process pending signals.
99 }
100
101 private:
102 ThreadState *thr_;
103};
104
// Backing storage for the singleton JavaContext; it is constructed here via
// placement new in __tsan_java_init().  The "+ 1" rounds up in case
// sizeof(JavaContext) is not a multiple of sizeof(u64).
static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
// Null until __tsan_java_init() runs; checked by every entry point.
static JavaContext *jctx;
107
108static BlockDesc *getblock(uptr addr) {
109 uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
110 return &jctx->heap_shadow[i];
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000111}
112
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000113static BlockDesc *getblockbegin(uptr addr) {
114 for (BlockDesc *b = getblock(addr);; b--) {
115 CHECK_GE(b, jctx->heap_shadow);
116 if (b->begin)
117 return b;
118 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000119 return 0;
120}
121
// Finds (and, if 'create' is set, creates) the SyncVar for the sync object
// at 'addr'.  Returns 0 for addresses outside the registered Java heap.
// When a SyncVar is returned, its mutex is held (write or read lock per
// 'write_lock'); the caller must unlock it.
SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
                     bool write_lock, bool create) {
  if (jctx == 0 || addr < jctx->heap_begin
      || addr >= jctx->heap_begin + jctx->heap_size)
    return 0;
  // All SyncVars of one Java object hang off the object's first BlockDesc.
  BlockDesc *b = getblockbegin(addr);
  Lock l(&b->mtx);
  SyncVar *s = b->head;
  for (; s; s = s->next) {
    if (s->addr == addr)
      break;
  }
  if (s == 0 && create) {
    s = CTX()->synctab.Create(thr, pc, addr);
    s->next = b->head;
    b->head = s;
  }
  if (s) {
    // Acquire the SyncVar's own mutex while still holding the block mutex,
    // so the SyncVar cannot be destroyed in between (see ~BlockDesc()).
    if (write_lock)
      s->mtx.Lock();
    else
      s->mtx.ReadLock();
  }
  return s;
}
147
148SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
149 // We do not destroy Java mutexes other than in __tsan_java_free().
150 return 0;
151}
152
153} // namespace __tsan {
154
// Common prologue for every __tsan_java_* entry point: fetches the current
// ThreadState into 'thr', captures the caller's PC, and enters the runtime
// via ScopedJavaFunc (which also reports FuncEntry/FuncExit).  'pc' (the
// interface function's own address) is left available to the body.
// NOTE: no comments inside the macro -- '//' would break the '\'
// line continuations.
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = (uptr)&func; \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
/**/
162
// Registers the Java heap [heap_begin, heap_begin + heap_size) with tsan.
// Must be called exactly once before any other __tsan_java_* function;
// both arguments must be non-zero and kHeapAlignment-aligned.
void __tsan_java_init(jptr heap_begin, jptr heap_size) {
  SCOPED_JAVA_FUNC(__tsan_java_init);
  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
  CHECK_EQ(jctx, 0);  // double-init is a usage error
  CHECK_GT(heap_begin, 0);
  CHECK_GT(heap_size, 0);
  CHECK_EQ(heap_begin % kHeapAlignment, 0);
  CHECK_EQ(heap_size % kHeapAlignment, 0);
  CHECK_LT(heap_begin, heap_begin + heap_size);  // no address-space wrap
  // Placement new into static storage -- no dynamic allocation here.
  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}
174
// Finalizes the tsan runtime and returns the process exit status computed
// by Finalize().
int __tsan_java_fini() {
  SCOPED_JAVA_FUNC(__tsan_java_fini);
  DPrintf("#%d: java_fini()\n", thr->tid);
  CHECK_NE(jctx, 0);
  // FIXME(dvyukov): this does not call atexit() callbacks.
  int status = Finalize(thr);
  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
  return status;
}
184
// Registers a newly allocated Java object occupying [ptr, ptr + size).
// Both arguments must be kHeapAlignment-aligned and inside the heap
// registered via __tsan_java_init().
void __tsan_java_alloc(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_alloc);
  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  // Only the object's first granule gets a constructed BlockDesc; its ctor
  // sets begin = true, marking the start of the object.
  BlockDesc *b = getblock(ptr);
  new(b) BlockDesc();
}
198
// Unregisters freed Java memory [ptr, ptr + size): destroys the BlockDesc
// of every object that begins inside the range, which also destroys those
// objects' SyncVars (see ~BlockDesc()).
void __tsan_java_free(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_free);
  DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *beg = getblock(ptr);
  BlockDesc *end = getblock(ptr + size);
  for (BlockDesc *b = beg; b != end; b++) {
    // Slots whose object has already been destroyed (begin == false) are
    // skipped; only live object headers are torn down.
    if (b->begin)
      b->~BlockDesc();
  }
}
216
// Notifies tsan that the GC moved object memory [src, src + size) to
// [dst, dst + size).  The two ranges must not overlap, and all three
// arguments must be kHeapAlignment-aligned and inside the Java heap.
void __tsan_java_move(jptr src, jptr dst, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_move);
  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(src % kHeapAlignment, 0);
  CHECK_EQ(dst % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(src, jctx->heap_begin);
  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
  CHECK_GE(dst, jctx->heap_begin);
  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
  CHECK(dst >= src + size || src >= dst + size);  // ranges must not overlap

  // Assuming it's not running concurrently with threads that do
  // memory accesses and mutex operations (stop-the-world phase).
  { // NOLINT
    // Move the block descriptors: move-construct at dst (stealing the
    // SyncVar list), then destroy the now-empty descriptor at src.
    BlockDesc *s = getblock(src);
    BlockDesc *d = getblock(dst);
    BlockDesc *send = getblock(src + size);
    for (; s != send; s++, d++) {
      if (s->begin) {
        new(d) BlockDesc(s);
        s->~BlockDesc();
      }
    }
  }

  { // NOLINT
    // Move the tsan access shadow and zero out the old location.
    u64 *s = (u64*)MemToShadow(src);
    u64 *d = (u64*)MemToShadow(dst);
    u64 *send = (u64*)MemToShadow(src + size);
    for (; s != send; s++, d++) {
      *d = *s;
      *s = 0;
    }
  }
}
255
// Models acquisition of the Java sync object at 'addr' as a tsan mutex
// write-lock event.  'addr' must lie inside the registered Java heap.
void __tsan_java_mutex_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexLock(thr, pc, addr);
}
265
// Models release of the Java sync object at 'addr' as a tsan mutex
// unlock event.  'addr' must lie inside the registered Java heap.
void __tsan_java_mutex_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexUnlock(thr, pc, addr);
}
275
// Models read (shared) acquisition of the Java sync object at 'addr' as a
// tsan mutex read-lock event.  'addr' must lie inside the Java heap.
void __tsan_java_mutex_read_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadLock(thr, pc, addr);
}
285
// Models read (shared) release of the Java sync object at 'addr' as a
// tsan mutex read-unlock event.  'addr' must lie inside the Java heap.
void __tsan_java_mutex_read_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadUnlock(thr, pc, addr);
}