blob: 2bebed8eb2183346371c22c9ca0e59261ae09d24 [file] [log] [blame]
//===-- tsan_interface_java.cc --------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
13
#include "tsan_interface_java.h"
#include "tsan_rtl.h"
#include "tsan_mutex.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_placement_new.h"

using namespace __tsan;  // NOLINT
22
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000023namespace __tsan {
24
25const uptr kHeapShadow = 0x300000000000ull;
26const uptr kHeapAlignment = 8;
27
28struct BlockDesc {
29 bool begin;
30 Mutex mtx;
31 SyncVar *head;
32
33 BlockDesc()
Dmitry Vyukov22be55e2012-12-21 11:30:14 +000034 : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000035 , head() {
36 CHECK_EQ(begin, false);
37 begin = true;
38 }
39
40 explicit BlockDesc(BlockDesc *b)
Dmitry Vyukov22be55e2012-12-21 11:30:14 +000041 : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000042 , head(b->head) {
43 CHECK_EQ(begin, false);
44 begin = true;
45 b->head = 0;
46 }
47
48 ~BlockDesc() {
49 CHECK_EQ(begin, true);
50 begin = false;
51 ThreadState *thr = cur_thread();
52 SyncVar *s = head;
53 while (s) {
54 SyncVar *s1 = s->next;
55 StatInc(thr, StatSyncDestroyed);
56 s->mtx.Lock();
57 s->mtx.Unlock();
58 thr->mset.Remove(s->GetId());
59 DestroyAndFree(s);
60 s = s1;
61 }
62 }
63};
64
65struct JavaContext {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000066 const uptr heap_begin;
67 const uptr heap_size;
68 BlockDesc *heap_shadow;
69
70 JavaContext(jptr heap_begin, jptr heap_size)
Dmitry Vyukov22be55e2012-12-21 11:30:14 +000071 : heap_begin(heap_begin)
Dmitry Vyukov2547ac62012-12-20 17:29:34 +000072 , heap_size(heap_size) {
73 uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
74 heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
75 if ((uptr)heap_shadow != kHeapShadow) {
76 Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
77 Die();
78 }
79 }
80};
81
82class ScopedJavaFunc {
83 public:
84 ScopedJavaFunc(ThreadState *thr, uptr pc)
85 : thr_(thr) {
86 Initialize(thr_);
87 FuncEntry(thr, pc);
88 CHECK_EQ(thr_->in_rtl, 0);
89 thr_->in_rtl++;
90 }
91
92 ~ScopedJavaFunc() {
93 thr_->in_rtl--;
94 CHECK_EQ(thr_->in_rtl, 0);
95 FuncExit(thr_);
96 // FIXME(dvyukov): process pending signals.
97 }
98
99 private:
100 ThreadState *thr_;
101};
102
103static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
104static JavaContext *jctx;
105
106static BlockDesc *getblock(uptr addr) {
107 uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
108 return &jctx->heap_shadow[i];
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000109}
110
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000111static BlockDesc *getblockbegin(uptr addr) {
112 for (BlockDesc *b = getblock(addr);; b--) {
113 CHECK_GE(b, jctx->heap_shadow);
114 if (b->begin)
115 return b;
116 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000117 return 0;
118}
119
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000120SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
121 bool write_lock, bool create) {
122 if (jctx == 0 || addr < jctx->heap_begin
123 || addr >= jctx->heap_begin + jctx->heap_size)
124 return 0;
125 BlockDesc *b = getblockbegin(addr);
126 Lock l(&b->mtx);
127 SyncVar *s = b->head;
128 for (; s; s = s->next) {
129 if (s->addr == addr)
130 break;
131 }
132 if (s == 0 && create) {
133 s = CTX()->synctab.Create(thr, pc, addr);
134 s->next = b->head;
135 b->head = s;
136 }
137 if (s) {
138 if (write_lock)
139 s->mtx.Lock();
140 else
141 s->mtx.ReadLock();
142 }
143 return s;
144}
145
146SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
147 // We do not destroy Java mutexes other than in __tsan_java_free().
148 return 0;
149}
150
151} // namespace __tsan {
152
// Common prologue for the __tsan_java_* entry points: binds the current
// ThreadState, captures the caller's PC, and installs the RAII guard
// that enters/leaves runtime mode.
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = (uptr)&func; \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
/**/
160
161void __tsan_java_init(jptr heap_begin, jptr heap_size) {
162 SCOPED_JAVA_FUNC(__tsan_java_init);
163 DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
164 CHECK_EQ(jctx, 0);
165 CHECK_GT(heap_begin, 0);
166 CHECK_GT(heap_size, 0);
Dmitry Vyukova5b57102012-12-21 11:16:40 +0000167 CHECK_EQ(heap_begin % kHeapAlignment, 0);
168 CHECK_EQ(heap_size % kHeapAlignment, 0);
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000169 CHECK_LT(heap_begin, heap_begin + heap_size);
170 jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
171}
172
173int __tsan_java_fini() {
174 SCOPED_JAVA_FUNC(__tsan_java_fini);
175 DPrintf("#%d: java_fini()\n", thr->tid);
176 CHECK_NE(jctx, 0);
177 // FIXME(dvyukov): this does not call atexit() callbacks.
178 int status = Finalize(thr);
179 DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
180 return status;
181}
182
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000183void __tsan_java_alloc(jptr ptr, jptr size) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000184 SCOPED_JAVA_FUNC(__tsan_java_alloc);
185 DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
186 CHECK_NE(jctx, 0);
187 CHECK_NE(size, 0);
Dmitry Vyukova5b57102012-12-21 11:16:40 +0000188 CHECK_EQ(ptr % kHeapAlignment, 0);
189 CHECK_EQ(size % kHeapAlignment, 0);
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000190 CHECK_GE(ptr, jctx->heap_begin);
191 CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
192
193 BlockDesc *b = getblock(ptr);
194 new(b) BlockDesc();
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000195}
196
197void __tsan_java_free(jptr ptr, jptr size) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000198 SCOPED_JAVA_FUNC(__tsan_java_free);
199 DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
200 CHECK_NE(jctx, 0);
201 CHECK_NE(size, 0);
Dmitry Vyukova5b57102012-12-21 11:16:40 +0000202 CHECK_EQ(ptr % kHeapAlignment, 0);
203 CHECK_EQ(size % kHeapAlignment, 0);
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000204 CHECK_GE(ptr, jctx->heap_begin);
205 CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
206
207 BlockDesc *beg = getblock(ptr);
208 BlockDesc *end = getblock(ptr + size);
209 for (BlockDesc *b = beg; b != end; b++) {
210 if (b->begin)
211 b->~BlockDesc();
212 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000213}
214
215void __tsan_java_move(jptr src, jptr dst, jptr size) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000216 SCOPED_JAVA_FUNC(__tsan_java_move);
217 DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
218 CHECK_NE(jctx, 0);
219 CHECK_NE(size, 0);
Dmitry Vyukova5b57102012-12-21 11:16:40 +0000220 CHECK_EQ(src % kHeapAlignment, 0);
221 CHECK_EQ(dst % kHeapAlignment, 0);
222 CHECK_EQ(size % kHeapAlignment, 0);
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000223 CHECK_GE(src, jctx->heap_begin);
224 CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
225 CHECK_GE(dst, jctx->heap_begin);
226 CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
227 CHECK(dst >= src + size || src >= dst + size);
228
229 // Assuming it's not running concurrently with threads that do
230 // memory accesses and mutex operations (stop-the-world phase).
Dmitry Vyukova5b57102012-12-21 11:16:40 +0000231 { // NOLINT
232 BlockDesc *s = getblock(src);
233 BlockDesc *d = getblock(dst);
234 BlockDesc *send = getblock(src + size);
235 for (; s != send; s++, d++) {
236 if (s->begin) {
237 new(d) BlockDesc(s);
238 s->~BlockDesc();
239 }
240 }
241 }
242
243 { // NOLINT
244 u64 *s = (u64*)MemToShadow(src);
245 u64 *d = (u64*)MemToShadow(dst);
246 u64 *send = (u64*)MemToShadow(src + size);
247 for (; s != send; s++, d++) {
248 *d = *s;
249 *s = 0;
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000250 }
251 }
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000252}
253
254void __tsan_java_mutex_lock(jptr addr) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000255 SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
256 DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
257 CHECK_NE(jctx, 0);
258 CHECK_GE(addr, jctx->heap_begin);
259 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
260
261 MutexLock(thr, pc, addr);
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000262}
263
264void __tsan_java_mutex_unlock(jptr addr) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000265 SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
266 DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
267 CHECK_NE(jctx, 0);
268 CHECK_GE(addr, jctx->heap_begin);
269 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
270
271 MutexUnlock(thr, pc, addr);
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000272}
273
274void __tsan_java_mutex_read_lock(jptr addr) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000275 SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
276 DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
277 CHECK_NE(jctx, 0);
278 CHECK_GE(addr, jctx->heap_begin);
279 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
280
281 MutexReadLock(thr, pc, addr);
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000282}
283
284void __tsan_java_mutex_read_unlock(jptr addr) {
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000285 SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
286 DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
287 CHECK_NE(jctx, 0);
288 CHECK_GE(addr, jctx->heap_begin);
289 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
Dmitry Vyukovd088b3b2012-12-20 10:21:30 +0000290
Dmitry Vyukov2547ac62012-12-20 17:29:34 +0000291 MutexReadUnlock(thr, pc, addr);
292}