Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 1 | //=-- lsan_interceptors.cc ------------------------------------------------===// |
| 2 | // |
| 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This file is a part of LeakSanitizer. |
| 11 | // Interceptors for standalone LSan. |
| 12 | // |
| 13 | //===----------------------------------------------------------------------===// |
| 14 | |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 15 | #include "interception/interception.h" |
Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 16 | #include "sanitizer_common/sanitizer_allocator.h" |
| 17 | #include "sanitizer_common/sanitizer_atomic.h" |
| 18 | #include "sanitizer_common/sanitizer_common.h" |
| 19 | #include "sanitizer_common/sanitizer_flags.h" |
| 20 | #include "sanitizer_common/sanitizer_internal_defs.h" |
| 21 | #include "sanitizer_common/sanitizer_linux.h" |
| 22 | #include "sanitizer_common/sanitizer_platform_limits_posix.h" |
| 23 | #include "lsan.h" |
| 24 | #include "lsan_allocator.h" |
| 25 | #include "lsan_thread.h" |
| 26 | |
| 27 | using namespace __lsan; |
| 28 | |
// Minimal hand-written pthread declarations so this file does not need to
// include <pthread.h>. Signatures use void*/unsigned rather than the real
// pthread types; they must stay ABI-compatible with libpthread.
extern "C" {
int pthread_attr_init(void *attr);
int pthread_attr_destroy(void *attr);
int pthread_attr_getdetachstate(void *attr, int *v);
int pthread_key_create(unsigned *key, void (*destructor)(void* v));
int pthread_setspecific(unsigned key, const void *v);
}
| 36 | |
// Lazily initialize LSan before servicing an intercepted call.
// Deliberately dies (CHECK) if an interceptor is entered while __lsan_init
// is still running, since that would mean an unexpected reentrancy.
#define ENSURE_LSAN_INITED do {   \
  CHECK(!lsan_init_is_running);   \
  if (!lsan_inited)               \
    __lsan_init();                \
} while (0)
| 42 | |
///// Malloc/free interceptors. /////

// LSan always requests zeroed memory from its allocator (passed as the
// "cleared" argument to Allocate below).
const bool kAlwaysClearMemory = true;

// Forward-declare std::nothrow_t so the nothrow operator new/delete
// overloads can be declared without pulling in <new>.
namespace std {
  struct nothrow_t;
}
| 50 | |
// malloc: route through LSan's allocator with default (1-byte) alignment.
// GET_STACK_TRACE_MALLOC captures the caller's stack into `stack` for leak
// reports.
INTERCEPTOR(void*, malloc, uptr size) {
  ENSURE_LSAN_INITED;
  GET_STACK_TRACE_MALLOC;
  return Allocate(stack, size, 1, kAlwaysClearMemory);
}
| 56 | |
// free: hand the pointer back to LSan's allocator.
INTERCEPTOR(void, free, void *p) {
  ENSURE_LSAN_INITED;
  Deallocate(p);
}
| 61 | |
// calloc: like malloc but checks nmemb*size for overflow and returns zeroed
// memory. Also contains the dlsym bootstrap path below.
INTERCEPTOR(void*, calloc, uptr nmemb, uptr size) {
  if (lsan_init_is_running) {
    // Hack: dlsym calls calloc before REAL(calloc) is retrieved from dlsym.
    // Serve those early requests from a small static bump-pointer pool. The
    // pool is a static array and therefore zero-initialized, which satisfies
    // calloc's zeroing contract for these one-shot allocations.
    const uptr kCallocPoolSize = 1024;
    static uptr calloc_memory_for_dlsym[kCallocPoolSize];
    static uptr allocated;  // Current bump offset into the pool, in words.
    uptr size_in_words = ((nmemb * size) + kWordSize - 1) / kWordSize;
    void *mem = (void*)&calloc_memory_for_dlsym[allocated];
    allocated += size_in_words;
    CHECK(allocated < kCallocPoolSize);  // Die if the bootstrap pool overflows.
    return mem;
  }
  if (CallocShouldReturnNullDueToOverflow(size, nmemb)) return nullptr;
  ENSURE_LSAN_INITED;
  GET_STACK_TRACE_MALLOC;
  size *= nmemb;  // Safe: overflow was rejected above.
  return Allocate(stack, size, 1, true);  // true: zero the memory.
}
| 80 | |
// realloc: delegate to LSan's Reallocate (which handles the q == nullptr
// and size == 0 conventions internally — not visible here).
INTERCEPTOR(void*, realloc, void *q, uptr size) {
  ENSURE_LSAN_INITED;
  GET_STACK_TRACE_MALLOC;
  return Reallocate(stack, q, size, 1);
}
| 86 | |
// memalign: aligned allocation with caller-specified alignment.
INTERCEPTOR(void*, memalign, uptr alignment, uptr size) {
  ENSURE_LSAN_INITED;
  GET_STACK_TRACE_MALLOC;
  return Allocate(stack, size, alignment, kAlwaysClearMemory);
}
| 92 | |
// C11 aligned_alloc. NOTE(review): C11 requires size to be a multiple of
// alignment; this interceptor does not enforce that here — presumably the
// underlying allocator tolerates it, but verify against Allocate's contract.
INTERCEPTOR(void*, aligned_alloc, uptr alignment, uptr size) {
  ENSURE_LSAN_INITED;
  GET_STACK_TRACE_MALLOC;
  return Allocate(stack, size, alignment, kAlwaysClearMemory);
}
| 98 | |
// posix_memalign: store the allocation through memptr and return 0.
// Note the result of Allocate is not checked before being stored.
INTERCEPTOR(int, posix_memalign, void **memptr, uptr alignment, uptr size) {
  ENSURE_LSAN_INITED;
  GET_STACK_TRACE_MALLOC;
  *memptr = Allocate(stack, size, alignment, kAlwaysClearMemory);
  // FIXME: Return ENOMEM if user requested more than max alloc size.
  return 0;
}
| 106 | |
| 107 | INTERCEPTOR(void*, valloc, uptr size) { |
Sergey Matveev | 74c8879 | 2013-11-25 17:39:36 +0000 | [diff] [blame] | 108 | ENSURE_LSAN_INITED; |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 109 | GET_STACK_TRACE_MALLOC; |
Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 110 | if (size == 0) |
| 111 | size = GetPageSizeCached(); |
Sergey Matveev | d16d723 | 2013-06-25 14:05:52 +0000 | [diff] [blame] | 112 | return Allocate(stack, size, GetPageSizeCached(), kAlwaysClearMemory); |
Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 113 | } |
| 114 | |
// malloc_usable_size: report the usable size LSan's allocator actually
// reserved for ptr.
INTERCEPTOR(uptr, malloc_usable_size, void *ptr) {
  ENSURE_LSAN_INITED;
  return GetMallocUsableSize(ptr);
}
| 119 | |
// Stand-in for glibc's struct mallinfo: ten ints, matching its size/layout
// closely enough to return all-zero statistics without including <malloc.h>.
struct fake_mallinfo {
  int x[10];
};

// mallinfo: LSan keeps no glibc-style statistics, so return a zeroed struct.
INTERCEPTOR(struct fake_mallinfo, mallinfo, void) {
  struct fake_mallinfo res;
  internal_memset(&res, 0, sizeof(res));
  return res;
}
| 129 | |
// mallopt: tuning the underlying allocator is not supported; always report
// failure (-1) regardless of cmd/value.
INTERCEPTOR(int, mallopt, int cmd, int value) {
  return -1;
}
| 133 | |
Sergey Matveev | d16d723 | 2013-06-25 14:05:52 +0000 | [diff] [blame] | 134 | INTERCEPTOR(void*, pvalloc, uptr size) { |
Sergey Matveev | 74c8879 | 2013-11-25 17:39:36 +0000 | [diff] [blame] | 135 | ENSURE_LSAN_INITED; |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 136 | GET_STACK_TRACE_MALLOC; |
Sergey Matveev | d16d723 | 2013-06-25 14:05:52 +0000 | [diff] [blame] | 137 | uptr PageSize = GetPageSizeCached(); |
| 138 | size = RoundUpTo(size, PageSize); |
| 139 | if (size == 0) { |
| 140 | // pvalloc(0) should allocate one page. |
| 141 | size = PageSize; |
| 142 | } |
| 143 | return Allocate(stack, size, GetPageSizeCached(), kAlwaysClearMemory); |
| 144 | } |
Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 145 | |
Stephen Hines | 2d1fdb2 | 2014-05-28 23:58:16 -0700 | [diff] [blame] | 146 | INTERCEPTOR(void, cfree, void *p) ALIAS(WRAPPER_NAME(free)); |
Sergey Matveev | d16d723 | 2013-06-25 14:05:52 +0000 | [diff] [blame] | 147 | |
// Shared body for all operator new overloads: same path as malloc.
// Expects a `size` parameter in scope at the expansion site.
#define OPERATOR_NEW_BODY                       \
  ENSURE_LSAN_INITED;                           \
  GET_STACK_TRACE_MALLOC;                       \
  return Allocate(stack, size, 1, kAlwaysClearMemory);
| 152 | |
// Replace the global throwing and nothrow operator new/new[] so C++
// allocations are tracked by LSan.
INTERCEPTOR_ATTRIBUTE
void *operator new(uptr size) { OPERATOR_NEW_BODY; }
INTERCEPTOR_ATTRIBUTE
void *operator new[](uptr size) { OPERATOR_NEW_BODY; }
INTERCEPTOR_ATTRIBUTE
void *operator new(uptr size, std::nothrow_t const&) { OPERATOR_NEW_BODY; }
INTERCEPTOR_ATTRIBUTE
void *operator new[](uptr size, std::nothrow_t const&) { OPERATOR_NEW_BODY; }
| 161 | |
// Shared body for all operator delete overloads: same path as free.
// Expects a `ptr` parameter in scope at the expansion site.
#define OPERATOR_DELETE_BODY \
  ENSURE_LSAN_INITED;        \
  Deallocate(ptr);
| 165 | |
// Matching global operator delete/delete[] overloads (plain and nothrow).
INTERCEPTOR_ATTRIBUTE
void operator delete(void *ptr) NOEXCEPT { OPERATOR_DELETE_BODY; }
INTERCEPTOR_ATTRIBUTE
void operator delete[](void *ptr) NOEXCEPT { OPERATOR_DELETE_BODY; }
INTERCEPTOR_ATTRIBUTE
void operator delete(void *ptr, std::nothrow_t const&) { OPERATOR_DELETE_BODY; }
INTERCEPTOR_ATTRIBUTE
void operator delete[](void *ptr, std::nothrow_t const &) {
  OPERATOR_DELETE_BODY;
}
| 176 | |
// We need this to intercept the __libc_memalign calls that are used to
// allocate dynamic TLS space in ld-linux.so. It behaves exactly like
// memalign, so alias it to the memalign wrapper.
INTERCEPTOR(void *, __libc_memalign, uptr align, uptr s)
    ALIAS(WRAPPER_NAME(memalign));
Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 181 | |
///// Thread initialization and finalization. /////

// TSD key whose destructor (thread_finalize) runs at thread exit.
static unsigned g_thread_finalize_key;

// Thread-exit hook. The TSD value is a countdown: as long as it is > 1, we
// re-register ourselves by storing value-1, forcing the pthread runtime to
// invoke us again on the next destructor iteration. This keeps our finalizer
// running as late as possible, so ThreadFinish() executes after other TSD
// destructors have (most likely) completed.
static void thread_finalize(void *v) {
  uptr iter = (uptr)v;
  if (iter > 1) {
    if (pthread_setspecific(g_thread_finalize_key, (void*)(iter - 1))) {
      Report("LeakSanitizer: failed to set thread key.\n");
      Die();
    }
    return;
  }
  ThreadFinish();
}
| 197 | |
// Arguments passed from pthread_create to the new thread's trampoline.
// Lives on the creating thread's stack; `tid` doubles as the handshake slot
// (0 = not yet assigned; child stores 0 back to acknowledge receipt).
struct ThreadParam {
  void *(*callback)(void *arg);  // User thread entry point.
  void *param;                   // User argument for callback.
  atomic_uintptr_t tid;          // LSan thread id handshake slot.
};
| 203 | |
// Trampoline that runs as the entry point of every intercepted thread:
// registers the thread with LSan before invoking the user's callback.
extern "C" void *__lsan_thread_start_func(void *arg) {
  ThreadParam *p = (ThreadParam*)arg;
  // Copy out callback/param now; *p lives on the parent's stack and becomes
  // invalid once we release it via the atomic store below.
  void* (*callback)(void *arg) = p->callback;
  void *param = p->param;
  // Wait until the last iteration to maximize the chance that we are the last
  // destructor to run. (See thread_finalize for the countdown scheme.)
  if (pthread_setspecific(g_thread_finalize_key,
                          (void*)GetPthreadDestructorIterations())) {
    Report("LeakSanitizer: failed to set thread key.\n");
    Die();
  }
  // Spin until the parent publishes our LSan thread id (nonzero).
  int tid = 0;
  while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
    internal_sched_yield();
  SetCurrentThread(tid);
  ThreadStart(tid, GetTid());
  // Acknowledge by storing 0, releasing the parent (it must not return and
  // destroy the ThreadParam before this point).
  atomic_store(&p->tid, 0, memory_order_release);
  return callback(param);
}
| 223 | |
// pthread_create: start the thread through __lsan_thread_start_func so it
// registers with LSan, and perform a two-way tid handshake through the
// stack-allocated ThreadParam before returning.
INTERCEPTOR(int, pthread_create, void *th, void *attr,
            void *(*callback)(void *), void *param) {
  ENSURE_LSAN_INITED;
  EnsureMainThreadIDIsCorrect();
  __sanitizer_pthread_attr_t myattr;
  if (!attr) {
    // No attributes supplied: use a default-initialized local one so the
    // detach-state query and stack-size adjustment below still work.
    pthread_attr_init(&myattr);
    attr = &myattr;
  }
  AdjustStackSize(attr);
  int detached = 0;
  pthread_attr_getdetachstate(attr, &detached);
  ThreadParam p;
  p.callback = callback;
  p.param = param;
  atomic_store(&p.tid, 0, memory_order_relaxed);
  int res = REAL(pthread_create)(th, attr, __lsan_thread_start_func, &p);
  if (res == 0) {
    // Register the new thread and publish its LSan tid to the child...
    int tid = ThreadCreate(GetCurrentThread(), *(uptr *)th, detached);
    CHECK_NE(tid, 0);
    atomic_store(&p.tid, tid, memory_order_release);
    // ...then wait for the child to store 0 back, proving it is done reading
    // p before this stack frame (and p with it) goes away.
    while (atomic_load(&p.tid, memory_order_acquire) != 0)
      internal_sched_yield();
  }
  if (attr == &myattr)
    pthread_attr_destroy(&myattr);
  return res;
}
| 252 | |
| 253 | INTERCEPTOR(int, pthread_join, void *th, void **ret) { |
Sergey Matveev | 74c8879 | 2013-11-25 17:39:36 +0000 | [diff] [blame] | 254 | ENSURE_LSAN_INITED; |
Sergey Matveev | 7ea7d20 | 2013-05-20 11:01:40 +0000 | [diff] [blame] | 255 | int tid = ThreadTid((uptr)th); |
| 256 | int res = REAL(pthread_join)(th, ret); |
| 257 | if (res == 0) |
| 258 | ThreadJoin(tid); |
| 259 | return res; |
| 260 | } |
| 261 | |
namespace __lsan {

// Install all interceptors above and create the thread-finalization TSD key.
// Called once during LSan startup; dies if the key cannot be created, since
// thread finalization would otherwise silently never run.
void InitializeInterceptors() {
  INTERCEPT_FUNCTION(malloc);
  INTERCEPT_FUNCTION(free);
  INTERCEPT_FUNCTION(cfree);
  INTERCEPT_FUNCTION(calloc);
  INTERCEPT_FUNCTION(realloc);
  INTERCEPT_FUNCTION(memalign);
  INTERCEPT_FUNCTION(posix_memalign);
  INTERCEPT_FUNCTION(__libc_memalign);
  INTERCEPT_FUNCTION(valloc);
  INTERCEPT_FUNCTION(pvalloc);
  INTERCEPT_FUNCTION(malloc_usable_size);
  INTERCEPT_FUNCTION(mallinfo);
  INTERCEPT_FUNCTION(mallopt);
  INTERCEPT_FUNCTION(pthread_create);
  INTERCEPT_FUNCTION(pthread_join);

  if (pthread_key_create(&g_thread_finalize_key, &thread_finalize)) {
    Report("LeakSanitizer: failed to create thread key.\n");
    Die();
  }
}

}  // namespace __lsan