Alexey Samsonov | 603c4be | 2012-06-04 13:55:19 +0000 | [diff] [blame] | 1 | //===-- tsan_mman.cc ------------------------------------------------------===// |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 2 | // |
| 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This file is a part of ThreadSanitizer (TSan), a race detector. |
| 11 | // |
| 12 | //===----------------------------------------------------------------------===// |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 13 | #include "sanitizer_common/sanitizer_allocator_interface.h" |
Alexey Samsonov | f7667cc | 2012-06-07 11:54:08 +0000 | [diff] [blame] | 14 | #include "sanitizer_common/sanitizer_common.h" |
Dmitry Vyukov | 2e87051 | 2012-08-15 15:35:15 +0000 | [diff] [blame] | 15 | #include "sanitizer_common/sanitizer_placement_new.h" |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 16 | #include "tsan_mman.h" |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 17 | #include "tsan_rtl.h" |
| 18 | #include "tsan_report.h" |
| 19 | #include "tsan_flags.h" |
| 20 | |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 21 | // May be overriden by front-end. |
Pirama Arumuga Nainar | 799172d | 2016-03-03 15:50:30 -0800 | [diff] [blame^] | 22 | SANITIZER_WEAK_DEFAULT_IMPL |
| 23 | void __sanitizer_malloc_hook(void *ptr, uptr size) { |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 24 | (void)ptr; |
| 25 | (void)size; |
| 26 | } |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 27 | |
Pirama Arumuga Nainar | 799172d | 2016-03-03 15:50:30 -0800 | [diff] [blame^] | 28 | SANITIZER_WEAK_DEFAULT_IMPL |
| 29 | void __sanitizer_free_hook(void *ptr) { |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 30 | (void)ptr; |
| 31 | } |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 32 | |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 33 | namespace __tsan { |
| 34 | |
// Callbacks invoked when the allocator maps/unmaps a chunk of user memory.
struct MapUnmapCallback {
  void OnMap(uptr p, uptr size) const { }
  void OnUnmap(uptr p, uptr size) const {
    // We are about to unmap a chunk of user memory.
    // Mark the corresponding shadow memory as not needed.
    DontNeedShadowFor(p, size);
    // Mark the corresponding meta shadow memory as not needed.
    // Note the block does not contain any meta info at this point
    // (this happens after free).
    const uptr kMetaRatio = kMetaShadowCell / kMetaShadowSize;
    // One "page" here covers the user range whose meta shadow occupies
    // exactly one OS page (meta shadow is kMetaRatio times smaller).
    const uptr kPageSize = GetPageSizeCached() * kMetaRatio;
    // Block came from LargeMmapAllocator, so must be large.
    // We rely on this in the calculations below.
    CHECK_GE(size, 2 * kPageSize);
    // Round the start of the range up to a page boundary...
    uptr diff = RoundUp(p, kPageSize) - p;
    if (diff != 0) {
      p += diff;
      size -= diff;
    }
    // ...and the end down, so only fully-covered meta pages are released.
    diff = p + size - RoundDown(p + size, kPageSize);
    if (diff != 0)
      size -= diff;
    FlushUnneededShadowMemory((uptr)MemToMeta(p), size / kMetaRatio);
  }
};
| 60 | |
Dmitry Vyukov | ff35f1d | 2012-08-30 13:02:30 +0000 | [diff] [blame] | 61 | static char allocator_placeholder[sizeof(Allocator)] ALIGNED(64); |
| 62 | Allocator *allocator() { |
Dmitry Vyukov | 2e87051 | 2012-08-15 15:35:15 +0000 | [diff] [blame] | 63 | return reinterpret_cast<Allocator*>(&allocator_placeholder); |
| 64 | } |
| 65 | |
| 66 | void InitializeAllocator() { |
Stephen Hines | 86277eb | 2015-03-23 12:06:32 -0700 | [diff] [blame] | 67 | allocator()->Init(common_flags()->allocator_may_return_null); |
Dmitry Vyukov | 2e87051 | 2012-08-15 15:35:15 +0000 | [diff] [blame] | 68 | } |
| 69 | |
Dmitry Vyukov | bdd844c | 2013-01-24 09:08:03 +0000 | [diff] [blame] | 70 | void AllocatorThreadStart(ThreadState *thr) { |
| 71 | allocator()->InitCache(&thr->alloc_cache); |
Alexey Samsonov | 1f3c2fe | 2013-05-29 09:15:39 +0000 | [diff] [blame] | 72 | internal_allocator()->InitCache(&thr->internal_alloc_cache); |
Dmitry Vyukov | bdd844c | 2013-01-24 09:08:03 +0000 | [diff] [blame] | 73 | } |
| 74 | |
| 75 | void AllocatorThreadFinish(ThreadState *thr) { |
| 76 | allocator()->DestroyCache(&thr->alloc_cache); |
Alexey Samsonov | 1f3c2fe | 2013-05-29 09:15:39 +0000 | [diff] [blame] | 77 | internal_allocator()->DestroyCache(&thr->internal_alloc_cache); |
Dmitry Vyukov | bdd844c | 2013-01-24 09:08:03 +0000 | [diff] [blame] | 78 | } |
| 79 | |
// Print statistics of the user-memory allocator.
void AllocatorPrintStats() {
  allocator()->PrintStats();
}
| 83 | |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 84 | static void SignalUnsafeCall(ThreadState *thr, uptr pc) { |
Pirama Arumuga Nainar | 799172d | 2016-03-03 15:50:30 -0800 | [diff] [blame^] | 85 | if (atomic_load_relaxed(&thr->in_signal_handler) == 0 || |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 86 | !flags()->report_signal_unsafe) |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 87 | return; |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 88 | VarSizeStackTrace stack; |
| 89 | ObtainCurrentStack(thr, pc, &stack); |
Pirama Arumuga Nainar | 799172d | 2016-03-03 15:50:30 -0800 | [diff] [blame^] | 90 | if (IsFiredSuppression(ctx, ReportTypeSignalUnsafe, stack)) |
| 91 | return; |
Alexey Samsonov | 2bbd8be | 2013-03-15 13:48:44 +0000 | [diff] [blame] | 92 | ThreadRegistryLock l(ctx->thread_registry); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 93 | ScopedReport rep(ReportTypeSignalUnsafe); |
Pirama Arumuga Nainar | 799172d | 2016-03-03 15:50:30 -0800 | [diff] [blame^] | 94 | rep.AddStack(stack, true); |
| 95 | OutputReport(thr, rep); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 96 | } |
| 97 | |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 98 | void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align, bool signal) { |
Dmitry Vyukov | 7423c78 | 2013-03-22 17:06:22 +0000 | [diff] [blame] | 99 | if ((sz >= (1ull << 40)) || (align >= (1ull << 40))) |
Stephen Hines | 86277eb | 2015-03-23 12:06:32 -0700 | [diff] [blame] | 100 | return allocator()->ReturnNullOrDie(); |
Dmitry Vyukov | 2e87051 | 2012-08-15 15:35:15 +0000 | [diff] [blame] | 101 | void *p = allocator()->Allocate(&thr->alloc_cache, sz, align); |
| 102 | if (p == 0) |
Dmitry Vyukov | 7b8bee1 | 2012-05-18 09:41:52 +0000 | [diff] [blame] | 103 | return 0; |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 104 | if (ctx && ctx->initialized) |
| 105 | OnUserAlloc(thr, pc, (uptr)p, sz, true); |
Stephen Hines | 6d18623 | 2014-11-26 17:56:19 -0800 | [diff] [blame] | 106 | if (signal) |
| 107 | SignalUnsafeCall(thr, pc); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 108 | return p; |
| 109 | } |
| 110 | |
Stephen Hines | 86277eb | 2015-03-23 12:06:32 -0700 | [diff] [blame] | 111 | void *user_calloc(ThreadState *thr, uptr pc, uptr size, uptr n) { |
| 112 | if (CallocShouldReturnNullDueToOverflow(size, n)) |
| 113 | return allocator()->ReturnNullOrDie(); |
| 114 | void *p = user_alloc(thr, pc, n * size); |
| 115 | if (p) |
| 116 | internal_memset(p, 0, n * size); |
| 117 | return p; |
| 118 | } |
| 119 | |
// Free user memory at p. NOTE: OnUserFree (meta/shadow bookkeeping) runs
// before the block is returned to the allocator. When |signal| is set,
// flags signal-unsafe usage.
void user_free(ThreadState *thr, uptr pc, void *p, bool signal) {
  // Bookkeeping is skipped during early startup, before ctx is ready.
  if (ctx && ctx->initialized)
    OnUserFree(thr, pc, (uptr)p, true);
  allocator()->Deallocate(&thr->alloc_cache, p);
  if (signal)
    SignalUnsafeCall(thr, pc);
}
| 127 | |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 128 | void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) { |
| 129 | DPrintf("#%d: alloc(%zu) = %p\n", thr->tid, sz, p); |
| 130 | ctx->metamap.AllocBlock(thr, pc, p, sz); |
| 131 | if (write && thr->ignore_reads_and_writes == 0) |
| 132 | MemoryRangeImitateWrite(thr, pc, (uptr)p, sz); |
| 133 | else |
| 134 | MemoryResetRange(thr, pc, (uptr)p, sz); |
| 135 | } |
| 136 | |
| 137 | void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) { |
| 138 | CHECK_NE(p, (void*)0); |
| 139 | uptr sz = ctx->metamap.FreeBlock(thr, pc, p); |
| 140 | DPrintf("#%d: free(%p, %zu)\n", thr->tid, p, sz); |
| 141 | if (write && thr->ignore_reads_and_writes == 0) |
| 142 | MemoryRangeFreed(thr, pc, (uptr)p, sz); |
| 143 | } |
| 144 | |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 145 | void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz) { |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 146 | void *p2 = 0; |
| 147 | // FIXME: Handle "shrinking" more efficiently, |
| 148 | // it seems that some software actually does this. |
| 149 | if (sz) { |
| 150 | p2 = user_alloc(thr, pc, sz); |
Dmitry Vyukov | efd9582 | 2012-05-21 06:46:27 +0000 | [diff] [blame] | 151 | if (p2 == 0) |
| 152 | return 0; |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 153 | if (p) { |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 154 | uptr oldsz = user_alloc_usable_size(p); |
| 155 | internal_memcpy(p2, p, min(oldsz, sz)); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 156 | } |
| 157 | } |
Dmitry Vyukov | f51c386 | 2013-03-18 19:47:36 +0000 | [diff] [blame] | 158 | if (p) |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 159 | user_free(thr, pc, p); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 160 | return p2; |
| 161 | } |
| 162 | |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 163 | uptr user_alloc_usable_size(const void *p) { |
Alexey Samsonov | 8a6b5e5 | 2013-02-25 08:43:10 +0000 | [diff] [blame] | 164 | if (p == 0) |
| 165 | return 0; |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 166 | MBlock *b = ctx->metamap.GetBlock((uptr)p); |
| 167 | return b ? b->siz : 0; |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 168 | } |
| 169 | |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 170 | void invoke_malloc_hook(void *ptr, uptr size) { |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 171 | ThreadState *thr = cur_thread(); |
Stephen Hines | 2d1fdb2 | 2014-05-28 23:58:16 -0700 | [diff] [blame] | 172 | if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors) |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 173 | return; |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 174 | __sanitizer_malloc_hook(ptr, size); |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 175 | } |
| 176 | |
| 177 | void invoke_free_hook(void *ptr) { |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 178 | ThreadState *thr = cur_thread(); |
Stephen Hines | 2d1fdb2 | 2014-05-28 23:58:16 -0700 | [diff] [blame] | 179 | if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors) |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 180 | return; |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 181 | __sanitizer_free_hook(ptr); |
Alexey Samsonov | 4f0ea39 | 2012-09-24 13:19:47 +0000 | [diff] [blame] | 182 | } |
| 183 | |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 184 | void *internal_alloc(MBlockType typ, uptr sz) { |
| 185 | ThreadState *thr = cur_thread(); |
Dmitry Vyukov | 9ad7c32 | 2012-06-22 11:08:55 +0000 | [diff] [blame] | 186 | if (thr->nomalloc) { |
| 187 | thr->nomalloc = 0; // CHECK calls internal_malloc(). |
| 188 | CHECK(0); |
| 189 | } |
Alexey Samsonov | 1f3c2fe | 2013-05-29 09:15:39 +0000 | [diff] [blame] | 190 | return InternalAlloc(sz, &thr->internal_alloc_cache); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 191 | } |
| 192 | |
| 193 | void internal_free(void *p) { |
| 194 | ThreadState *thr = cur_thread(); |
Dmitry Vyukov | 9ad7c32 | 2012-06-22 11:08:55 +0000 | [diff] [blame] | 195 | if (thr->nomalloc) { |
| 196 | thr->nomalloc = 0; // CHECK calls internal_malloc(). |
| 197 | CHECK(0); |
| 198 | } |
Alexey Samsonov | 1f3c2fe | 2013-05-29 09:15:39 +0000 | [diff] [blame] | 199 | InternalFree(p, &thr->internal_alloc_cache); |
Kostya Serebryany | 7ac4148 | 2012-05-10 13:48:04 +0000 | [diff] [blame] | 200 | } |
| 201 | |
| 202 | } // namespace __tsan |
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 203 | |
| 204 | using namespace __tsan; |
| 205 | |
| 206 | extern "C" { |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 207 | uptr __sanitizer_get_current_allocated_bytes() { |
| 208 | uptr stats[AllocatorStatCount]; |
| 209 | allocator()->GetStats(stats); |
| 210 | return stats[AllocatorStatAllocated]; |
| 211 | } |
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 212 | |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 213 | uptr __sanitizer_get_heap_size() { |
| 214 | uptr stats[AllocatorStatCount]; |
| 215 | allocator()->GetStats(stats); |
| 216 | return stats[AllocatorStatMapped]; |
| 217 | } |
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 218 | |
uptr __sanitizer_get_free_bytes() {
  // Not tracked for TSan; a dummy non-zero value is returned.
  return 1;
}
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 222 | |
uptr __sanitizer_get_unmapped_bytes() {
  // Not tracked for TSan; a dummy non-zero value is returned.
  return 1;
}
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 226 | |
uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  // Trivial estimate: report the requested size unchanged.
  return size;
}
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 230 | |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 231 | int __sanitizer_get_ownership(const void *p) { |
| 232 | return allocator()->GetBlockBegin(p) != 0; |
| 233 | } |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 234 | |
uptr __sanitizer_get_allocated_size(const void *p) {
  // Delegates to the internal usable-size query (0 for unknown pointers).
  return user_alloc_usable_size(p);
}
Dmitry Vyukov | 3ce2170 | 2013-03-18 17:21:15 +0000 | [diff] [blame] | 238 | |
| 239 | void __tsan_on_thread_idle() { |
| 240 | ThreadState *thr = cur_thread(); |
| 241 | allocator()->SwallowCache(&thr->alloc_cache); |
Alexey Samsonov | 1f3c2fe | 2013-05-29 09:15:39 +0000 | [diff] [blame] | 242 | internal_allocator()->SwallowCache(&thr->internal_alloc_cache); |
Stephen Hines | 6a211c5 | 2014-07-21 00:49:56 -0700 | [diff] [blame] | 243 | ctx->metamap.OnThreadIdle(thr); |
Dmitry Vyukov | 3ce2170 | 2013-03-18 17:21:15 +0000 | [diff] [blame] | 244 | } |
Dmitry Vyukov | 1253082 | 2013-01-23 12:08:03 +0000 | [diff] [blame] | 245 | } // extern "C" |