blob: 91fdf0aa1dca39404d29329c192b0ac752031286 [file] [log] [blame]
Kostya Serebryany2679f192012-12-10 14:19:15 +00001//===-- asan_fake_stack.cc ------------------------------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of AddressSanitizer, an address sanity checker.
11//
12// FakeStack is used to detect use-after-return bugs.
13//===----------------------------------------------------------------------===//
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -080014
Kostya Serebryany2679f192012-12-10 14:19:15 +000015#include "asan_allocator.h"
Alexey Samsonov7e843492013-03-28 15:42:43 +000016#include "asan_poisoning.h"
Kostya Serebryany2679f192012-12-10 14:19:15 +000017#include "asan_thread.h"
Kostya Serebryany2679f192012-12-10 14:19:15 +000018
19namespace __asan {
20
// The one-byte after-return magic, replicated to fill 2, 4 and finally 8
// shadow bytes so SetShadow() can stamp whole u64 shadow cells at once.
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

// Redzone placed around dynamic allocas. The size is a power of two, so the
// mask (size - 1) is used to round addresses up to redzone alignment.
static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;
28
Kostya Serebryany8f7ec322013-09-13 05:57:58 +000029// For small size classes inline PoisonShadow for better performance.
30ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
31 CHECK_EQ(SHADOW_SCALE, 3); // This code expects SHADOW_SCALE=3.
32 u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
33 if (class_id <= 6) {
Stephen Hines86277eb2015-03-23 12:06:32 -070034 for (uptr i = 0; i < (1U << class_id); i++) {
Kostya Serebryany8f7ec322013-09-13 05:57:58 +000035 shadow[i] = magic;
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -080036 // Make sure this does not become memset.
37 SanitizerBreakOptimization(nullptr);
Stephen Hines86277eb2015-03-23 12:06:32 -070038 }
Kostya Serebryany8f7ec322013-09-13 05:57:58 +000039 } else {
40 // The size class is too big, it's cheaper to poison only size bytes.
Kostya Serebryanydff16d42013-09-13 07:20:35 +000041 PoisonShadow(ptr, size, static_cast<u8>(magic));
Kostya Serebryany8f7ec322013-09-13 05:57:58 +000042 }
43}
44
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000045FakeStack *FakeStack::Create(uptr stack_size_log) {
46 static uptr kMinStackSizeLog = 16;
47 static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
48 if (stack_size_log < kMinStackSizeLog)
49 stack_size_log = kMinStackSizeLog;
50 if (stack_size_log > kMaxStackSizeLog)
51 stack_size_log = kMaxStackSizeLog;
Stephen Hines2d1fdb22014-05-28 23:58:16 -070052 uptr size = RequiredSize(stack_size_log);
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000053 FakeStack *res = reinterpret_cast<FakeStack *>(
Stephen Hines2d1fdb22014-05-28 23:58:16 -070054 flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
55 : MmapOrDie(size, "FakeStack"));
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000056 res->stack_size_log_ = stack_size_log;
Stephen Hines2d1fdb22014-05-28 23:58:16 -070057 u8 *p = reinterpret_cast<u8 *>(res);
58 VReport(1, "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
59 "mmapped %zdK, noreserve=%d \n",
60 GetCurrentTidOrInvalid(), p,
61 p + FakeStack::RequiredSize(stack_size_log), stack_size_log,
62 size >> 10, flags()->uar_noreserve);
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000063 return res;
64}
65
Stephen Hines2d1fdb22014-05-28 23:58:16 -070066void FakeStack::Destroy(int tid) {
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000067 PoisonAll(0);
Stephen Hines86277eb2015-03-23 12:06:32 -070068 if (Verbosity() >= 2) {
Stephen Hines2d1fdb22014-05-28 23:58:16 -070069 InternalScopedString str(kNumberOfSizeClasses * 50);
70 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
71 str.append("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
72 NumberOfFrames(stack_size_log(), class_id));
73 Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
74 }
75 uptr size = RequiredSize(stack_size_log_);
76 FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
77 UnmapOrDie(this, size);
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000078}
79
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +000080void FakeStack::PoisonAll(u8 magic) {
81 PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
82 magic);
Kostya Serebryany2679f192012-12-10 14:19:15 +000083}
84
// Grab a free fake frame of the given size class, or return nullptr when
// every frame of that class is in use. |real_stack| (the caller's real stack
// position) is recorded in the frame; GC() later compares frames against the
// current real stack to reclaim leaked ones. Async-signal-safe — see the
// comment inside the loop.
#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  // Collect frames leaked by no-return events before searching for a slot.
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;  // Frame already taken; try the next hint.
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    // Stash the address of this frame's allocation flag next to the frame so
    // it can be reset when the frame is freed.
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr;  // We are out of fake stack.
}
115
// If |ptr| lies inside this fake stack, return the start of the FakeFrame
// containing it, and report the frame's usable bounds through the out-params.
// Returns 0 (and leaves the out-params untouched) when |ptr| is outside.
uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  uptr stack_size_log = this->stack_size_log();
  // Frame storage begins at the first frame of class 0; each size class
  // occupies its own (1 << stack_size_log)-byte region after that.
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (1UL << stack_size_log));
  // Index of the frame within its class; frames of class i are
  // BytesInSizeClass(i) == (1 << (kMinStackFrameSizeLog + i)) bytes each.
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  uptr res = base + pos * BytesInSizeClass(class_id);
  *frame_end = res + BytesInSizeClass(class_id);
  // The frame starts with a FakeFrame header; user data follows it.
  *frame_beg = res + sizeof(FakeFrame);
  return res;
}
131
// Notification of a no-return event (throw, longjmp, etc.); frames may have
// leaked, so just flag that the next Allocate() must run GC() first. Keeping
// this a plain flag store keeps the call cheap and signal-safe.
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}
135
136// When throw, longjmp or some such happens we don't call OnFree() and
137// as the result may leak one or more fake frames, but the good news is that
138// we are notified about all such events by HandleNoReturn().
139// If we recently had such no-return event we need to collect garbage frames.
140// We do it based on their 'real_stack' values -- everything that is lower
141// than the current real_stack is garbage.
142NOINLINE void FakeStack::GC(uptr real_stack) {
143 uptr collected = 0;
144 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
Kostya Serebryany89de4572013-09-12 13:25:29 +0000145 u8 *flags = GetFlags(stack_size_log(), class_id);
146 for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
147 i++) {
148 if (flags[i] == 0) continue; // not allocated.
149 FakeFrame *ff = reinterpret_cast<FakeFrame *>(
150 GetFrame(stack_size_log(), class_id, i));
151 if (ff->real_stack < real_stack) {
152 flags[i] = 0;
153 collected++;
154 }
155 }
156 }
157 needs_gc_ = false;
158}
159
Sergey Matveevc5193352013-10-14 14:04:50 +0000160void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
161 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
162 u8 *flags = GetFlags(stack_size_log(), class_id);
163 for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
164 i++) {
165 if (flags[i] == 0) continue; // not allocated.
166 FakeFrame *ff = reinterpret_cast<FakeFrame *>(
167 GetFrame(stack_size_log(), class_id, i));
168 uptr begin = reinterpret_cast<uptr>(ff);
169 callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
170 }
171 }
172}
173
Kostya Serebryanyff640122013-09-13 08:58:22 +0000174#if SANITIZER_LINUX && !SANITIZER_ANDROID
Kostya Serebryany9433af32013-09-13 06:32:26 +0000175static THREADLOCAL FakeStack *fake_stack_tls;
176
177FakeStack *GetTLSFakeStack() {
178 return fake_stack_tls;
179}
180void SetTLSFakeStack(FakeStack *fs) {
181 fake_stack_tls = fs;
182}
183#else
184FakeStack *GetTLSFakeStack() { return 0; }
185void SetTLSFakeStack(FakeStack *fs) { }
Kostya Serebryanyff640122013-09-13 08:58:22 +0000186#endif // SANITIZER_LINUX && !SANITIZER_ANDROID
Kostya Serebryany9433af32013-09-13 06:32:26 +0000187
Kostya Serebryany6147f022013-09-13 06:04:18 +0000188static FakeStack *GetFakeStack() {
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000189 AsanThread *t = GetCurrentThread();
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800190 if (!t) return nullptr;
Kostya Serebryany6147f022013-09-13 06:04:18 +0000191 return t->fake_stack();
192}
193
194static FakeStack *GetFakeStackFast() {
Kostya Serebryany9433af32013-09-13 06:32:26 +0000195 if (FakeStack *fs = GetTLSFakeStack())
196 return fs;
Kostya Serebryany230e52f2013-09-18 10:35:12 +0000197 if (!__asan_option_detect_stack_use_after_return)
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800198 return nullptr;
Kostya Serebryany6147f022013-09-13 06:04:18 +0000199 return GetFakeStack();
Kostya Serebryany6147f022013-09-13 06:04:18 +0000200}
201
Stephen Hines86277eb2015-03-23 12:06:32 -0700202ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
Kostya Serebryany6147f022013-09-13 06:04:18 +0000203 FakeStack *fs = GetFakeStackFast();
Stephen Hines86277eb2015-03-23 12:06:32 -0700204 if (!fs) return 0;
205 uptr local_stack;
206 uptr real_stack = reinterpret_cast<uptr>(&local_stack);
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000207 FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack);
Stephen Hines86277eb2015-03-23 12:06:32 -0700208 if (!ff) return 0; // Out of fake stack.
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000209 uptr ptr = reinterpret_cast<uptr>(ff);
Kostya Serebryany8f7ec322013-09-13 05:57:58 +0000210 SetShadow(ptr, size, class_id, 0);
Kostya Serebryany2679f192012-12-10 14:19:15 +0000211 return ptr;
212}
213
// Body of the __asan_stack_free_* entry points: release the fake frame at
// |ptr| back to its size class and stamp its shadow with the after-return
// magic so subsequent accesses are reported as stack-use-after-return.
ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}
Kostya Serebryany34e3ed12013-09-10 13:16:26 +0000218
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800219} // namespace __asan
Kostya Serebryany34e3ed12013-09-10 13:16:26 +0000220
221// ---------------------- Interface ---------------- {{{1
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700222using namespace __asan;
// Emits the public __asan_stack_malloc_N / __asan_stack_free_N entry points
// for one frame size class; instrumented code calls these directly.
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
      __asan_stack_malloc_##class_id(uptr size) {                              \
    return OnMalloc(class_id, size);                                           \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
      uptr ptr, uptr size) {                                                   \
    OnFree(ptr, class_id, size);                                               \
  }

// One pair of entry points per supported size class (0..10).
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700244extern "C" {
// Exposes the current thread's FakeStack pointer (may be null) through the
// public sanitizer interface.
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }
247
248SANITIZER_INTERFACE_ATTRIBUTE
249void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
250 void **end) {
251 FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800252 if (!fs) return nullptr;
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700253 uptr frame_beg, frame_end;
254 FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
255 reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800256 if (!frame) return nullptr;
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700257 if (frame->magic != kCurrentStackFrameMagic)
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800258 return nullptr;
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700259 if (beg) *beg = reinterpret_cast<void*>(frame_beg);
260 if (end) *end = reinterpret_cast<void*>(frame_end);
261 return reinterpret_cast<void*>(frame->real_stack);
262}
Pirama Arumuga Nainarcdce50b2015-07-01 12:26:56 -0700263
264SANITIZER_INTERFACE_ATTRIBUTE
265void __asan_alloca_poison(uptr addr, uptr size) {
266 uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
267 uptr PartialRzAddr = addr + size;
268 uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
269 uptr PartialRzAligned = PartialRzAddr & ~(SHADOW_GRANULARITY - 1);
270 FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
271 FastPoisonShadowPartialRightRedzone(
272 PartialRzAligned, PartialRzAddr % SHADOW_GRANULARITY,
273 RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
274 FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
275}
276
277SANITIZER_INTERFACE_ATTRIBUTE
278void __asan_allocas_unpoison(uptr top, uptr bottom) {
279 if ((!top) || (top > bottom)) return;
280 REAL(memset)(reinterpret_cast<void*>(MemToShadow(top)), 0,
281 (bottom - top) / SHADOW_GRANULARITY);
282}
Pirama Arumuga Nainar799172d2016-03-03 15:50:30 -0800283} // extern "C"