blob: 4f617cac8605b06adad38cb03b3f212f9fce523e [file] [log] [blame]
Kostya Serebryany2679f192012-12-10 14:19:15 +00001//===-- asan_fake_stack.cc ------------------------------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of AddressSanitizer, an address sanity checker.
11//
12// FakeStack is used to detect use-after-return bugs.
13//===----------------------------------------------------------------------===//
14#include "asan_allocator.h"
Alexey Samsonov7e843492013-03-28 15:42:43 +000015#include "asan_poisoning.h"
Kostya Serebryany2679f192012-12-10 14:19:15 +000016#include "asan_thread.h"
Kostya Serebryany2679f192012-12-10 14:19:15 +000017
18namespace __asan {
19
// Magic byte patterns for poisoning the shadow of freed fake frames:
// kMagic1 is the single after-return magic byte, replicated to 2, 4 and
// 8 bytes so that SetShadow can store one full u64 per shadow write.
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;
24
25// For small size classes inline PoisonShadow for better performance.
26ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
27 CHECK_EQ(SHADOW_SCALE, 3); // This code expects SHADOW_SCALE=3.
28 u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
29 if (class_id <= 6) {
Kostya Serebryanydff16d42013-09-13 07:20:35 +000030 for (uptr i = 0; i < (1U << class_id); i++)
Kostya Serebryany8f7ec322013-09-13 05:57:58 +000031 shadow[i] = magic;
32 } else {
33 // The size class is too big, it's cheaper to poison only size bytes.
Kostya Serebryanydff16d42013-09-13 07:20:35 +000034 PoisonShadow(ptr, size, static_cast<u8>(magic));
Kostya Serebryany8f7ec322013-09-13 05:57:58 +000035 }
36}
37
Kostya Serebryanye1c68c32013-09-27 11:37:23 +000038FakeStack *FakeStack::Create(uptr stack_size_log) {
39 static uptr kMinStackSizeLog = 16;
40 static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
41 if (stack_size_log < kMinStackSizeLog)
42 stack_size_log = kMinStackSizeLog;
43 if (stack_size_log > kMaxStackSizeLog)
44 stack_size_log = kMaxStackSizeLog;
45 FakeStack *res = reinterpret_cast<FakeStack *>(
46 MmapOrDie(RequiredSize(stack_size_log), "FakeStack"));
47 res->stack_size_log_ = stack_size_log;
48 if (flags()->verbosity) {
49 u8 *p = reinterpret_cast<u8 *>(res);
50 Report("T%d: FakeStack created: %p -- %p stack_size_log: %zd \n",
51 GetCurrentTidOrInvalid(), p,
52 p + FakeStack::RequiredSize(stack_size_log), stack_size_log);
53 }
54 return res;
55}
56
57void FakeStack::Destroy() {
58 PoisonAll(0);
59 UnmapOrDie(this, RequiredSize(stack_size_log_));
60}
61
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +000062void FakeStack::PoisonAll(u8 magic) {
63 PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
64 magic);
Kostya Serebryany2679f192012-12-10 14:19:15 +000065}
66
// Allocates one fake frame of the given size class, recording |real_stack|
// (the corresponding real-stack frame address) inside it. Returns 0 when
// every frame of this class is currently in use. Async-signal-safe: see the
// comment inside the loop for why no atomics are needed.
ALWAYS_INLINE USED
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  // hint_position rotates through the frames so repeated allocations don't
  // always probe from slot 0.
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    // Stash the address of the occupancy flag inside the frame so that
    // Deallocate can clear it without recomputing the position.
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return 0; // We are out of fake stack.
}
95
// If |ptr| lies inside this FakeStack's frame area, returns the beginning
// of the fake frame containing it; otherwise returns 0.
uptr FakeStack::AddrIsInFakeStack(uptr ptr) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  // Each size class occupies a contiguous (1 << stack_size_log)-byte region,
  // so the class id is just the index of the region containing ptr.
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (1UL << stack_size_log));
  // Frames of this class are (1 << (kMinStackFrameSizeLog + class_id)) bytes,
  // so shifting yields the frame index within the region.
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  return base + pos * BytesInSizeClass(class_id);
}
108
// Called on no-return events (throw, longjmp, etc.): frames may have been
// leaked, so request a garbage collection on the next Allocate.
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}
112
113// When throw, longjmp or some such happens we don't call OnFree() and
114// as the result may leak one or more fake frames, but the good news is that
115// we are notified about all such events by HandleNoReturn().
116// If we recently had such no-return event we need to collect garbage frames.
117// We do it based on their 'real_stack' values -- everything that is lower
118// than the current real_stack is garbage.
119NOINLINE void FakeStack::GC(uptr real_stack) {
120 uptr collected = 0;
121 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
Kostya Serebryany89de4572013-09-12 13:25:29 +0000122 u8 *flags = GetFlags(stack_size_log(), class_id);
123 for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
124 i++) {
125 if (flags[i] == 0) continue; // not allocated.
126 FakeFrame *ff = reinterpret_cast<FakeFrame *>(
127 GetFrame(stack_size_log(), class_id, i));
128 if (ff->real_stack < real_stack) {
129 flags[i] = 0;
130 collected++;
131 }
132 }
133 }
134 needs_gc_ = false;
135}
136
Sergey Matveevc5193352013-10-14 14:04:50 +0000137void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
138 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
139 u8 *flags = GetFlags(stack_size_log(), class_id);
140 for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
141 i++) {
142 if (flags[i] == 0) continue; // not allocated.
143 FakeFrame *ff = reinterpret_cast<FakeFrame *>(
144 GetFrame(stack_size_log(), class_id, i));
145 uptr begin = reinterpret_cast<uptr>(ff);
146 callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
147 }
148 }
149}
150
// On Linux (non-Android) cache the current thread's FakeStack in a
// THREADLOCAL variable for fast lookup. Elsewhere GetTLSFakeStack returns 0
// so callers fall back to the slow path via the AsanThread object.
#if SANITIZER_LINUX && !SANITIZER_ANDROID
static THREADLOCAL FakeStack *fake_stack_tls;

FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
FakeStack *GetTLSFakeStack() { return 0; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // SANITIZER_LINUX && !SANITIZER_ANDROID
Kostya Serebryany9433af32013-09-13 06:32:26 +0000164
Kostya Serebryany6147f022013-09-13 06:04:18 +0000165static FakeStack *GetFakeStack() {
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000166 AsanThread *t = GetCurrentThread();
Kostya Serebryany6147f022013-09-13 06:04:18 +0000167 if (!t) return 0;
168 return t->fake_stack();
169}
170
171static FakeStack *GetFakeStackFast() {
Kostya Serebryany9433af32013-09-13 06:32:26 +0000172 if (FakeStack *fs = GetTLSFakeStack())
173 return fs;
Kostya Serebryany230e52f2013-09-18 10:35:12 +0000174 if (!__asan_option_detect_stack_use_after_return)
175 return 0;
Kostya Serebryany6147f022013-09-13 06:04:18 +0000176 return GetFakeStack();
Kostya Serebryany6147f022013-09-13 06:04:18 +0000177}
178
179ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size, uptr real_stack) {
180 FakeStack *fs = GetFakeStackFast();
Kostya Serebryanyc98fc1f2013-09-12 08:34:50 +0000181 if (!fs) return real_stack;
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000182 FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack);
Kostya Serebryanye1c68c32013-09-27 11:37:23 +0000183 if (!ff)
184 return real_stack; // Out of fake stack, return the real one.
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000185 uptr ptr = reinterpret_cast<uptr>(ff);
Kostya Serebryany8f7ec322013-09-13 05:57:58 +0000186 SetShadow(ptr, size, class_id, 0);
Kostya Serebryany2679f192012-12-10 14:19:15 +0000187 return ptr;
188}
189
Kostya Serebryany34e3ed12013-09-10 13:16:26 +0000190ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size, uptr real_stack) {
Kostya Serebryanyac3ae5d2013-09-12 07:11:58 +0000191 if (ptr == real_stack)
192 return;
Kostya Serebryanyb3889872013-09-17 07:42:54 +0000193 FakeStack::Deallocate(ptr, class_id);
Kostya Serebryany8f7ec322013-09-13 05:57:58 +0000194 SetShadow(ptr, size, class_id, kMagic8);
Kostya Serebryany2679f192012-12-10 14:19:15 +0000195}
Kostya Serebryany34e3ed12013-09-10 13:16:26 +0000196
197} // namespace __asan
198
199// ---------------------- Interface ---------------- {{{1
// Defines the __asan_stack_malloc_N / __asan_stack_free_N entry points that
// compiler-instrumented code calls on function entry/exit; one pair is
// instantiated below for each of the 11 frame size classes (0..10).
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
  __asan_stack_malloc_##class_id(uptr size, uptr real_stack) {                 \
    return __asan::OnMalloc(class_id, size, real_stack);                       \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
      uptr ptr, uptr size, uptr real_stack) {                                  \
    __asan::OnFree(ptr, class_id, size, real_stack);                           \
  }

DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)