blob: 2fe8ef4fe68ba2ba88e45104f7ced24a45f8e934 [file] [log] [blame]
Kostya Serebryany2679f192012-12-10 14:19:15 +00001//===-- asan_fake_stack.cc ------------------------------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of AddressSanitizer, an address sanity checker.
11//
12// FakeStack is used to detect use-after-return bugs.
13//===----------------------------------------------------------------------===//
14#include "asan_allocator.h"
Alexey Samsonov7e843492013-03-28 15:42:43 +000015#include "asan_poisoning.h"
Kostya Serebryany2679f192012-12-10 14:19:15 +000016#include "asan_thread.h"
Kostya Serebryany2679f192012-12-10 14:19:15 +000017
18namespace __asan {
19
// Zero-initializes the fake stack.  Runs early (thread setup), so we
// verify that the real (non-intercepted) memset has been resolved.
FakeStack::FakeStack() {
  CHECK(REAL(memset));
  // Use REAL(memset): the intercepted memset may not be safe to call here.
  REAL(memset)(this, 0, sizeof(*this));
}
24
25bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
26 uptr mem = allocated_size_classes_[size_class];
27 uptr size = ClassMmapSize(size_class);
28 bool res = mem && addr >= mem && addr < mem + size;
29 return res;
30}
31
32uptr FakeStack::AddrIsInFakeStack(uptr addr) {
Kostya Serebryany71c9e9e2013-04-11 15:35:40 +000033 for (uptr size_class = 0; size_class < kNumberOfSizeClasses; size_class++) {
34 if (!AddrIsInSizeClass(addr, size_class)) continue;
35 uptr size_class_first_ptr = allocated_size_classes_[size_class];
36 uptr size = ClassSize(size_class);
37 CHECK_LE(size_class_first_ptr, addr);
38 CHECK_GT(size_class_first_ptr + ClassMmapSize(size_class), addr);
39 return size_class_first_ptr + ((addr - size_class_first_ptr) / size) * size;
Kostya Serebryany2679f192012-12-10 14:19:15 +000040 }
41 return 0;
42}
43
44// We may want to compute this during compilation.
Kostya Serebryany71c9e9e2013-04-11 15:35:40 +000045ALWAYS_INLINE uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
Kostya Serebryany2679f192012-12-10 14:19:15 +000046 uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
47 uptr log = Log2(rounded_size);
Kostya Serebryany71c9e9e2013-04-11 15:35:40 +000048 CHECK_LE(alloc_size, (1UL << log));
49 CHECK_GT(alloc_size, (1UL << (log-1)));
Kostya Serebryany2679f192012-12-10 14:19:15 +000050 uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
Kostya Serebryany71c9e9e2013-04-11 15:35:40 +000051 CHECK_LT(res, kNumberOfSizeClasses);
52 CHECK_GE(ClassSize(res), rounded_size);
Kostya Serebryany2679f192012-12-10 14:19:15 +000053 return res;
54}
55
56void FakeFrameFifo::FifoPush(FakeFrame *node) {
57 CHECK(node);
58 node->next = 0;
59 if (first_ == 0 && last_ == 0) {
60 first_ = last_ = node;
61 } else {
62 CHECK(first_);
63 CHECK(last_);
64 last_->next = node;
65 last_ = node;
66 }
67}
68
69FakeFrame *FakeFrameFifo::FifoPop() {
70 CHECK(first_ && last_ && "Exhausted fake stack");
71 FakeFrame *res = 0;
72 if (first_ == last_) {
73 res = first_;
74 first_ = last_ = 0;
75 } else {
76 res = first_;
77 first_ = first_->next;
78 }
79 return res;
80}
81
82void FakeStack::Init(uptr stack_size) {
83 stack_size_ = stack_size;
84 alive_ = true;
85}
86
87void FakeStack::Cleanup() {
88 alive_ = false;
89 for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
90 uptr mem = allocated_size_classes_[i];
91 if (mem) {
92 PoisonShadow(mem, ClassMmapSize(i), 0);
93 allocated_size_classes_[i] = 0;
94 UnmapOrDie((void*)mem, ClassMmapSize(i));
95 }
96 }
97}
98
// Size of the mapping that backs one size class.  Note that
// |size_class| is currently unused: every class reserves the same
// amount, the thread's stack size rounded up to a power of two.
uptr FakeStack::ClassMmapSize(uptr size_class) {
  return RoundUpToPowerOfTwo(stack_size_);
}
102
103void FakeStack::AllocateOneSizeClass(uptr size_class) {
104 CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
105 uptr new_mem = (uptr)MmapOrDie(
106 ClassMmapSize(size_class), __FUNCTION__);
107 // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
Alexey Samsonov89c13842013-03-20 09:23:28 +0000108 // GetCurrentThread()->tid(),
Kostya Serebryany2679f192012-12-10 14:19:15 +0000109 // size_class, new_mem, new_mem + ClassMmapSize(size_class),
110 // ClassMmapSize(size_class));
111 uptr i;
112 for (i = 0; i < ClassMmapSize(size_class);
113 i += ClassSize(size_class)) {
114 size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
115 }
116 CHECK(i == ClassMmapSize(size_class));
117 allocated_size_classes_[size_class] = new_mem;
118}
119
Kostya Serebryany71c9e9e2013-04-11 15:35:40 +0000120ALWAYS_INLINE uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
Kostya Serebryany2679f192012-12-10 14:19:15 +0000121 if (!alive_) return real_stack;
122 CHECK(size <= kMaxStackMallocSize && size > 1);
123 uptr size_class = ComputeSizeClass(size);
124 if (!allocated_size_classes_[size_class]) {
125 AllocateOneSizeClass(size_class);
126 }
127 FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
128 CHECK(fake_frame);
129 fake_frame->size_minus_one = size - 1;
130 fake_frame->real_stack = real_stack;
131 while (FakeFrame *top = call_stack_.top()) {
132 if (top->real_stack > real_stack) break;
133 call_stack_.LifoPop();
134 DeallocateFrame(top);
135 }
136 call_stack_.LifoPush(fake_frame);
137 uptr ptr = (uptr)fake_frame;
138 PoisonShadow(ptr, size, 0);
139 return ptr;
140}
141
Kostya Serebryany71c9e9e2013-04-11 15:35:40 +0000142ALWAYS_INLINE void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
Kostya Serebryany2679f192012-12-10 14:19:15 +0000143 CHECK(alive_);
Timur Iskhodzhanov5e97ba32013-05-29 14:11:44 +0000144 uptr size = static_cast<uptr>(fake_frame->size_minus_one + 1);
Kostya Serebryany2679f192012-12-10 14:19:15 +0000145 uptr size_class = ComputeSizeClass(size);
146 CHECK(allocated_size_classes_[size_class]);
147 uptr ptr = (uptr)fake_frame;
148 CHECK(AddrIsInSizeClass(ptr, size_class));
149 CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
150 size_classes_[size_class].FifoPush(fake_frame);
151}
152
// Called when the instrumented function returns: re-poisons the frame's
// shadow so later accesses report use-after-return.
// |real_stack| is unused here.
ALWAYS_INLINE void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
  FakeFrame *fake_frame = (FakeFrame*)ptr;
  // The frame is expected to carry the "retired" magic by this point.
  CHECK_EQ(fake_frame->magic, kRetiredStackFrameMagic);
  CHECK_NE(fake_frame->descr, 0);
  CHECK_EQ(fake_frame->size_minus_one, size - 1);
  PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
}
160
161} // namespace __asan
162
163// ---------------------- Interface ---------------- {{{1
164using namespace __asan; // NOLINT
165
166uptr __asan_stack_malloc(uptr size, uptr real_stack) {
167 if (!flags()->use_fake_stack) return real_stack;
Alexey Samsonov89c13842013-03-20 09:23:28 +0000168 AsanThread *t = GetCurrentThread();
Kostya Serebryany2679f192012-12-10 14:19:15 +0000169 if (!t) {
170 // TSD is gone, use the real stack.
171 return real_stack;
172 }
173 uptr ptr = t->fake_stack().AllocateStack(size, real_stack);
174 // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
175 return ptr;
176}
177
178void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
179 if (!flags()->use_fake_stack) return;
180 if (ptr != real_stack) {
181 FakeStack::OnFree(ptr, size, real_stack);
182 }
183}