//===-- tsan_stack_trace.cc -----------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "tsan_stack_trace.h"
#include "tsan_rtl.h"
#include "tsan_mman.h"

namespace __tsan {

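// Constructs an empty stack trace that owns no storage; a buffer is
// allocated lazily by Init() or ObtainCurrent().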
StackTrace::StackTrace()
    : n_()
    , s_()
    , c_() {
}

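// Constructs a stack trace backed by a caller-provided buffer of cnt entries;
// the buffer is never freed by Reset().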
StackTrace::StackTrace(uptr *buf, uptr cnt)
    : n_()
    , s_(buf)
    , c_(cnt) {
  CHECK_NE(buf, 0);
  CHECK_NE(cnt, 0);
}

StackTrace::~StackTrace() {
  Reset();
}

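// Clears the trace. Heap-allocated storage is freed; a caller-provided
// buffer (c_ != 0) is left untouched.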
void StackTrace::Reset() {
  if (s_ && !c_) {
    CHECK_NE(n_, 0);
    internal_free(s_);
    s_ = 0;
  }
  n_ = 0;
}

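// Copies cnt program counters from pcs into the trace, allocating storage
// unless a caller-provided buffer is already in place.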
void StackTrace::Init(const uptr *pcs, uptr cnt) {
  Reset();
  if (cnt == 0)
    return;
  if (c_) {
    CHECK_NE(s_, 0);
    CHECK_LE(cnt, c_);
  } else {
    s_ = (uptr*)internal_alloc(MBlockStackTrace, cnt * sizeof(s_[0]));
  }
  n_ = cnt;
  internal_memcpy(s_, pcs, cnt * sizeof(s_[0]));
}

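// Captures the current shadow stack of thr, optionally appending toppc as the
// topmost frame. If the stack does not fit into the available storage, the
// oldest frames are dropped.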
void StackTrace::ObtainCurrent(ThreadState *thr, uptr toppc) {
  Reset();
  n_ = thr->shadow_stack_pos - thr->shadow_stack;
  if (n_ + !!toppc == 0)
    return;
  uptr start = 0;
  if (c_) {
    CHECK_NE(s_, 0);
    // The caller-provided buffer is fixed-size: keep only the newest frames,
    // reserving one slot for toppc if it is present.
    if (n_ + !!toppc > c_) {
      start = n_ - c_ + !!toppc;
      n_ = c_ - !!toppc;
    }
  } else {
    // Cap potentially huge stacks.
    if (n_ + !!toppc > kTraceStackSize) {
      start = n_ - kTraceStackSize + !!toppc;
      n_ = kTraceStackSize - !!toppc;
    }
    s_ = (uptr*)internal_alloc(MBlockStackTrace,
                               (n_ + !!toppc) * sizeof(s_[0]));
  }
  for (uptr i = 0; i < n_; i++)
    s_[i] = thr->shadow_stack[start + i];
  if (toppc) {
    s_[n_] = toppc;
    n_++;
  }
}

bool StackTrace::IsEmpty() const {
  return n_ == 0;
}

uptr StackTrace::Size() const {
  return n_;
}

uptr StackTrace::Get(uptr i) const {
  CHECK_LT(i, n_);
  return s_[i];
}

const uptr *StackTrace::Begin() const {
  return s_;
}

}  // namespace __tsan