//===-- sanitizer_mutex.h ---------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_MUTEX_H
#define SANITIZER_MUTEX_H

#include "sanitizer_internal_defs.h"
#include "sanitizer_atomic.h"

namespace __sanitizer {

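// A simple test-and-set spin lock: Lock() spins, yielding the processor via
// proc_yield(), until the atomic exchange succeeds. Not reentrant; copying is
// disallowed.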
class SpinMutex {
 public:
  SpinMutex() {
    atomic_store(&state_, 0, memory_order_relaxed);
  }

  void Lock() {
    while (atomic_exchange(&state_, 1, memory_order_acquire))
      proc_yield(10);
  }

  void Unlock() {
    atomic_store(&state_, 0, memory_order_release);
  }

 private:
  atomic_uint8_t state_;

  SpinMutex(const SpinMutex&);
  void operator=(const SpinMutex&);
};

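// RAII wrapper that acquires the given mutex in its constructor and releases
// it in its destructor.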
template<typename MutexType>
class GenericScopedLock {
 public:
  explicit GenericScopedLock(MutexType *mu)
      : mu_(mu) {
    mu_->Lock();
  }

  ~GenericScopedLock() {
    mu_->Unlock();
  }

 private:
  MutexType *mu_;

  GenericScopedLock(const GenericScopedLock&);
  void operator=(const GenericScopedLock&);
};

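// Read-side counterpart of GenericScopedLock: holds MutexType::ReadLock() for
// the lifetime of the object.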
template<typename MutexType>
class GenericScopedReadLock {
 public:
  explicit GenericScopedReadLock(MutexType *mu)
      : mu_(mu) {
    mu_->ReadLock();
  }

  ~GenericScopedReadLock() {
    mu_->ReadUnlock();
  }

 private:
  MutexType *mu_;

  GenericScopedReadLock(const GenericScopedReadLock&);
  void operator=(const GenericScopedReadLock&);
};

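// Usage sketch (illustrative only): a SpinMutexLock holds the SpinMutex for
// the enclosing scope, e.g.
//
//   static SpinMutex mu;
//   void Foo() {
//     SpinMutexLock l(&mu);  // mu.Lock() in the constructor.
//     // ... critical section ...
//   }                        // mu.Unlock() in the destructor.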
typedef GenericScopedLock<SpinMutex> SpinMutexLock;

}  // namespace __sanitizer

#endif  // SANITIZER_MUTEX_H