blob: ca3e2f9a483925a7136dff4bed0b44151ae6ddf4 [file] [log] [blame]
//===-- sanitizer_mutex.h ---------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
//
//===----------------------------------------------------------------------===//
Dmitry Vyukov7a9fa7d2012-06-29 17:10:08 +000013
14#ifndef SANITIZER_MUTEX_H
15#define SANITIZER_MUTEX_H
16
Dmitry Vyukov7a9fa7d2012-06-29 17:10:08 +000017#include "sanitizer_atomic.h"
Dmitry Vyukovb13099c2012-07-02 07:09:21 +000018#include "sanitizer_internal_defs.h"
19#include "sanitizer_libc.h"
Dmitry Vyukov7a9fa7d2012-06-29 17:10:08 +000020
21namespace __sanitizer {
22
Dmitry Vyukov513f0232012-06-29 17:32:18 +000023class SpinMutex {
24 public:
25 SpinMutex() {
26 atomic_store(&state_, 0, memory_order_relaxed);
27 }
28
29 void Lock() {
Dmitry Vyukovb13099c2012-07-02 07:09:21 +000030 if (atomic_exchange(&state_, 1, memory_order_acquire) == 0)
31 return;
32 LockSlow();
Dmitry Vyukov513f0232012-06-29 17:32:18 +000033 }
34
35 void Unlock() {
36 atomic_store(&state_, 0, memory_order_release);
37 }
38
39 private:
40 atomic_uint8_t state_;
41
Dmitry Vyukovb13099c2012-07-02 07:09:21 +000042 void NOINLINE LockSlow() {
43 for (int i = 0;; i++) {
44 if (i < 10)
45 proc_yield(10);
46 else
47 internal_sched_yield();
48 if (atomic_load(&state_, memory_order_relaxed) == 0
49 && atomic_exchange(&state_, 1, memory_order_acquire) == 0)
50 return;
51 }
52 }
53
Dmitry Vyukov513f0232012-06-29 17:32:18 +000054 SpinMutex(const SpinMutex&);
55 void operator=(const SpinMutex&);
56};
57
// RAII guard: acquires *mu in the constructor via MutexType::Lock() and
// releases it in the destructor via MutexType::Unlock().
template<typename MutexType>
class GenericScopedLock {
 public:
  explicit GenericScopedLock(MutexType *mu) : mu_(mu) { mu_->Lock(); }

  ~GenericScopedLock() { mu_->Unlock(); }

 private:
  MutexType *mu_;  // Guarded mutex; not owned.

  // Copying a scope guard would double-unlock, so forbid it.
  GenericScopedLock(const GenericScopedLock&);
  void operator=(const GenericScopedLock&);
};
76
// RAII guard for the reader side of a reader-writer mutex: acquires *mu
// via MutexType::ReadLock() in the constructor and releases it via
// MutexType::ReadUnlock() in the destructor.
template<typename MutexType>
class GenericScopedReadLock {
 public:
  explicit GenericScopedReadLock(MutexType *mu) : mu_(mu) { mu_->ReadLock(); }

  ~GenericScopedReadLock() { mu_->ReadUnlock(); }

 private:
  MutexType *mu_;  // Guarded mutex; not owned.

  // Copying a scope guard would double-unlock, so forbid it.
  GenericScopedReadLock(const GenericScopedReadLock&);
  void operator=(const GenericScopedReadLock&);
};
95
// Convenience RAII guard for SpinMutex.
typedef GenericScopedLock<SpinMutex> SpinMutexLock;
97
Dmitry Vyukov7a9fa7d2012-06-29 17:10:08 +000098} // namespace __sanitizer
99
100#endif // SANITIZER_MUTEX_H