//===-- sanitizer_mutex.h ---------------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
//
//===----------------------------------------------------------------------===//
11
12 #ifndef SANITIZER_MUTEX_H
13 #define SANITIZER_MUTEX_H
14
15 #include "sanitizer_atomic.h"
16 #include "sanitizer_internal_defs.h"
17 #include "sanitizer_libc.h"
18
19 namespace __sanitizer {
20
// Test-and-set spin lock suitable for use as a global: the all-zero image
// produced by static (zero-)initialization is a valid unlocked state, so no
// constructor is required (Init() merely stores 0).
class StaticSpinMutex {
 public:
  void Init() {
    // 0 == unlocked, 1 == locked.
    atomic_store(&state_, 0, memory_order_relaxed);
  }

  void Lock() {
    // Fast path: uncontended acquisition with a single atomic exchange.
    if (TryLock())
      return;
    LockSlow();
  }

  // Returns true iff the lock was acquired.  memory_order_acquire orders the
  // critical section's memory accesses after the acquisition.
  bool TryLock() {
    return atomic_exchange(&state_, 1, memory_order_acquire) == 0;
  }

  void Unlock() {
    // memory_order_release publishes the critical section's writes to the
    // next thread that acquires the lock.
    atomic_store(&state_, 0, memory_order_release);
  }

  // Debug aid: verifies the mutex is held (the owner is not tracked, so this
  // only checks that *somebody* holds it).
  void CheckLocked() {
    CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
  }

 private:
  atomic_uint8_t state_;

  void NOINLINE LockSlow() {
    for (int i = 0;; i++) {
      // Back off: spin with CPU pause hints first, then yield the thread so
      // long contention does not burn a core.
      if (i < 10)
        proc_yield(10);
      else
        internal_sched_yield();
      // Check with a relaxed load before the exchange: the exchange writes
      // the cache line even on failure, so only attempt it when the lock
      // looks free.
      if (atomic_load(&state_, memory_order_relaxed) == 0
          && atomic_exchange(&state_, 1, memory_order_acquire) == 0)
        return;
    }
  }
};
60
// Spin lock for non-global (stack/heap) use: same as StaticSpinMutex, but the
// constructor performs the initialization.
class SpinMutex : public StaticSpinMutex {
 public:
  SpinMutex() {
    Init();
  }

 private:
  // Declared but not implemented: copying a lock makes no sense.
  SpinMutex(const SpinMutex&);
  void operator=(const SpinMutex&);
};
71
// Mutex that blocks the thread instead of spinning; the implementation lives
// in the per-platform source files, keeping OS headers out of this header.
class BlockingMutex {
 public:
  // For LINKER_INITIALIZED globals: presumably must not touch the storage,
  // which is expected to already be zero-initialized -- confirm in the
  // platform implementation.
  explicit BlockingMutex(LinkerInitialized);
  BlockingMutex();
  void Lock();
  void Unlock();
  void CheckLocked();
 private:
  // Opaque storage for the platform primitive.
  // NOTE(review): assumes 10 words are enough on every platform -- verify
  // against the platform implementations.
  uptr opaque_storage_[10];
  uptr owner_;  // for debugging
};
83
84 // Reader-writer spin mutex.
// Reader-writer spin mutex.  State encoding: bit 0 (kWriteLock) is the writer
// flag; the remaining bits count active readers, each contributing
// kReadLock == 2.
class RWMutex {
 public:
  RWMutex() {
    atomic_store(&state_, kUnlocked, memory_order_relaxed);
  }

  ~RWMutex() {
    // Destroying a held mutex is a bug.
    CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  }

  // Writer lock.  Fast path: one CAS from the fully-unlocked state.
  void Lock() {
    u32 cmp = kUnlocked;
    if (atomic_compare_exchange_strong(&state_, &cmp, kWriteLock,
                                       memory_order_acquire))
      return;
    LockSlow();
  }

  // Writer unlock: clears the writer bit with release ordering so the
  // critical section's writes are published to the next acquirer.
  void Unlock() {
    u32 prev = atomic_fetch_sub(&state_, kWriteLock, memory_order_release);
    DCHECK_NE(prev & kWriteLock, 0);  // must have been write-locked
    (void)prev;
  }

  void ReadLock() {
    // Optimistically register as a reader.  If a writer currently holds the
    // lock, our count is already in place, so just wait for the writer bit
    // to clear in ReadLockSlow().
    u32 prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);
    if ((prev & kWriteLock) == 0)
      return;
    ReadLockSlow();
  }

  void ReadUnlock() {
    u32 prev = atomic_fetch_sub(&state_, kReadLock, memory_order_release);
    DCHECK_EQ(prev & kWriteLock, 0);   // no writer may be active
    DCHECK_GT(prev & ~kWriteLock, 0);  // at least our own reader count
    (void)prev;
  }

  // Debug aid: verifies the mutex is held in either mode (owner and mode are
  // not tracked).
  void CheckLocked() {
    CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  }

 private:
  atomic_uint32_t state_;

  enum {
    kUnlocked = 0,
    kWriteLock = 1,
    kReadLock = 2
  };

  void NOINLINE LockSlow() {
    for (int i = 0;; i++) {
      // Spin briefly with pause hints, then yield the thread.
      if (i < 10)
        proc_yield(10);
      else
        internal_sched_yield();
      u32 cmp = atomic_load(&state_, memory_order_relaxed);
      // The writer can only enter from the fully-unlocked state (no readers,
      // no writer).  Weak CAS is fine: spurious failure just loops again.
      if (cmp == kUnlocked &&
          atomic_compare_exchange_weak(&state_, &cmp, kWriteLock,
                                       memory_order_acquire))
        return;
    }
  }

  void NOINLINE ReadLockSlow() {
    // Our reader count was already added in ReadLock(); wait until the
    // writer bit clears.  The acquire load synchronizes with the writer's
    // release in Unlock().
    for (int i = 0;; i++) {
      if (i < 10)
        proc_yield(10);
      else
        internal_sched_yield();
      u32 prev = atomic_load(&state_, memory_order_acquire);
      if ((prev & kWriteLock) == 0)
        return;
    }
  }

  // Declared but not implemented: the mutex is non-copyable.
  RWMutex(const RWMutex&);
  void operator = (const RWMutex&);
};
165
// RAII guard: calls mu->Lock() on construction and mu->Unlock() when the
// scope ends.  Works with any type exposing Lock()/Unlock().
template<typename MutexType>
class GenericScopedLock {
 public:
  explicit GenericScopedLock(MutexType *mu) : m_(mu) { m_->Lock(); }

  ~GenericScopedLock() { m_->Unlock(); }

 private:
  MutexType *m_;

  // Declared but not implemented: copying the guard would double-unlock.
  GenericScopedLock(const GenericScopedLock &);
  void operator=(const GenericScopedLock &);
};
184
// RAII guard for the shared side of a reader-writer mutex: calls
// mu->ReadLock() on construction and mu->ReadUnlock() when the scope ends.
template<typename MutexType>
class GenericScopedReadLock {
 public:
  explicit GenericScopedReadLock(MutexType *mu) : m_(mu) { m_->ReadLock(); }

  ~GenericScopedReadLock() { m_->ReadUnlock(); }

 private:
  MutexType *m_;

  // Declared but not implemented: copying the guard would double-unlock.
  GenericScopedReadLock(const GenericScopedReadLock &);
  void operator=(const GenericScopedReadLock &);
};
203
// Convenience scoped-guard aliases for the concrete mutex types above.
typedef GenericScopedLock<StaticSpinMutex> SpinMutexLock;
typedef GenericScopedLock<BlockingMutex> BlockingMutexLock;
typedef GenericScopedLock<RWMutex> RWMutexLock;
typedef GenericScopedReadLock<RWMutex> RWMutexReadLock;
208
209 } // namespace __sanitizer
210
211 #endif // SANITIZER_MUTEX_H
212