tracing_mutex/lockapi.rs
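//! `lock_api` trait implementations for [`TracingWrapper`], allowing any raw
//! locking primitive from the `lock_api` ecosystem to be wrapped with
//! lock-order tracking.
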
use lock_api::GuardNoSend;
use lock_api::RawMutex;
use lock_api::RawMutexFair;
use lock_api::RawMutexTimed;
use lock_api::RawRwLock;
use lock_api::RawRwLockDowngrade;
use lock_api::RawRwLockFair;
use lock_api::RawRwLockRecursive;
use lock_api::RawRwLockRecursiveTimed;
use lock_api::RawRwLockTimed;
use lock_api::RawRwLockUpgrade;
use lock_api::RawRwLockUpgradeDowngrade;
use lock_api::RawRwLockUpgradeFair;
use lock_api::RawRwLockUpgradeTimed;

use crate::LazyMutexId;

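/// Wrapper that pairs a `lock_api` locking primitive with a lazily allocated
/// ID used to track the order in which locks are acquired.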
#[derive(Debug, Default)]
pub struct TracingWrapper<T> {
    inner: T,
    id: LazyMutexId,
}

impl<T> TracingWrapper<T> {
    /// Mark this lock as held for lock-order tracking purposes.
    fn mark_held(&self) {
        self.id.mark_held();
    }

    /// Mark this lock as no longer held.
    ///
    /// # Safety
    ///
    /// This lock must previously have been marked as held.
    unsafe fn mark_released(&self) {
        self.id.mark_released();
    }

    /// Mark the lock as held, then acquire it using the provided closure.
    fn lock(&self, f: impl FnOnce()) {
        self.mark_held();
        f();
    }

    /// Release the lock using the provided closure, then mark it as released.
    unsafe fn unlock(&self, f: impl FnOnce()) {
        f();
        self.mark_released();
    }

    /// Mark the lock as held before attempting the acquisition, and roll the
    /// tracking state back if the closure reports that the attempt failed.
    fn conditionally_lock(&self, f: impl FnOnce() -> bool) -> bool {
        self.mark_held();

        if f() {
            true
        } else {
            unsafe { self.mark_released() }
            false
        }
    }
}

unsafe impl<T> RawMutex for TracingWrapper<T>
where
    T: RawMutex,
{
    const INIT: Self = Self {
        inner: T::INIT,
        id: LazyMutexId::new(),
    };

    // Held-lock bookkeeping is kept per thread, so the guard must stay on the
    // thread that acquired it.
    type GuardMarker = GuardNoSend;

    fn lock(&self) {
        self.lock(|| self.inner.lock());
    }

    fn try_lock(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock())
    }

    unsafe fn unlock(&self) {
        self.unlock(|| self.inner.unlock());
    }

    fn is_locked(&self) -> bool {
        self.inner.is_locked()
    }
}
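
// With `RawMutex` implemented, a tracing mutex type can be built directly on
// top of `lock_api`'s generic `Mutex`. A minimal sketch; the alias name and
// the choice of raw lock are illustrative, not part of this file:
//
//     pub type TracingMutex<R, T> = lock_api::Mutex<TracingWrapper<R>, T>;
//
// Locking through such an alias goes through the `lock`/`unlock` methods
// above, so every acquisition and release is recorded.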

unsafe impl<T> RawMutexFair for TracingWrapper<T>
where
    T: RawMutexFair,
{
    unsafe fn unlock_fair(&self) {
        self.unlock(|| self.inner.unlock_fair())
    }

    unsafe fn bump(&self) {
        // Bumping releases and immediately reacquires the lock, so from the
        // tracking point of view it remains held; no bookkeeping is needed.
        self.inner.bump();
    }
}

unsafe impl<T> RawMutexTimed for TracingWrapper<T>
where
    T: RawMutexTimed,
{
    type Duration = T::Duration;

    type Instant = T::Instant;

    fn try_lock_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_for(timeout))
    }

    fn try_lock_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_until(timeout))
    }
}

unsafe impl<T> RawRwLock for TracingWrapper<T>
where
    T: RawRwLock,
{
    const INIT: Self = Self {
        inner: T::INIT,
        id: LazyMutexId::new(),
    };

    // As with the mutex guard above, read and write guards must stay on the
    // thread that acquired them.
    type GuardMarker = GuardNoSend;

    fn lock_shared(&self) {
        self.lock(|| self.inner.lock_shared());
    }

    fn try_lock_shared(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared())
    }

    unsafe fn unlock_shared(&self) {
        self.unlock(|| self.inner.unlock_shared());
    }

    fn lock_exclusive(&self) {
        self.lock(|| self.inner.lock_exclusive());
    }

    fn try_lock_exclusive(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_exclusive())
    }

    unsafe fn unlock_exclusive(&self) {
        self.unlock(|| self.inner.unlock_exclusive());
    }

    fn is_locked(&self) -> bool {
        self.inner.is_locked()
    }
}
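
// Similarly, once `RawRwLock` is implemented the wrapper slots into
// `lock_api`'s generic `RwLock`. Again a sketch; the alias name is
// illustrative:
//
//     pub type TracingRwLock<R, T> = lock_api::RwLock<TracingWrapper<R>, T>;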

unsafe impl<T> RawRwLockDowngrade for TracingWrapper<T>
where
    T: RawRwLockDowngrade,
{
    unsafe fn downgrade(&self) {
        // The lock stays held, only its mode changes from exclusive to
        // shared, so no tracking update is needed.
        self.inner.downgrade()
    }
}

unsafe impl<T> RawRwLockUpgrade for TracingWrapper<T>
where
    T: RawRwLockUpgrade,
{
    fn lock_upgradable(&self) {
        self.lock(|| self.inner.lock_upgradable());
    }

    fn try_lock_upgradable(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_upgradable())
    }

    unsafe fn unlock_upgradable(&self) {
        self.unlock(|| self.inner.unlock_upgradable());
    }

    unsafe fn upgrade(&self) {
        self.inner.upgrade();
    }

    unsafe fn try_upgrade(&self) -> bool {
        self.inner.try_upgrade()
    }
}

unsafe impl<T> RawRwLockFair for TracingWrapper<T>
where
    T: RawRwLockFair,
{
    unsafe fn unlock_shared_fair(&self) {
        self.unlock(|| self.inner.unlock_shared_fair());
    }

    unsafe fn unlock_exclusive_fair(&self) {
        self.unlock(|| self.inner.unlock_exclusive_fair());
    }

    unsafe fn bump_shared(&self) {
        self.inner.bump_shared();
    }

    unsafe fn bump_exclusive(&self) {
        self.inner.bump_exclusive();
    }
}

unsafe impl<T> RawRwLockRecursive for TracingWrapper<T>
where
    T: RawRwLockRecursive,
{
    fn lock_shared_recursive(&self) {
        self.lock(|| self.inner.lock_shared_recursive());
    }

    fn try_lock_shared_recursive(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_recursive())
    }
}

unsafe impl<T> RawRwLockRecursiveTimed for TracingWrapper<T>
where
    T: RawRwLockRecursiveTimed,
{
    fn try_lock_shared_recursive_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_recursive_for(timeout))
    }

    fn try_lock_shared_recursive_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_recursive_until(timeout))
    }
}

unsafe impl<T> RawRwLockTimed for TracingWrapper<T>
where
    T: RawRwLockTimed,
{
    type Duration = T::Duration;

    type Instant = T::Instant;

    fn try_lock_shared_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_for(timeout))
    }

    fn try_lock_shared_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_until(timeout))
    }

    fn try_lock_exclusive_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_exclusive_for(timeout))
    }

    fn try_lock_exclusive_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_exclusive_until(timeout))
    }
}

unsafe impl<T> RawRwLockUpgradeDowngrade for TracingWrapper<T>
where
    T: RawRwLockUpgradeDowngrade,
{
    unsafe fn downgrade_upgradable(&self) {
        self.inner.downgrade_upgradable()
    }

    unsafe fn downgrade_to_upgradable(&self) {
        self.inner.downgrade_to_upgradable()
    }
}

unsafe impl<T> RawRwLockUpgradeFair for TracingWrapper<T>
where
    T: RawRwLockUpgradeFair,
{
    unsafe fn unlock_upgradable_fair(&self) {
        self.unlock(|| self.inner.unlock_upgradable_fair())
    }

    unsafe fn bump_upgradable(&self) {
        self.inner.bump_upgradable()
    }
}

unsafe impl<T> RawRwLockUpgradeTimed for TracingWrapper<T>
where
    T: RawRwLockUpgradeTimed,
{
    fn try_lock_upgradable_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_upgradable_for(timeout))
    }

    fn try_lock_upgradable_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_upgradable_until(timeout))
    }

    unsafe fn try_upgrade_for(&self, timeout: Self::Duration) -> bool {
        self.inner.try_upgrade_for(timeout)
    }

    unsafe fn try_upgrade_until(&self, timeout: Self::Instant) -> bool {
        self.inner.try_upgrade_until(timeout)
    }
}
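
// Usage sketch (not part of the original file): wrap a minimal spinlock in
// `TracingWrapper` and drive it through `lock_api::Mutex`. The spinlock, the
// `Mutex` alias, and the test are illustrative only; any `RawMutex`
// implementation would do.
#[cfg(test)]
mod tests {
    use std::sync::atomic::{AtomicBool, Ordering};

    use super::TracingWrapper;

    // A deliberately simple test-and-set spinlock implementing `RawMutex`.
    struct RawSpinlock(AtomicBool);

    unsafe impl lock_api::RawMutex for RawSpinlock {
        const INIT: Self = RawSpinlock(AtomicBool::new(false));

        type GuardMarker = lock_api::GuardNoSend;

        fn lock(&self) {
            while !self.try_lock() {
                std::hint::spin_loop();
            }
        }

        fn try_lock(&self) -> bool {
            self.0
                .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
                .is_ok()
        }

        unsafe fn unlock(&self) {
            self.0.store(false, Ordering::Release);
        }

        fn is_locked(&self) -> bool {
            self.0.load(Ordering::Relaxed)
        }
    }

    // Illustrative alias: the tracing behaviour comes entirely from the
    // `RawMutex` impl on `TracingWrapper` above.
    type Mutex<T> = lock_api::Mutex<TracingWrapper<RawSpinlock>, T>;

    #[test]
    fn lock_and_unlock_through_wrapper() {
        let mutex = Mutex::new(21);
        {
            let mut guard = mutex.lock();
            *guard *= 2;
        }
        assert_eq!(*mutex.lock(), 42);
    }
}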