//! A small reader-writer lock that can never block: all locking methods
//! return immediately, reporting failure instead of waiting.

#![warn(
    clippy::pedantic,
    rust_2018_idioms,
    missing_docs,
    unused_qualifications
)]
#![cfg_attr(not(test), no_std)]

use ::core::{
    cell::UnsafeCell,
    fmt::{self, Debug, Display, Formatter},
    marker::PhantomData,
    mem::ManuallyDrop,
    ops::{Deref, DerefMut},
    ptr::NonNull,
    sync::atomic::{self, AtomicUsize},
};

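/// A reader-writer lock that never blocks.
///
/// Unlike `std::sync::RwLock`, this lock has no blocking `read` and `write`
/// methods, only fallible `try_read` and `try_write`, which makes it usable
/// without an OS (`no_std`).
///
/// # Example
///
/// A minimal usage sketch (this assumes the crate is compiled under the name
/// `try_rwlock`):
///
/// ```
/// use try_rwlock::TryRwLock;
///
/// let lock = TryRwLock::new(5);
/// {
///     let guard = lock.try_read().unwrap();
///     assert_eq!(*guard, 5);
///     // No writer can be created while a reader exists.
///     assert!(lock.try_write().is_none());
/// }
/// *lock.try_write().unwrap() = 6;
/// assert_eq!(lock.into_inner(), 6);
/// ```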
#[derive(Default)]
pub struct TryRwLock<T> {
    /// The number of read guards that currently exist, or `usize::MAX` if
    /// the lock is write-locked.
    readers: AtomicUsize,
    /// The data protected by the lock.
    data: UnsafeCell<T>,
}

impl<T> TryRwLock<T> {
    /// Create a new unlocked `TryRwLock<T>`.
    #[must_use]
    pub const fn new(data: T) -> Self {
        Self {
            readers: AtomicUsize::new(0),
            data: UnsafeCell::new(data),
        }
    }

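    /// Attempt to acquire shared read access to the lock, returning `None`
    /// if it is currently write-locked.
    ///
    /// # Example
    ///
    /// A sketch (assuming the crate name `try_rwlock`):
    ///
    /// ```
    /// use try_rwlock::TryRwLock;
    ///
    /// let lock = TryRwLock::new("hello");
    /// let a = lock.try_read().unwrap();
    /// let b = lock.try_read().unwrap(); // any number of readers may coexist
    /// assert_eq!(*a, "hello");
    /// assert!(lock.try_write().is_none()); // but writers are locked out
    /// drop((a, b));
    /// assert!(lock.try_write().is_some());
    /// ```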
    pub fn try_read(&self) -> Option<ReadGuard<'_, T>> {
        // Increment the reader count, unless the lock is write-locked: in
        // that case `readers` is `usize::MAX`, `checked_add` returns `None`
        // and the whole `fetch_update` fails.
        self.readers
            .fetch_update(
                atomic::Ordering::Acquire,
                atomic::Ordering::Relaxed,
                |readers| readers.checked_add(1),
            )
            .ok()
            .map(|_| unsafe { ReadGuard::new(self) })
    }

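    /// Attempt to acquire exclusive write access to the lock, returning
    /// `None` if it is locked in any way.
    ///
    /// # Example
    ///
    /// A sketch (assuming the crate name `try_rwlock`):
    ///
    /// ```
    /// use try_rwlock::TryRwLock;
    ///
    /// let lock = TryRwLock::new(1);
    /// let mut guard = lock.try_write().unwrap();
    /// *guard += 1;
    /// // The lock is now exclusively held.
    /// assert!(lock.try_read().is_none());
    /// assert!(lock.try_write().is_none());
    /// drop(guard);
    /// assert_eq!(*lock.try_read().unwrap(), 2);
    /// ```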
    pub fn try_write(&self) -> Option<WriteGuard<'_, T>> {
        // Transition from "no readers" straight to the write-locked sentinel
        // value; fail if any reader or writer currently holds the lock.
        self.readers
            .compare_exchange(
                0,
                usize::MAX,
                atomic::Ordering::Acquire,
                atomic::Ordering::Relaxed,
            )
            .ok()
            .map(|_| unsafe { WriteGuard::new(self) })
    }

    /// Consume the lock, returning the data it protects.
    #[must_use]
    pub fn into_inner(self) -> T {
        self.data.into_inner()
    }

    /// Get a mutable reference to the protected data.
    ///
    /// This takes `&mut self`, which guarantees that no guards exist, so no
    /// locking is necessary.
    #[must_use]
    pub fn get_mut(&mut self) -> &mut T {
        self.data.get_mut()
    }

    /// Check whether the lock is currently locked in any way, whether by a
    /// reader or by a writer.
    #[must_use]
    pub fn is_locked(&self) -> bool {
        self.readers.load(atomic::Ordering::Acquire) != 0
    }

    /// Check whether the lock is currently write-locked.
    #[must_use]
    pub fn is_write_locked(&self) -> bool {
        self.readers.load(atomic::Ordering::Acquire) == usize::MAX
    }
}

impl<T: Debug> Debug for TryRwLock<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        #[allow(clippy::option_if_let_else)]
        if let Some(guard) = self.try_read() {
            f.debug_struct("TryRwLock").field("data", &*guard).finish()
        } else {
            struct LockedPlaceholder;
            impl Debug for LockedPlaceholder {
                fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                    f.write_str("<locked>")
                }
            }

            f.debug_struct("TryRwLock")
                .field("data", &LockedPlaceholder)
                .finish()
        }
    }
}

impl<T> From<T> for TryRwLock<T> {
    fn from(data: T) -> Self {
        Self::new(data)
    }
}

// SAFETY: Sending a `TryRwLock<T>` to another thread sends the `T` it owns
// along with it, so `T: Send` is required.
unsafe impl<T: Send> Send for TryRwLock<T> {}

// SAFETY: Sharing a `TryRwLock<T>` between threads allows shared access to
// the data from several threads (through read guards) as well as moving
// mutable access between threads (through write guards), so both `T: Send`
// and `T: Sync` are required.
unsafe impl<T: Send + Sync> Sync for TryRwLock<T> {}

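/// A RAII guard that grants shared read access to the data in a `TryRwLock`.
///
/// The type parameter `U` is the type of data the guard dereferences to; it
/// starts out as `T` and changes when the guard is [mapped](Self::map).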
#[must_use = "if unused the TryRwLock will immediately unlock"]
pub struct ReadGuard<'lock, T, U = T> {
    /// The data currently exposed by the guard; this points into the lock's
    /// `UnsafeCell` unless the guard has been mapped.
    data: NonNull<U>,
    /// The lock that is read-locked by this guard.
    lock: &'lock TryRwLock<T>,
}

// SAFETY: A `ReadGuard` only ever hands out `&U`, and it holds a
// `&TryRwLock<T>` that it uses to atomically decrement the reader count on
// drop, so sending or sharing it requires `T: Sync` and `U: Sync`.
unsafe impl<T: Sync, U: Sync> Send for ReadGuard<'_, T, U> {}
unsafe impl<T: Sync, U: Sync> Sync for ReadGuard<'_, T, U> {}

impl<'lock, T> ReadGuard<'lock, T> {
    /// # Safety
    ///
    /// The caller must own one unit of the lock's reader count, which is
    /// transferred to the returned guard.
    unsafe fn new(lock: &'lock TryRwLock<T>) -> Self {
        Self {
            data: NonNull::new(lock.data.get()).expect("`UnsafeCell::get` never returns null"),
            lock,
        }
    }
}

impl<'lock, T, U> ReadGuard<'lock, T, U> {
    /// Get a reference to the `TryRwLock` this guard has read-locked.
    ///
    /// This is an associated function rather than a method to avoid clashing
    /// with methods of the guarded data.
    #[must_use]
    pub fn rwlock(guard: &Self) -> &'lock TryRwLock<T> {
        guard.lock
    }

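    /// Attempt to upgrade this read guard to a write guard, consuming it.
    ///
    /// Upgrading succeeds only when this is the lock's sole read guard; any
    /// mapping is discarded, and the returned write guard covers all of the
    /// lock's data.
    ///
    /// # Errors
    ///
    /// Hands the original guard back if other read guards exist.
    ///
    /// # Example
    ///
    /// A sketch (assuming the crate name `try_rwlock`):
    ///
    /// ```
    /// use try_rwlock::{ReadGuard, TryRwLock};
    ///
    /// let lock = TryRwLock::new(1);
    ///
    /// let reader = lock.try_read().unwrap();
    /// // With exactly one reader, upgrading succeeds.
    /// let mut writer = ReadGuard::try_upgrade(reader).unwrap();
    /// *writer += 1;
    /// drop(writer);
    ///
    /// let r1 = lock.try_read().unwrap();
    /// let r2 = lock.try_read().unwrap();
    /// // With two readers, upgrading fails and hands the guard back.
    /// let r1 = ReadGuard::try_upgrade(r1).unwrap_err();
    /// drop((r1, r2));
    /// ```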
    pub fn try_upgrade(guard: Self) -> Result<WriteGuard<'lock, T>, Self> {
        // Upgrading is only possible if we are the lone reader: atomically
        // swap a reader count of exactly 1 for the write-locked sentinel.
        match guard.lock.readers.compare_exchange(
            1,
            usize::MAX,
            atomic::Ordering::Acquire,
            atomic::Ordering::Relaxed,
        ) {
            Ok(_) => {
                // Skip the reader's destructor: its count was already
                // consumed by the successful exchange above.
                let guard = ManuallyDrop::new(guard);
                Ok(unsafe { WriteGuard::new(guard.lock) })
            }
            Err(_) => Err(guard),
        }
    }

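    /// Map the data this guard points to, e.g. to a field of the guarded
    /// struct or an element of the guarded collection.
    ///
    /// # Example
    ///
    /// A sketch (assuming the crate name `try_rwlock`):
    ///
    /// ```
    /// use try_rwlock::{ReadGuard, TryRwLock};
    ///
    /// let lock = TryRwLock::new((1, "one"));
    /// let name = ReadGuard::map(lock.try_read().unwrap(), |pair| &pair.1);
    /// assert_eq!(*name, "one");
    /// ```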
    pub fn map<V>(guard: Self, f: impl FnOnce(&U) -> &V) -> ReadGuard<'lock, T, V> {
        // Skip the old guard's destructor; its reader count is carried over
        // to the new guard.
        let guard = ManuallyDrop::new(guard);
        ReadGuard {
            data: NonNull::from(f(&**guard)),
            lock: guard.lock,
        }
    }

    /// Undo any mapping, restoring a guard to the whole of the lock's data.
    pub fn unmap(guard: Self) -> ReadGuard<'lock, T> {
        // Skip the old guard's destructor and construct a fresh guard from
        // its reader count.
        let guard = ManuallyDrop::new(guard);
        unsafe { ReadGuard::new(guard.lock) }
    }
}

impl<T, U> Deref for ReadGuard<'_, T, U> {
    type Target = U;

    fn deref(&self) -> &Self::Target {
        unsafe { self.data.as_ref() }
    }
}

impl<T, U> Drop for ReadGuard<'_, T, U> {
    fn drop(&mut self) {
        // Release this guard's unit of the reader count.
        self.lock.readers.fetch_sub(1, atomic::Ordering::Release);
    }
}

impl<T, U: Debug> Debug for ReadGuard<'_, T, U> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_struct("TryRwLockReadGuard")
            .field("data", &**self)
            .finish()
    }
}

impl<T, U: Display> Display for ReadGuard<'_, T, U> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(&**self, f)
    }
}

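/// A RAII guard that grants exclusive write access to the data in a
/// `TryRwLock`.
///
/// As with [`ReadGuard`], the type parameter `U` is the type of data the
/// guard dereferences to; it starts out as `T` and changes when the guard is
/// [mapped](Self::map).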
#[must_use = "if unused the TryRwLock will immediately unlock"]
pub struct WriteGuard<'lock, T, U = T> {
    /// The data currently exposed by the guard; this points into the lock's
    /// `UnsafeCell` unless the guard has been mapped.
    data: NonNull<U>,
    /// The lock that is write-locked by this guard.
    lock: &'lock TryRwLock<T>,
    /// `NonNull<U>` is covariant in `U`, but this guard also hands out
    /// `&mut U`, so it must be invariant in `U`.
    _invariant_over_u: PhantomData<&'lock mut U>,
}

// SAFETY: A `WriteGuard` acts as a `&mut U`, hence `U: Send` for `Send` and
// `U: Sync` for `Sync`; it also holds a `&TryRwLock<T>` that it uses to
// atomically reset the reader count on drop.
unsafe impl<T, U: Send> Send for WriteGuard<'_, T, U> {}
unsafe impl<T, U: Sync> Sync for WriteGuard<'_, T, U> {}

impl<'lock, T> WriteGuard<'lock, T> {
    /// # Safety
    ///
    /// The caller must hold the lock's unique write lock (a reader count of
    /// `usize::MAX`), which is transferred to the returned guard.
    unsafe fn new(lock: &'lock TryRwLock<T>) -> Self {
        Self {
            data: NonNull::new(lock.data.get()).expect("`UnsafeCell::get` never returns null"),
            lock,
            _invariant_over_u: PhantomData,
        }
    }
}

impl<'lock, T, U> WriteGuard<'lock, T, U> {
    /// Get a reference to the `TryRwLock` this guard has write-locked.
    ///
    /// This is an associated function rather than a method to avoid clashing
    /// with methods of the guarded data.
    #[must_use]
    pub fn rwlock(guard: &Self) -> &'lock TryRwLock<T> {
        guard.lock
    }

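    /// Downgrade this write guard to a read guard, consuming it.
    ///
    /// Any mapping is discarded; the returned read guard covers all of the
    /// lock's data.
    ///
    /// # Example
    ///
    /// A sketch (assuming the crate name `try_rwlock`):
    ///
    /// ```
    /// use try_rwlock::{TryRwLock, WriteGuard};
    ///
    /// let lock = TryRwLock::new(1);
    /// let mut writer = lock.try_write().unwrap();
    /// *writer = 2;
    /// // Atomically trade exclusive access for shared access.
    /// let reader = WriteGuard::downgrade(writer);
    /// assert_eq!(*reader, 2);
    /// // Other readers may now take the lock too.
    /// assert!(lock.try_read().is_some());
    /// ```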
    pub fn downgrade(guard: Self) -> ReadGuard<'lock, T> {
        // Skip the write guard's destructor and replace the write-locked
        // sentinel with a reader count of one; the `Release` store publishes
        // all writes made through this guard to future lock holders.
        let guard = ManuallyDrop::new(guard);
        guard.lock.readers.store(1, atomic::Ordering::Release);
        unsafe { ReadGuard::new(guard.lock) }
    }

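    /// Map the data this guard points to, e.g. to a field of the guarded
    /// struct or an element of the guarded collection.
    ///
    /// # Example
    ///
    /// A sketch (assuming the crate name `try_rwlock`):
    ///
    /// ```
    /// use try_rwlock::{TryRwLock, WriteGuard};
    ///
    /// let lock = TryRwLock::new(vec![1, 2, 3]);
    /// let mut first = WriteGuard::map(lock.try_write().unwrap(), |v| &mut v[0]);
    /// *first = 10;
    /// drop(first);
    /// assert_eq!(*lock.try_read().unwrap(), [10, 2, 3]);
    /// ```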
    pub fn map<V>(guard: Self, f: impl FnOnce(&mut U) -> &mut V) -> WriteGuard<'lock, T, V> {
        // Skip the old guard's destructor; its write lock is carried over to
        // the new guard.
        let mut guard = ManuallyDrop::new(guard);
        WriteGuard {
            data: NonNull::from(f(&mut **guard)),
            lock: guard.lock,
            _invariant_over_u: PhantomData,
        }
    }

    /// Undo any mapping, restoring a guard to the whole of the lock's data.
    pub fn unmap(guard: Self) -> WriteGuard<'lock, T> {
        // Skip the old guard's destructor and construct a fresh guard from
        // its write lock.
        let guard = ManuallyDrop::new(guard);
        unsafe { WriteGuard::new(guard.lock) }
    }
}

impl<T, U> Deref for WriteGuard<'_, T, U> {
    type Target = U;

    fn deref(&self) -> &Self::Target {
        unsafe { self.data.as_ref() }
    }
}

impl<T, U> DerefMut for WriteGuard<'_, T, U> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { self.data.as_mut() }
    }
}

impl<T, U> Drop for WriteGuard<'_, T, U> {
    fn drop(&mut self) {
        // Unlock: replace the write-locked sentinel with a reader count of
        // zero, publishing our writes to future lock holders.
        self.lock.readers.store(0, atomic::Ordering::Release);
    }
}

impl<T, U: Debug> Debug for WriteGuard<'_, T, U> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_struct("TryRwLockWriteGuard")
            .field("data", &**self)
            .finish()
    }
}

impl<T, U: Display> Display for WriteGuard<'_, T, U> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(&**self, f)
    }
}

#[test]
fn test_read() {
    let lock = TryRwLock::new("Hello World!".to_owned());
    assert!(!lock.is_locked());
    assert!(!lock.is_write_locked());

    let guard_1 = lock.try_read().unwrap();
    let guard_2 = lock.try_read().unwrap();

    assert_eq!(&*guard_1, "Hello World!");
    assert_eq!(&*guard_2, "Hello World!");

    assert!(lock.try_write().is_none());
    assert!(lock.is_locked());
    assert!(!lock.is_write_locked());
    let guard_1 = ReadGuard::try_upgrade(guard_1).unwrap_err();
    let guard_2 = ReadGuard::try_upgrade(guard_2).unwrap_err();

    drop(guard_1);

    assert!(lock.try_write().is_none());
    assert!(lock.try_read().is_some());
    let guard_2 = ReadGuard::try_upgrade(guard_2).unwrap();
    assert!(lock.try_read().is_none());
    let guard_2 = WriteGuard::downgrade(guard_2);
    assert!(lock.try_read().is_some());

    drop(guard_2);

    assert!(!lock.is_locked());
    assert!(!lock.is_write_locked());
}

#[test]
fn test_read_map() {
    let lock = TryRwLock::new(vec![1u8, 2, 3]);

    let guard_1 = ReadGuard::map(lock.try_read().unwrap(), |v| &v[0]);
    let guard_2 = ReadGuard::map(lock.try_read().unwrap(), |v| &v[1]);
    let guard_3 = ReadGuard::map(lock.try_read().unwrap(), |v| &v[2]);

    assert!(lock.is_locked());
    assert!(!lock.is_write_locked());
    assert_eq!(lock.readers.load(atomic::Ordering::Relaxed), 3);

    assert_eq!(*guard_1, 1);
    assert_eq!(*guard_2, 2);
    assert_eq!(*guard_3, 3);

    let guard_1 = ReadGuard::unmap(guard_1);
    assert_eq!(*guard_1, [1, 2, 3]);

    drop(guard_1);
    drop(guard_2);
    drop(guard_3);

    assert!(!lock.is_locked());
    assert!(!lock.is_write_locked());
    assert_eq!(lock.readers.load(atomic::Ordering::Relaxed), 0);
}

#[test]
fn test_write() {
    let lock = TryRwLock::new("Hello World!".to_owned());

    let mut guard = lock.try_write().unwrap();

    assert_eq!(&*guard, "Hello World!");
    *guard = "Foo".to_owned();
    assert_eq!(&*guard, "Foo");

    assert!(lock.is_locked());
    assert!(lock.is_write_locked());
    assert!(lock.try_read().is_none());
    assert!(lock.try_write().is_none());

    drop(guard);

    assert!(!lock.is_locked());
    assert!(!lock.is_write_locked());
    assert_eq!(&*lock.try_read().unwrap(), "Foo");
}

#[test]
fn test_write_map() {
    let lock = TryRwLock::new(vec![1u8, 2, 3]);

    let guard = WriteGuard::map(lock.try_write().unwrap(), |v| &mut v[0]);

    assert!(lock.is_locked());
    assert!(lock.is_write_locked());
    assert_eq!(lock.readers.load(atomic::Ordering::Relaxed), usize::MAX);

    assert_eq!(*guard, 1);

    let guard = WriteGuard::unmap(guard);
    assert_eq!(*guard, [1, 2, 3]);

    drop(guard);

    assert!(!lock.is_locked());
    assert!(!lock.is_write_locked());
    assert_eq!(lock.readers.load(atomic::Ordering::Relaxed), 0);
}