hybrid_lock/lib.rs
// Copyright 2022 Seiichi Uchida

//! Hybrid locking, or [`parking_lot::RwLock`] with support for optimistic locking.
//!
//! See [the paper](https://dl.acm.org/doi/abs/10.1145/3399666.3399908) for details.
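//!
//! ## Example
//!
//! A small usage sketch: writers take the exclusive lock as usual, while readers
//! can attempt an optimistic, lock-free read that falls back to shared access if
//! a concurrent writer interferes.
//!
//! ```rust
//! use hybrid_lock::HybridLock;
//!
//! let lock = HybridLock::new(0_u64);
//!
//! // Exclusive write access, as with an ordinary `RwLock`.
//! *lock.write() += 1;
//!
//! // SAFETY: the callback only copies a `u64` out through the pointer.
//! let copy_out = |p: *const u64| unsafe { p.read() };
//! let snapshot = unsafe { lock.optimistic(copy_out) };
//! assert_eq!(snapshot, 1);
//! ```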

use std::{
    ops::{Deref, DerefMut},
    sync::atomic::{fence, AtomicU64, Ordering},
};

use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};

/// RAII structure used to release the shared read access of a lock when dropped.
pub struct HybridRwLockReadGuard<'a, T> {
    guard: RwLockReadGuard<'a, T>,
    rw_lock: &'a HybridLock<T>,
}

impl<'a, T> Deref for HybridRwLockReadGuard<'a, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.guard.deref()
    }
}

/// RAII structure used to release the exclusive write access of a lock when dropped.
pub struct HybridRwLockWriteGuard<'a, T> {
    guard: RwLockWriteGuard<'a, T>,
    rw_lock: &'a HybridLock<T>,
}

impl<'a, T> Deref for HybridRwLockWriteGuard<'a, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.guard.deref()
    }
}

impl<'a, T> DerefMut for HybridRwLockWriteGuard<'a, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.guard.deref_mut()
    }
}

/// A hybrid lock: a [`parking_lot::RwLock`] paired with a version counter that
/// lets readers validate optimistic, lock-free reads.
pub struct HybridLock<T> {
    // `T` lives inside the `RwLock`'s `UnsafeCell`, so it can be read through
    // a raw pointer without holding the lock.
    rw_lock: RwLock<T>,
    version: AtomicU64,
}

impl<T> HybridLock<T> {
    /// Creates a new instance of [`HybridLock`].
    pub fn new(t: T) -> HybridLock<T> {
        HybridLock {
            rw_lock: RwLock::new(t),
            version: AtomicU64::default(),
        }
    }

    /// Locks this hybrid lock with shared read access.
    ///
    /// The calling thread will be blocked until there are no writers holding the lock.
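    ///
    /// ## Example
    ///
    /// ```rust
    /// # use hybrid_lock::HybridLock;
    /// let lock = HybridLock::new(5);
    /// let guard = lock.read();
    /// assert_eq!(*guard, 5);
    /// ```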
    pub fn read(&self) -> HybridRwLockReadGuard<T> {
        let guard = self.rw_lock.read();
        HybridRwLockReadGuard {
            guard,
            rw_lock: self,
        }
    }

    /// Locks this hybrid lock with exclusive write access.
    ///
    /// The calling thread will be blocked until there are no readers or writers holding the lock.
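    ///
    /// ## Example
    ///
    /// ```rust
    /// # use hybrid_lock::HybridLock;
    /// let lock = HybridLock::new(5);
    /// *lock.write() += 1;
    /// assert_eq!(*lock.read(), 6);
    /// ```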
    pub fn write(&self) -> HybridRwLockWriteGuard<T> {
        let guard = self.rw_lock.write();
        HybridRwLockWriteGuard {
            guard,
            rw_lock: self,
        }
    }

    /// Runs the given callback optimistically, without acquiring the lock, falling
    /// back to shared read access if the optimistic run fails.
    ///
    /// The calling thread will be blocked only when falling back to acquiring shared
    /// access, which happens when the optimistic run fails due to a concurrent writer.
    #[doc = include_str!("./callback-safety.md")]
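    ///
    /// ## Example
    ///
    /// A minimal single-threaded sketch; the callback only copies the value out
    /// through the raw pointer it is given.
    ///
    /// ```rust
    /// # use hybrid_lock::HybridLock;
    /// let lock = HybridLock::new(7_i32);
    /// // SAFETY: copying an `i32` out through the pointer cannot fault.
    /// let copy_out = |p: *const i32| unsafe { p.read() };
    /// let val = unsafe { lock.optimistic(copy_out) };
    /// assert_eq!(val, 7);
    /// ```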
    pub unsafe fn optimistic<F, R>(&self, f: F) -> R
    where
        F: Fn(*const T) -> R,
    {
        if let Some(result) = self.try_optimistic(&f) {
            result
        } else {
            self.fallback(f)
        }
    }

    /// Runs the given callback without acquiring the lock.
    ///
    /// Returns [`None`] if a concurrent writer holds the lock or updates the data
    /// while the callback runs.
    #[doc = include_str!("./callback-safety.md")]
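    ///
    /// ## Example
    ///
    /// A minimal single-threaded sketch; with no concurrent writer, the
    /// optimistic run succeeds.
    ///
    /// ```rust
    /// # use hybrid_lock::HybridLock;
    /// let lock = HybridLock::new(3_u32);
    /// // SAFETY: the callback only copies the value out through the pointer.
    /// let copy_out = |p: *const u32| unsafe { p.read() };
    /// assert_eq!(unsafe { lock.try_optimistic(copy_out) }, Some(3));
    /// ```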
    pub unsafe fn try_optimistic<F, R>(&self, f: F) -> Option<R>
    where
        F: Fn(*const T) -> R,
    {
        if self.rw_lock.is_locked_exclusive() {
            return None;
        }

        // Read the version before and after running the callback: if it is
        // unchanged, and no writer held the lock in between, the callback saw a
        // consistent snapshot of the data.
        let pre_version = self.current_version();
        let result = f(self.rw_lock.data_ptr());

        // A writer may still hold the lock without having bumped the version yet
        // (the bump happens when its guard drops), so this check is needed in
        // addition to the version comparison below.
        if self.rw_lock.is_locked_exclusive() {
            return None;
        }

        let post_version = self.current_version();
        if pre_version == post_version {
            Some(result)
        } else {
            None
        }
    }

    /// Returns a raw pointer to the underlying data.
    ///
    /// This is useful when you want to validate the optimistic operations yourself,
    /// e.g., when the operation involves one or more optimistic locks.
    ///
    /// ## Example
    ///
    /// ```rust
    /// # use hybrid_lock::HybridLock;
    /// let a = HybridLock::new(1);
    /// let pre_version = a.current_version();
    /// let val = unsafe { a.data_ptr().read() };
    /// let post_version = a.current_version();
    /// if pre_version == post_version {
    ///     // No concurrent writer.
    ///     assert_eq!(val, 1);
    /// }
    /// ```
    pub fn data_ptr(&self) -> *const T {
        self.rw_lock.data_ptr() as *const T
    }

    /// Gets the current version of this lock.
    ///
    /// ## Example
    ///
    /// ```rust
    /// # use hybrid_lock::HybridLock;
    /// let a = HybridLock::new(1);
    /// let pre_version = a.current_version();
    /// let val = unsafe { a.data_ptr().read() };
    /// let post_version = a.current_version();
    /// if pre_version == post_version {
    ///     // No concurrent writer.
    ///     assert_eq!(val, 1);
    /// }
    /// ```
    pub fn current_version(&self) -> u64 {
        // This fence prevents the reordering of `is_locked_exclusive()` and
        // `self.version.load()`. It is necessary because we don't know whether
        // the `RwLock` uses memory orderings strong enough to prevent such
        // reordering on its own.
        fence(Ordering::Acquire);
        self.version.load(Ordering::Acquire)
    }

    fn fallback<F, R>(&self, f: F) -> R
    where
        F: Fn(*const T) -> R,
    {
        // Hold shared read access while running the callback so that a
        // concurrent writer cannot mutate the data underneath it.
        let guard = self.read();
        f(guard.rw_lock.data_ptr())
    }
}

// Bumping the version lets optimistic readers detect that a write happened.
// The bump runs while the exclusive lock is still held, because the inner
// `parking_lot` write guard is only dropped after this body returns.
impl<'a, T> Drop for HybridRwLockWriteGuard<'a, T> {
    fn drop(&mut self) {
        self.rw_lock.version.fetch_add(1, Ordering::Release);
    }
}
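
// A minimal sanity check (a sketch, not part of the original crate) for the
// version protocol: dropping a write guard should advance the version by
// exactly one.
#[cfg(test)]
mod tests {
    use super::HybridLock;

    #[test]
    fn write_bumps_version() {
        let lock = HybridLock::new(0_u32);
        let before = lock.current_version();
        *lock.write() += 1;
        assert_eq!(lock.current_version(), before + 1);
        assert_eq!(*lock.read(), 1);
    }
}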