refcell_lock_api/
raw.rs

//! The actual implementation of [`lock_api::RawRwLock`] based on a [`RefCell`](core::cell::RefCell).
//!
//! ## Implementation Differences
//! Unfortunately, the underlying implementation cannot reuse [`core::cell::RefCell`] directly,
//! because it needs access to implementation internals.
//!
//! However, the implementation should behave identically from an external point of view.
//!
//! This implementation was based on the stdlib version as of Jan. 15, 2024.
//! Here is a link to the original source code:
//! <https://github.com/rust-lang/rust/blob/714b29a17ff5/library/core/src/cell.rs>
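//!
//! ## Example
//! A minimal usage sketch, wrapping the raw lock in a [`lock_api::RwLock`]
//! (the crate path `refcell_lock_api::raw` is assumed here for illustration):
//!
//! ```
//! use lock_api::RwLock;
//! use refcell_lock_api::raw::CellRwLock;
//!
//! let lock: RwLock<CellRwLock, i32> = RwLock::new(1);
//! {
//!     let a = lock.read();
//!     let b = lock.read(); // multiple shared borrows may coexist
//!     assert_eq!(*a + *b, 2);
//! }
//! *lock.write() += 1;
//! assert_eq!(*lock.read(), 2);
//! ```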

use core::cell::Cell;
use core::fmt::{Display, Formatter};
use core::panic::Location;
use lock_api::{GuardNoSend, RawMutex, RawRwLock, RawRwLockRecursive};

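/// A single-threaded [`RawMutex`] built on top of [`CellRwLock`].
///
/// Locking takes an exclusive borrow, so attempting to lock recursively
/// on the same thread panics instead of deadlocking.
///
/// A usage sketch (the crate path `refcell_lock_api::raw` is assumed here for illustration):
/// ```
/// use lock_api::Mutex;
/// use refcell_lock_api::raw::CellMutex;
///
/// let counter: Mutex<CellMutex, u32> = Mutex::new(0);
/// *counter.lock() += 1;
/// assert_eq!(*counter.lock(), 1);
/// assert!(!counter.is_locked()); // all guards have been dropped
/// ```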
pub struct CellMutex(CellRwLock);
unsafe impl RawMutex for CellMutex {
    #[allow(clippy::declare_interior_mutable_const)] // Used as workaround for `const fn` in trait
    const INIT: Self = CellMutex(CellRwLock::INIT);
    type GuardMarker = GuardNoSend;

    #[inline]
    #[track_caller]
    fn lock(&self) {
        self.0.lock_exclusive()
    }

    #[inline]
    #[track_caller]
    fn try_lock(&self) -> bool {
        self.0.try_lock_exclusive()
    }

    #[inline]
    #[track_caller]
    unsafe fn unlock(&self) {
        self.0.unlock_exclusive()
    }

    #[inline]
    #[track_caller]
    fn is_locked(&self) -> bool {
        self.0.is_locked()
    }
}

/// Maintains a count of the number of borrows active,
/// and whether they are mutable or immutable.
///
/// ## Original stdlib docs
/// Positive values represent the number of `Ref` active. Negative values
/// represent the number of `RefMut` active. Multiple `RefMut`s can only be
/// active at a time if they refer to distinct, nonoverlapping components of a
/// `RefCell` (e.g., different ranges of a slice).
///
/// `Ref` and `RefMut` are both two words in size, and so there will likely never
/// be enough `Ref`s or `RefMut`s in existence to overflow half of the `usize`
/// range. Thus, a `BorrowFlag` will probably never overflow or underflow.
/// However, this is not a guarantee, as a pathological program could repeatedly
/// create and then mem::forget `Ref`s or `RefMut`s. Thus, all code must
/// explicitly check for overflow and underflow in order to avoid unsafety, or at
/// least behave correctly in the event that overflow or underflow happens (e.g.,
/// see BorrowRef::new).
///
/// ## Differences from stdlib implementation
/// There are some differences from the implementation used in the stdlib:
/// 1. Multiple mutable references are forbidden
/// 2. Uses a newtype instead of a type alias
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct BorrowFlag {
    count: isize,
}

impl BorrowFlag {
    pub const UNUSED: BorrowFlag = BorrowFlag { count: 0 };
    #[inline]
    pub fn state(self) -> BorrowState {
        // Using a comparison chain for speed
        #[allow(clippy::comparison_chain)]
        if self.count < 0 {
            BorrowState::MutableBorrow
        } else if self.count > 0 {
            BorrowState::SharedBorrow
        } else {
            BorrowState::Unused
        }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum BorrowState {
    MutableBorrow,
    Unused,
    SharedBorrow,
}
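
// For illustration, the sign of `BorrowFlag::count` encodes the borrow kind:
// `BorrowFlag { count: -1 }` is one exclusive borrow, `BorrowFlag { count: 3 }`
// is three shared borrows, and `BorrowFlag::UNUSED` (count 0) is no borrow at all.
// (These transitions are exercised by the test sketch at the end of this file.)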

/// A single-threaded implementation of [`lock_api::RawRwLock`]
/// that mirrors the borrow semantics of a [`RefCell`](core::cell::RefCell).
///
/// This can be used to abstract over single-threaded and multi-threaded code.
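///
/// A usage sketch (the crate path `refcell_lock_api::raw` is assumed here for illustration):
/// ```
/// use lock_api::RwLock;
/// use refcell_lock_api::raw::CellRwLock;
///
/// let lock: RwLock<CellRwLock, Vec<u32>> = RwLock::new(Vec::new());
/// lock.write().push(1);
/// let reader = lock.read();
/// // An exclusive borrow is refused while a shared borrow is live:
/// assert!(lock.try_write().is_none());
/// drop(reader);
/// assert!(lock.try_write().is_some());
/// ```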
#[derive(Debug)]
pub struct CellRwLock {
    borrow_count: Cell<BorrowFlag>,
    /// Stores the location of the earliest active borrow.
    ///
    /// Should be present whenever `self.is_locked()`.
    ///
    /// Used for giving better panic messages.
    /// This is enabled in debug mode by default,
    /// but can be controlled by feature flags.
    #[cfg(debug_location)]
    earliest_borrow_location: Cell<Option<&'static Location<'static>>>,
}

impl CellRwLock {
    #[inline]
    fn earliest_borrow_location(&self) -> Option<&'static Location<'static>> {
        #[cfg(debug_location)]
        {
            self.earliest_borrow_location.get()
        }
        #[cfg(not(debug_location))]
        {
            None
        }
    }

    #[inline]
    #[track_caller]
    fn try_borrow_exclusively(&self) -> Result<(), BorrowFailError> {
        if matches!(self.borrow_count.get().state(), BorrowState::Unused) {
            assert_eq!(self.borrow_count.get().count, 0);
            self.borrow_count.set(BorrowFlag { count: -1 });
            #[cfg(debug_location)]
            self.earliest_borrow_location.set(Some(Location::caller()));
            Ok(())
        } else {
            Err(BorrowFailError {
                is_exclusive: true,
                existing_location: self.earliest_borrow_location(),
            })
        }
    }

    #[inline]
    #[track_caller]
    fn try_borrow_shared(&self) -> Result<(), BorrowFailError> {
        if matches!(
            self.borrow_count.get().state(),
            BorrowState::Unused | BorrowState::SharedBorrow
        ) {
            self.borrow_count.set(BorrowFlag {
                /*
                 * Overflow can only happen by repeatedly leaking guards via mem::forget.
                 *
                 * A program that leaks this rapidly is so degenerate
                 * that we unconditionally panic instead of returning a Result::Err.
                 */
                count: self
                    .borrow_count
                    .get()
                    .count
                    .checked_add(1)
                    .expect("Overflow shared borrows"),
            });
            // Record the location of the earliest shared borrow, maintaining
            // the invariant documented on `earliest_borrow_location`.
            #[cfg(debug_location)]
            {
                if self.earliest_borrow_location.get().is_none() {
                    self.earliest_borrow_location.set(Some(Location::caller()));
                }
            }
            Ok(())
        } else {
            debug_assert_eq!(self.borrow_count.get().state(), BorrowState::MutableBorrow);
            Err(BorrowFailError {
                is_exclusive: false,
                existing_location: self.earliest_borrow_location(),
            })
        }
    }
}
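
/// The error produced when a borrow attempt fails.
///
/// Records which kind of borrow was requested and, when location
/// tracking is enabled, where the earliest conflicting borrow began.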
#[derive(Debug)]
struct BorrowFailError {
    is_exclusive: bool,
    existing_location: Option<&'static Location<'static>>,
}

impl BorrowFailError {
    #[cold]
    #[track_caller]
    pub fn panic(&self) -> ! {
        panic!("{self}")
    }
}

impl Display for BorrowFailError {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        f.write_str("Unable to ")?;
        if self.is_exclusive {
            f.write_str("exclusively ")?
        }
        f.write_str("borrow")?;
        if let Some(existing_location) = self.existing_location {
            write!(
                f,
                ": {existing_borrow_kind} borrowed at {existing_location}",
                existing_borrow_kind = if self.is_exclusive {
                    "Already"
                } else {
                    "Exclusively"
                }
            )?;
        }
        Ok(())
    }
}

unsafe impl RawRwLock for CellRwLock {
    #[allow(clippy::declare_interior_mutable_const)] // Used as workaround for `const fn` in trait
    const INIT: Self = CellRwLock {
        borrow_count: Cell::new(BorrowFlag::UNUSED),
        #[cfg(debug_location)]
        earliest_borrow_location: Cell::new(None),
    };
    type GuardMarker = GuardNoSend;

    #[track_caller]
    #[inline]
    fn lock_shared(&self) {
        /*
         * TODO: Do we want to require using read_recursive?
         *
         * This may be a stumbling block when switching to a real
         * lock that blocks on recursive read locks.
         */
        match self.try_borrow_shared() {
            Ok(()) => {}
            Err(fail) => fail.panic(),
        }
    }

    #[track_caller]
    #[inline]
    fn try_lock_shared(&self) -> bool {
        self.try_borrow_shared().is_ok()
    }

    #[inline]
    #[track_caller]
    unsafe fn unlock_shared(&self) {
        debug_assert_eq!(self.borrow_count.get().state(), BorrowState::SharedBorrow);
        debug_assert!(self.borrow_count.get().count > 0);
        self.borrow_count.set(BorrowFlag {
            count: self.borrow_count.get().count - 1,
        });
        if !self.is_locked() {
            #[cfg(debug_location)]
            self.earliest_borrow_location.set(None);
        }
    }

    #[inline]
    #[track_caller]
    fn lock_exclusive(&self) {
        match self.try_borrow_exclusively() {
            Ok(()) => (),
            Err(e) => e.panic(),
        }
    }

    #[inline]
    #[track_caller]
    fn try_lock_exclusive(&self) -> bool {
        self.try_borrow_exclusively().is_ok()
    }

    #[inline]
    #[track_caller]
    unsafe fn unlock_exclusive(&self) {
        debug_assert_eq!(self.borrow_count.get().state(), BorrowState::MutableBorrow);
        debug_assert!(self.borrow_count.get().count < 0);
        self.borrow_count.set(BorrowFlag {
            count: self.borrow_count.get().count + 1,
        });
        if !self.is_locked() {
            #[cfg(debug_location)]
            self.earliest_borrow_location.set(None);
        }
    }

    #[inline]
    fn is_locked(&self) -> bool {
        match self.borrow_count.get().state() {
            BorrowState::Unused => false,
            BorrowState::MutableBorrow | BorrowState::SharedBorrow => true,
        }
    }

    #[inline]
    fn is_locked_exclusive(&self) -> bool {
        matches!(self.borrow_count.get().state(), BorrowState::MutableBorrow)
    }
}
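
// Shared borrows are naturally reentrant for this single-threaded lock,
// so the recursive variants below simply delegate to the plain shared methods.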
unsafe impl RawRwLockRecursive for CellRwLock {
    #[inline]
    #[track_caller]
    fn lock_shared_recursive(&self) {
        self.lock_shared()
    }

    #[inline]
    #[track_caller]
    fn try_lock_shared_recursive(&self) -> bool {
        self.try_lock_shared()
    }
}
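
// A minimal sanity-test sketch of the borrow-state transitions described above
// (not part of the original implementation; test names are illustrative):
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn borrow_flag_state() {
        assert_eq!(BorrowFlag { count: -1 }.state(), BorrowState::MutableBorrow);
        assert_eq!(BorrowFlag::UNUSED.state(), BorrowState::Unused);
        assert_eq!(BorrowFlag { count: 3 }.state(), BorrowState::SharedBorrow);
    }

    #[test]
    fn shared_and_exclusive_locking() {
        let lock = CellRwLock::INIT;
        assert!(!lock.is_locked());

        // Shared borrows may be taken recursively.
        lock.lock_shared();
        assert!(lock.try_lock_shared());
        // An exclusive borrow is refused while shared borrows are live.
        assert!(!lock.try_lock_exclusive());

        // SAFETY: balanced against the two shared locks taken above.
        unsafe {
            lock.unlock_shared();
            lock.unlock_shared();
        }
        assert!(!lock.is_locked());

        // Once unused, an exclusive borrow succeeds and excludes readers.
        assert!(lock.try_lock_exclusive());
        assert!(lock.is_locked_exclusive());
        assert!(!lock.try_lock_shared());
        // SAFETY: balanced against the exclusive lock taken above.
        unsafe { lock.unlock_exclusive() };
        assert!(!lock.is_locked());
    }
}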