rt_ref/
cell_ref.rs

1use std::{
2    mem,
3    ops::Deref,
4    sync::atomic::{AtomicUsize, Ordering},
5};
6
7use crate::RefOverflow;
8
/// An immutable reference to data in a `Cell`.
///
/// Access the value via `std::ops::Deref` (e.g. `*val`)
#[derive(Debug)]
pub struct CellRef<'a, T>
where
    T: ?Sized + 'a,
{
    // Shared borrow counter: incremented by `try_clone`, decremented by `Drop`.
    pub(crate) flag: &'a AtomicUsize,
    // The borrowed value itself.
    pub(crate) value: &'a T,
}
20
/// Cast max `isize` as `usize`, so we don't have to do it in multiple places.
///
/// `try_clone` refuses to raise the reference count past this limit.
pub(crate) const REF_LIMIT_MAX: usize = isize::MAX as usize;
23
24impl<'a, T> CellRef<'a, T>
25where
26    T: ?Sized,
27{
28    /// Returns a clone of this `CellRef`.
29    ///
30    /// This method allows handling of reference overflows, but:
31    ///
32    /// * Having 2 billion (32-bit system) / 9 quintillion (64-bit system)
33    ///   references to an object is not a realistic scenario in most
34    ///   applications.
35    ///
36    /// * Applications that hold `CellRef`s with an ever-increasing reference
37    ///   count are not supported by this library.
38    ///
39    ///     Reaching `isize::MAX` may be possible with
40    ///     `std::mem::forget(CellRef::clone(&r))`.
41    // https://github.com/rust-lang/rust-clippy/issues/14275
42    #[allow(clippy::doc_overindented_list_items)]
43    pub fn try_clone(&self) -> Result<Self, RefOverflow> {
44        let previous_value = self.flag.fetch_add(1, Ordering::Relaxed);
45
46        let overflow = previous_value >= REF_LIMIT_MAX;
47        if unlikely(overflow) {
48            self.flag.fetch_sub(1, Ordering::Relaxed);
49            Err(RefOverflow)
50        } else {
51            Ok(CellRef {
52                flag: self.flag,
53                value: self.value,
54            })
55        }
56    }
57
58    /// Makes a new `CellRef` for a component of the borrowed data which
59    /// preserves the existing borrow.
60    ///
61    /// The `Cell` is already immutably borrowed, so this cannot fail.
62    ///
63    /// This is an associated function that needs to be used as
64    /// `CellRef::map(...)`. A method would interfere with methods of the
65    /// same name on the contents of a `CellRef` used through `Deref`.
66    /// Further this preserves the borrow of the value and hence does the
67    /// proper cleanup when it's dropped.
68    ///
69    /// # Examples
70    ///
71    /// This can be used to avoid pointer indirection when a boxed item is
72    /// stored in the `Cell`.
73    ///
74    /// ```rust
75    /// use rt_ref::{Cell, CellRef};
76    ///
77    /// let cb = Cell::new(Box::new(5));
78    ///
79    /// // Borrowing the cell causes the `CellRef` to store a reference to the `Box`, which is a
80    /// // pointer to the value on the heap, not the actual value.
81    /// let boxed_ref: CellRef<'_, Box<usize>> = cb.borrow();
82    /// assert_eq!(**boxed_ref, 5); // Notice the double deref to get the actual value.
83    ///
84    /// // By using `map` we can let `CellRef` store a reference directly to the value on the heap.
85    /// let pure_ref: CellRef<'_, usize> = CellRef::map(boxed_ref, Box::as_ref);
86    ///
87    /// assert_eq!(*pure_ref, 5);
88    /// ```
89    ///
90    /// We can also use `map` to get a reference to a sub-part of the borrowed
91    /// value.
92    ///
93    /// ```rust
94    /// # use rt_ref::{Cell, CellRef};
95    ///
96    /// let c = Cell::new((5, 'b'));
97    /// let b1: CellRef<'_, (u32, char)> = c.borrow();
98    /// let b2: CellRef<'_, u32> = CellRef::map(b1, |t| &t.0);
99    /// assert_eq!(*b2, 5);
100    /// ```
101    pub fn map<U, F>(self, f: F) -> CellRef<'a, U>
102    where
103        F: FnOnce(&T) -> &U,
104        U: ?Sized,
105    {
106        let flag = unsafe { &*(self.flag as *const _) };
107        let value = unsafe { &*(self.value as *const _) };
108
109        mem::forget(self);
110
111        CellRef {
112            flag,
113            value: f(value),
114        }
115    }
116}
117
118impl<'a, T> Deref for CellRef<'a, T>
119where
120    T: ?Sized,
121{
122    type Target = T;
123
124    fn deref(&self) -> &T {
125        self.value
126    }
127}
128
impl<'a, T> Drop for CellRef<'a, T>
where
    T: ?Sized,
{
    fn drop(&mut self) {
        // Release this immutable borrow by decrementing the shared counter.
        // NOTE(review): `Release` here vs `Relaxed` in `try_clone` is
        // asymmetric — presumably intentional so the decrement is ordered
        // after all uses of the borrow; confirm against the `Cell` borrow
        // logic before changing either ordering.
        self.flag.fetch_sub(1, Ordering::Release);
    }
}
137
138impl<'a, T> Clone for CellRef<'a, T>
139where
140    T: ?Sized,
141{
142    /// Returns a clone of this `CellRef`.
143    ///
144    /// # Panics
145    ///
146    /// Panics if the number of references is `isize::MAX`:
147    ///
148    /// * Having 2 billion / 9 quintillion references to an object is not a
149    ///   realistic scenario in most applications.
150    /// * Applications that hold `CellRef`s with an ever-increasing reference
151    ///   count are not supported by this library.
152    ///
153    ///     Reaching `isize::MAX` may be possible with
154    ///     `std::mem::forget(CellRef::clone(&r))`.
155    // https://github.com/rust-lang/rust-clippy/issues/14275
156    #[allow(clippy::doc_overindented_list_items)]
157    fn clone(&self) -> Self {
158        self.try_clone()
159            .unwrap_or_else(|e| panic!("Failed to clone `CellRef`: {e}"))
160    }
161}
162
/// Trick to mimic `std::intrinsics::unlikely` on stable Rust.
///
/// The `#[cold]` attribute tells the optimizer this function is rarely
/// executed, which biases branch layout against paths that call it.
#[cold]
#[inline(always)]
fn cold() {}

/// Returns `cond` unchanged while hinting that `true` is the rare case.
#[inline(always)]
fn unlikely(cond: bool) -> bool {
    match cond {
        true => {
            cold();
            true
        }
        false => false,
    }
}
175
#[cfg(test)]
mod tests {
    use std::{
        error::Error,
        sync::atomic::{AtomicUsize, Ordering},
    };

    use crate::RefOverflow;

    use super::{CellRef, REF_LIMIT_MAX};

    #[test]
    fn try_clone_returns_ok_when_ref_count_less_than_isize_max() {
        let counter = AtomicUsize::new(1);
        let data = 1u32;
        let cell_ref = CellRef {
            flag: &counter,
            value: &data,
        };

        // One borrow outstanding before cloning.
        assert_eq!(1, counter.load(Ordering::SeqCst));

        let cloned = cell_ref.try_clone().expect("try_clone_result to be ok");

        // The clone bumped the shared count.
        assert_eq!(2, cloned.flag.load(Ordering::SeqCst));
    }

    #[test]
    fn try_clone_returns_err_when_ref_count_equals_isize_max() {
        let counter = AtomicUsize::new(REF_LIMIT_MAX);
        let data = 1u32;
        let cell_ref = CellRef {
            flag: &counter,
            value: &data,
        };

        assert_eq!(REF_LIMIT_MAX, counter.load(Ordering::SeqCst));

        let e = cell_ref
            .try_clone()
            .expect_err("try_clone_result to be err");

        assert_eq!(RefOverflow, e);
        assert!(e.source().is_none());

        // The failed clone must roll back its increment.
        assert_eq!(REF_LIMIT_MAX, counter.load(Ordering::SeqCst));
    }

    #[test]
    fn clone_returns_cell_ref_when_ref_count_less_than_isize_max() {
        let counter = AtomicUsize::new(1);
        let data = 1u32;
        let cell_ref = CellRef {
            flag: &counter,
            value: &data,
        };

        assert_eq!(1, counter.load(Ordering::SeqCst));

        let cloned = cell_ref.clone();

        // Both handles observe the incremented count.
        assert_eq!(2, cell_ref.flag.load(Ordering::SeqCst));
        assert_eq!(2, cloned.flag.load(Ordering::SeqCst));
    }

    #[test]
    #[should_panic(expected = "Failed to clone `CellRef`: Ref count exceeded `isize::MAX`")]
    fn clone_panics_when_ref_count_equals_isize_max() {
        let counter = AtomicUsize::new(REF_LIMIT_MAX);
        let data = 1u32;
        let cell_ref = CellRef {
            flag: &counter,
            value: &data,
        };

        assert_eq!(REF_LIMIT_MAX, counter.load(Ordering::SeqCst));

        let _clone = cell_ref.clone();
    }
}