moveref/move_ref.rs

use core::{
    ops::{Deref, DerefMut},
    pin::Pin,
};

use crate::slot_storage::SlotStorageStatus;

/// A "reference" type which *uniquely* owns its referent of type `T` with respect to external
/// storage with lifetime `'frame`.
///
/// Conceptually, it has these characteristics:
///
/// - similar to `&'frame mut` because it *uniquely* references other data with lifetime `'frame`
/// - similar to `Box` because it is *owning*
///
/// What distinguishes [`MoveRef`] from `&mut` and [`Box`](crate::Box) is that it is created from
/// a backing storage [`Slot`](crate::Slot), which defines its ownership of the referent data, and
/// that the backing storage is ultimately responsible for running the referent's destructor when
/// it finally goes out of scope.
///
/// A motivating example for [`MoveRef`] is placement-initialization in C++:
///
/// Imagine we define FFI bindings for a C++ class we intend to use in Rust.
///
/// Creating instances of this class on the heap is straightforward and well understood: we can
/// use raw pointers and eventually convert them to a reference or a [`Box`](crate::Box).
///
/// Creating instances of this class on the stack is more difficult. We can use
/// [`MaybeUninit`](core::mem::MaybeUninit) to reserve a chunk of memory and initialize into it.
///
/// But we have to be particularly careful when using the result, because Rust moves data by
/// default, whereas C++ copies by default. Any access of the data in Rust could therefore move it
/// out from under a location that C++ still expects to be valid, causing a crash when execution
/// proceeds again in C++.
///
/// So we need a type which acts like a (mutable) reference but does not let us move the referent
/// simply by accessing it. This is similar to a [`Pin<&mut T>`], where the [`Pin`] prevents
/// movement but the inner `&mut` still allows mutation.
///
/// But we also want the ability to *actually* move the data in some cases, as we would do
/// explicitly in C++ with a move constructor or move-assignment operator.
///
/// This interface is exactly what [`MoveRef`] provides, together with
/// [`DerefMove`](crate::DerefMove).
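///
/// # Examples
///
/// A minimal sketch of stack placement with `bind!` (adapted from this module's tests; it assumes
/// the crate is consumed as `moveref` and that `bind!` and [`MoveRef`] are exported at the crate
/// root):
///
/// ```ignore
/// use moveref::{bind, MoveRef};
///
/// // Bind a `MoveRef` to a value placed in macro-created backing storage.
/// bind!(x: MoveRef<i32> = &move 42);
/// assert_eq!(*x, 42);
///
/// // Explicitly move the referent back out by value; the backing storage will not drop it again.
/// let value: i32 = x.into_inner();
/// assert_eq!(value, 42);
/// ```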
pub struct MoveRef<'frame, T: ?Sized> {
    /// The underlying mutable reference with referent stored in some external [`Slot`](crate::Slot).
    pub(crate) ptr: &'frame mut T,
    /// Status flags for the storage which track initialization, dropping state, and reference count.
    pub(crate) status: SlotStorageStatus<'frame>,
}

impl<'frame, T: ?Sized> core::fmt::Debug for MoveRef<'frame, T>
where
    T: core::fmt::Debug,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        return core::fmt::Debug::fmt(self.ptr, f);
    }
}

impl<T: ?Sized> Deref for MoveRef<'_, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        return self.ptr;
    }
}

impl<T: ?Sized> DerefMut for MoveRef<'_, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        return self.ptr;
    }
}

impl<T: ?Sized> Drop for MoveRef<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // If ownership was released (e.g. via `MoveRef::release`), the caller is responsible for
        // destruction, so there is nothing to do here.
        if self.status.is_released() {
            return; // tarpaulin
        }
        // Otherwise, mark the storage status as terminated and run the referent's destructor.
        self.status.terminate();
        unsafe { core::ptr::drop_in_place(self.ptr) }
    }
}

impl<'frame, T: ?Sized> MoveRef<'frame, T> {
    /// Create a new unchecked [`MoveRef`] from a mutable ref and [`SlotStorageStatus`].
    #[inline]
    pub(crate) unsafe fn new_unchecked(
        ptr: &'frame mut T,
        status: SlotStorageStatus<'frame>,
    ) -> Self {
        return Self { ptr, status };
    }

    /// Transform a [`MoveRef<T>`] into a [`Pin<MoveRef<T>>`]. This is safe because the interface
    /// for [`MoveRef`] enforces that its referent will not be implicitly moved or have its storage
    /// invalidated until the [`MoveRef<T>`] (and its backing [`Slot`](crate::Slot)) is dropped.
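    ///
    /// A minimal sketch (assuming the crate is consumed as `moveref` with `bind!` exported at the
    /// crate root):
    ///
    /// ```ignore
    /// use moveref::{bind, MoveRef};
    ///
    /// bind!(x: MoveRef<i32> = &move 7);
    /// // Wrapping in `Pin` prevents further implicit moves of the referent.
    /// let pinned = x.into_pin();
    /// assert_eq!(*pinned, 7);
    /// ```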
    #[must_use]
    #[inline]
    pub fn into_pin(self) -> Pin<Self> {
        return unsafe { Pin::new_unchecked(self) }; // tarpaulin
    }

    /// Consume a [`Pin<Self>`] and return a raw `*mut T`. This operation inhibits destruction of
    /// `T` by implicit [`Drop`]; the caller becomes responsible for eventual explicit destruction
    /// and cleanup, otherwise the memory will leak.
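    ///
    /// A minimal sketch of manual cleanup after releasing (assuming the crate is consumed as
    /// `moveref` with `bind!` exported at the crate root; `String` stands in for any type with a
    /// destructor):
    ///
    /// ```ignore
    /// use moveref::{bind, MoveRef};
    ///
    /// bind!(x: MoveRef<String> = &move String::from("hello"));
    /// let raw: *mut String = MoveRef::release(x.into_pin());
    /// // The backing storage will no longer drop the referent, so we must destroy it ourselves.
    /// unsafe { core::ptr::drop_in_place(raw) };
    /// ```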
    #[inline]
    #[must_use]
    pub fn release(pin: Pin<Self>) -> *mut T {
        let mov = unsafe { Pin::into_inner_unchecked(pin) }; // tarpaulin
        unsafe { mov.status.release() };
        return mov.ptr;
    }
}

impl<'frame, T> MoveRef<'frame, T> {
    /// Consume the [`MoveRef`] and move its referent out of the backing storage, returning it by
    /// value. The backing storage will not run the referent's destructor afterwards.
    #[must_use]
    #[inline]
    pub fn into_inner(self) -> T {
        let pin = unsafe { Pin::new_unchecked(self) }; // tarpaulin
        let ptr = MoveRef::release(pin);
        return unsafe { core::ptr::read(ptr) };
    }

    /// Return a raw const pointer to the referent.
    #[must_use]
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        return self.ptr;
    }

    /// Return a raw mutable pointer to the referent.
    #[must_use]
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        return self.ptr;
    }
}

impl<'s, 't, S, T: ?Sized> PartialEq<MoveRef<'s, S>> for MoveRef<'t, T>
where
    T: PartialEq<S>,
{
    #[inline]
    fn eq(&self, other: &MoveRef<'s, S>) -> bool {
        return self.ptr == other.ptr;
    }
}

impl<'t, T> Eq for MoveRef<'t, T> where T: Eq {}

impl<'s, 't, S, T: ?Sized> PartialOrd<MoveRef<'s, S>> for MoveRef<'t, T>
where
    T: PartialOrd<S>,
{
    #[inline]
    fn partial_cmp(&self, other: &MoveRef<'s, S>) -> Option<core::cmp::Ordering> {
        return self.ptr.partial_cmp(&other.ptr);
    }

    #[inline]
    fn lt(&self, other: &MoveRef<'s, S>) -> bool {
        return self.ptr.lt(&other.ptr);
    }

    #[inline]
    fn le(&self, other: &MoveRef<'s, S>) -> bool {
        return self.ptr.le(&other.ptr);
    }

    #[inline]
    fn gt(&self, other: &MoveRef<'s, S>) -> bool {
        return self.ptr.gt(&other.ptr);
    }

    #[inline]
    fn ge(&self, other: &MoveRef<'s, S>) -> bool {
        return self.ptr.ge(&other.ptr);
    }
}

impl<'t, T> Ord for MoveRef<'t, T>
where
    T: Ord,
{
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        return self.ptr.cmp(&other.ptr);
    }
}

impl<'t, T: ?Sized> core::hash::Hash for MoveRef<'t, T>
where
    T: core::hash::Hash,
{
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.ptr.hash(state);
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::*;

    #[cfg(feature = "alloc")]
    #[test]
    fn deref_move_of_move_ref() {
        bind!(x: MoveRef<crate::Box<i32>> = &move crate::Box::new(5));
        bind!(y: MoveRef<crate::Box<i32>> = &move *x);
        let z = y;
        assert_eq!(**z, 5);
    }

    #[cfg(feature = "alloc")]
    #[test]
    fn deref_move_of_box() {
        let x = crate::Box::new(5);
        bind!(y: MoveRef<i32> = &move *x);
        let z = y;
        assert_eq!(*z, 5);
    }

    #[cfg(feature = "alloc")]
    #[test]
    fn move_ref_into_inner() {
        bind!(x: MoveRef<crate::Box<i32>> = &move crate::Box::new(5));
        let y = x.into_inner();
        assert_eq!(*y, 5);
    }

    #[test]
    #[should_panic(expected = "a critical reference counter at")]
    fn forget_move_ref() {
        bind!(x: MoveRef<i32> = &move 42);
        core::mem::forget(x);
    }

    #[test]
    #[should_panic(expected = "a critical reference counter at")]
    fn forget_move_ref_temporary() {
        core::mem::forget(expr!(&move 42));
    }

    #[cfg_attr(miri, ignore)]
    #[cfg(all(feature = "alloc", not(feature = "valgrind")))]
    #[test]
    #[should_panic(expected = "a critical reference counter at")]
    fn forget_deref_moved_box() {
        let mut x = crate::Box::new(5);
        let ptr = x.as_mut() as *mut i32;
        core::mem::forget(expr!(&move *x));
        unsafe {
            alloc::alloc::dealloc(ptr as *mut u8, alloc::alloc::Layout::new::<i32>());
        }
    }

    #[test]
    fn release_inhibits_drop() {
        struct T;
        impl Drop for T {
            fn drop(&mut self) {
                panic!();
            }
        }
        let val = T;
        bind!(t = crate::new::of(val));
        let _ = MoveRef::release(t);
    }

    mod coverage {
        use super::*;

        mod move_ref {
            use super::*;

            const VAL1: &str = "value1";
            const VAL2: &str = "value2";

            #[test]
            fn as_ptr() {
                bind!(val = &move *Box::new(VAL1));
                let ptr = val.as_ptr();
                assert_eq!(VAL1, unsafe { *ptr });
            }

            #[test]
            fn as_mut_ptr() {
                bind!(mut val = &move *Box::new(VAL1));
                let ptr = val.as_mut_ptr();
                assert_eq!(VAL1, unsafe { *ptr });
                unsafe { ptr.write(VAL2) };
                assert_eq!(VAL2, unsafe { *ptr });
            }

            #[test]
            fn deref_mut() {
                bind!(mut val = &move VAL1);
                assert_eq!(VAL1, *val);
                *val = VAL2;
                assert_eq!(VAL2, *val);
            }

            #[test]
            fn fmt() {
                use crate::alloc::format;
                bind!(val = &move VAL1);
                assert_eq!(format!("{VAL1:#?}"), format!("{val:#?}"));
            }

            #[test]
            fn partial_eq() {
                bind!(lhs = &move VAL1);
                bind!(rhs = &move VAL1);
                assert!(lhs.eq(&rhs));
            }

            #[test]
            fn partial_cmp() {
                bind!(lhs = &move VAL1);
                bind!(rhs = &move VAL1);
                assert!(matches!(
                    lhs.partial_cmp(&rhs),
                    Some(core::cmp::Ordering::Equal)
                ));
            }

            #[test]
            fn lt() {
                bind!(lhs = &move VAL1);
                bind!(rhs = &move VAL2);
                assert!(lhs.lt(&rhs));
            }

            #[test]
            fn le() {
                bind!(lhs = &move VAL1);
                bind!(rhs = &move VAL2);
                assert!(lhs.le(&rhs));
            }

            #[test]
            fn gt() {
                bind!(lhs = &move VAL2);
                bind!(rhs = &move VAL1);
                assert!(lhs.gt(&rhs));
            }

            #[test]
            fn ge() {
                bind!(lhs = &move VAL2);
                bind!(rhs = &move VAL1);
                assert!(lhs.ge(&rhs));
            }

            #[test]
            fn cmp() {
                bind!(lhs = &move VAL1);
                bind!(rhs = &move VAL2);
                assert!(matches!(lhs.cmp(&rhs), core::cmp::Ordering::Less));
            }

            #[cfg(feature = "default")]
            #[test]
            fn hash() {
                use core::hash::{Hash, Hasher};
                bind!(lhs = &move VAL1);
                let hash1 = {
                    let mut hasher = seahash::SeaHasher::new();
                    lhs.hash(&mut hasher);
                    hasher.finish()
                };
                bind!(rhs = &move VAL1);
                let hash2 = {
                    let mut hasher = seahash::SeaHasher::new();
                    rhs.hash(&mut hasher);
                    hasher.finish()
                };
                assert_eq!(hash1, hash2);
            }
        }
    }
}