infinity_pool/handles/managed_mut.rs

use std::borrow::{Borrow, BorrowMut};
use std::ops::{Deref, DerefMut};
use std::pin::Pin;
use std::ptr::NonNull;
use std::sync::Arc;
use std::{fmt, mem, ptr};

use parking_lot::Mutex;

use crate::{Pooled, RawOpaquePoolSend, RawPooledMut};

// Note that while this is a thread-safe handle, we do not require `T: Send` because
// we do not want to require every trait we cast into via trait object to be `Send`.
// It is the responsibility of the pool to ensure that only `Send` objects are inserted.

/// A unique thread-safe reference-counting handle for a pooled object.
#[doc = include_str!("../../doc/snippets/ref_counted_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/nonlocal_handle_thread_safety.md")]
pub struct PooledMut<T: ?Sized> {
    inner: RawPooledMut<T>,
    pool: Arc<Mutex<RawOpaquePoolSend>>,
}

impl<T: ?Sized> PooledMut<T> {
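    /// Wraps a raw pooled handle together with the pool that owns the underlying
    /// object, so the object can later be removed from the pool when this handle
    /// is dropped or consumed.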
    #[must_use]
    pub(crate) fn new(inner: RawPooledMut<T>, pool: Arc<Mutex<RawOpaquePoolSend>>) -> Self {
        Self { inner, pool }
    }

    #[doc = include_str!("../../doc/snippets/handle_ptr.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn ptr(&self) -> NonNull<T> {
        self.inner.ptr()
    }

    #[doc = include_str!("../../doc/snippets/handle_into_shared.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn into_shared(self) -> Pooled<T> {
        let (inner, pool) = self.into_parts();

        Pooled::new(inner, pool)
    }

    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    fn into_parts(self) -> (RawPooledMut<T>, Arc<Mutex<RawOpaquePoolSend>>) {
        // We transfer these fields to the caller, so we do not want the current handle
        // to be dropped. Hence we perform raw reads to extract the fields directly.

        // SAFETY: The target is valid for reads.
        let pool = unsafe { ptr::read(&raw const self.pool) };
        // SAFETY: The target is valid for reads.
        let inner = unsafe { ptr::read(&raw const self.inner) };

        // This is effectively "destructuring a type that implements Drop": forgetting
        // `self` ensures its destructor does not run on the fields we just moved out.
        mem::forget(self);

        (inner, pool)
    }

    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn as_pin(&self) -> Pin<&T> {
        // SAFETY: Pooled items are always pinned.
        unsafe { Pin::new_unchecked(self) }
    }

    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin_mut.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn as_pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself.
        let as_mut = unsafe { self.ptr().as_mut() };

        // SAFETY: Pooled items are always pinned.
        unsafe { Pin::new_unchecked(as_mut) }
    }

    /// Casts this handle to reference the target as a trait object.
    ///
    /// This method is only intended for use by the [`define_pooled_dyn_cast!`] macro
    /// for type-safe casting operations.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the provided closure's input and output references
    /// point to the same object.
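    ///
    /// A typical cast function is just an unsizing re-borrow of the same object, for
    /// example `|x| x as &mut dyn SomeTrait` (where `SomeTrait` stands in for whatever
    /// trait the cast targets).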
    #[doc(hidden)]
    #[must_use]
    #[inline]
    pub unsafe fn __private_cast_dyn_with_fn<U: ?Sized, F>(self, cast_fn: F) -> PooledMut<U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        let (inner, pool) = self.into_parts();

        // SAFETY: Forwarding callback safety guarantees from the caller.
        // We are an exclusive handle, so we always have the right to create
        // exclusive references to the target of the handle, satisfying that requirement.
        let new_inner = unsafe { inner.__private_cast_dyn_with_fn(cast_fn) };

        PooledMut {
            inner: new_inner,
            pool,
        }
    }
}

impl<T> PooledMut<T>
where
    T: Unpin,
{
    #[doc = include_str!("../../doc/snippets/ref_counted_into_inner.md")]
    #[must_use]
    #[inline]
    pub fn into_inner(self) -> T {
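        // `into_parts()` disarms our `Drop` impl, so the object is not dropped in place;
        // instead we remove it from the pool and return it by value.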
        let (inner, pool) = self.into_parts();

        let mut pool = pool.lock();
        pool.remove_mut_unpin(inner)
    }
}

impl<T: ?Sized> fmt::Debug for PooledMut<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("PooledMut")
            .field("inner", &self.inner)
            .field("pool", &self.pool)
            .finish()
    }
}

impl<T: ?Sized> Deref for PooledMut<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself.
        // We guarantee liveness by being a reference counted handle.
        unsafe { self.ptr().as_ref() }
    }
}

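// Mutable access via `DerefMut`, `BorrowMut`, and `AsMut` is only offered for
// `T: Unpin` because pooled objects are always pinned; for pinned types, use
// `as_pin_mut()` to obtain `Pin<&mut T>` instead.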
impl<T> DerefMut for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself.
        // We guarantee liveness by being a reference counted handle.
        unsafe { self.ptr().as_mut() }
    }
}

impl<T: ?Sized> Borrow<T> for PooledMut<T> {
    #[inline]
    fn borrow(&self) -> &T {
        self
    }
}

impl<T> BorrowMut<T> for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    fn borrow_mut(&mut self) -> &mut T {
        self
    }
}

impl<T: ?Sized> AsRef<T> for PooledMut<T> {
    #[inline]
    fn as_ref(&self) -> &T {
        self
    }
}

impl<T> AsMut<T> for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    fn as_mut(&mut self) -> &mut T {
        self
    }
}

impl<T: ?Sized> Drop for PooledMut<T> {
    fn drop(&mut self) {
        // While `RawPooledMut` is technically not Copy, we use our insider knowledge
        // that in reality it is just a (potentially fat) pointer, so it is safe to copy.
        // The only reason it is not Copy is to enforce uniqueness, which does not matter
        // here because the copy in `self` is going away. We simply do not want to wrap
        // the field in an `Option` that every other method would then have to check.
        //
        // SAFETY: The target is valid for reads.
        let inner = unsafe { ptr::read(&raw const self.inner) };

        let mut pool = self.pool.lock();
        pool.remove_mut(inner);
    }
}

#[cfg(test)]
mod tests {
    use std::cell::Cell;
    use std::thread;

    use static_assertions::{assert_impl_all, assert_not_impl_any};

    use super::*;

    // u32 is Send and Sync; PooledMut<u32> should be Send but not Sync.
    assert_impl_all!(PooledMut<u32>: Send);
    assert_not_impl_any!(PooledMut<u32>: Sync);

    // Cell<u32> is Send but not Sync, so PooledMut<Cell<u32>> should be Send (but not
    // Sync) because unique handles only need `T: Send`.
    assert_impl_all!(PooledMut<Cell<u32>>: Send);
    assert_not_impl_any!(PooledMut<Cell<u32>>: Sync);

    // Non-Send types should make the handle non-Send.
    assert_not_impl_any!(PooledMut<std::rc::Rc<i32>>: Send);

    // We expect `RawPooledMut` to have no destructor because we bitwise-copy it
    // in our own `Drop::drop()` above.
    assert_not_impl_any!(RawPooledMut<()>: Drop);

    #[test]
    fn unique_handle_can_cross_threads_with_send_only() {
        use crate::OpaquePool;

        // A type that is Send but not Sync.
        struct Counter {
            value: Cell<i32>,
        }

        // SAFETY: Counter is designed to be Send but not Sync for testing.
        unsafe impl Send for Counter {}

        impl Counter {
            fn new(value: i32) -> Self {
                Self {
                    value: Cell::new(value),
                }
            }

            fn increment(&self) {
                self.value.set(self.value.get() + 1);
            }

            fn get(&self) -> i32 {
                self.value.get()
            }
        }

        let pool = OpaquePool::with_layout_of::<Counter>();
        let handle = pool.insert(Counter::new(0));

        // Increment in main thread.
        handle.increment();
        assert_eq!(handle.get(), 1);

        // Move handle to another thread (requires Send but not Sync).
        let handle_in_thread = thread::spawn(move || {
            handle.increment();
            assert_eq!(handle.get(), 2);
            handle
        })
        .join()
        .unwrap();

        // Back in main thread.
        assert_eq!(handle_in_thread.get(), 2);
    }
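
    // A minimal sketch of extracting a value back out of the pool via `into_inner()`,
    // assuming the same `OpaquePool::with_layout_of` / `insert` API used in the test
    // above and that `insert` hands back a `PooledMut`.
    #[test]
    fn into_inner_returns_original_value() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<u64>();
        let handle = pool.insert(42_u64);

        // `u64` is `Unpin`, so the object can be removed from the pool by value.
        assert_eq!(handle.into_inner(), 42);
    }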
}