infinity_pool/handles/managed_mut.rs

use std::any::type_name;
use std::borrow::{Borrow, BorrowMut};
use std::ops::{Deref, DerefMut};
use std::pin::Pin;
use std::ptr::NonNull;
use std::sync::Arc;
use std::{fmt, mem, ptr};

use parking_lot::Mutex;

use crate::{Pooled, RawOpaquePoolThreadSafe, RawPooledMut};

// Note that while this is a thread-safe handle, we do not require `T: Send` because
// we do not want to require every trait we cast to via trait objects to be `Send`.
// It is the responsibility of the pool to ensure that only `Send` objects are inserted.

/// A unique thread-safe reference-counting handle for a pooled object.
#[doc = include_str!("../../doc/snippets/ref_counted_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/nonlocal_handle_thread_safety.md")]
pub struct PooledMut<T: ?Sized> {
    // We inherit our thread-safety traits from this one (Send from T, Sync always).
    inner: RawPooledMut<T>,

    pool: Arc<Mutex<RawOpaquePoolThreadSafe>>,
}

impl<T: ?Sized> PooledMut<T> {
    /// # Safety
    ///
    /// Even though the signature does not require `T: Send`, the underlying object must be `Send`.
    /// The bound is omitted from the signature so that the handle remains compatible with casting
    /// to trait objects that do not have `Send` as a supertrait.
    #[must_use]
    pub(crate) unsafe fn new(
        inner: RawPooledMut<T>,
        pool: Arc<Mutex<RawOpaquePoolThreadSafe>>,
    ) -> Self {
        Self { inner, pool }
    }

    #[doc = include_str!("../../doc/snippets/handle_ptr.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn ptr(&self) -> NonNull<T> {
        self.inner.ptr()
    }

    #[doc = include_str!("../../doc/snippets/handle_into_shared.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn into_shared(self) -> Pooled<T> {
        let (inner, pool) = self.into_parts();

        // SAFETY: Guaranteed by ::new() - we only ever create these handles for `T: Send`,
        // though the `Send` bound may be absent from the signature after casting or type erasure.
        unsafe { Pooled::new(inner, pool) }
    }

    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    fn into_parts(self) -> (RawPooledMut<T>, Arc<Mutex<RawOpaquePoolThreadSafe>>) {
        // We transfer these fields to the caller, so we do not want the current handle
        // to be dropped. Hence we perform raw reads to extract the fields directly.

        // SAFETY: The target is valid for reads.
        let pool = unsafe { ptr::read(&raw const self.pool) };
        // SAFETY: The target is valid for reads.
        let inner = unsafe { ptr::read(&raw const self.inner) };

        // We are just manually destructuring a type that implements `Drop` here.
        mem::forget(self);

        (inner, pool)
    }

    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin.md")]
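    ///
    /// A minimal usage sketch (illustrative only, not run as a doc test; pool setup is
    /// assumed to mirror the tests below):
    ///
    /// ```ignore
    /// let pool = OpaquePool::with_layout_of::<String>();
    /// let handle = pool.insert(String::from("pinned"));
    ///
    /// // A pinned shared reference to the pooled object; no `Unpin` bound is required.
    /// let pinned = handle.as_pin();
    /// assert_eq!(&*pinned, "pinned");
    /// ```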
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn as_pin(&self) -> Pin<&T> {
        // SAFETY: Pooled items are always pinned.
        unsafe { Pin::new_unchecked(self) }
    }

    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin_mut.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn as_pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself.
        let as_mut = unsafe { self.ptr().as_mut() };

        // SAFETY: Pooled items are always pinned.
        unsafe { Pin::new_unchecked(as_mut) }
    }

    /// Casts this handle to reference the target as a trait object.
    ///
    /// This method is only intended for use by the [`define_pooled_dyn_cast!`] macro
    /// for type-safe casting operations.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the provided closure's input and output references
    /// point to the same object.
    #[doc(hidden)]
    #[must_use]
    #[inline]
    pub unsafe fn __private_cast_dyn_with_fn<U: ?Sized, F>(self, cast_fn: F) -> PooledMut<U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        let (inner, pool) = self.into_parts();

        // SAFETY: Forwarding callback safety guarantees from the caller.
        // We are an exclusive handle, so we always have the right to create
        // exclusive references to the target of the handle, satisfying that requirement.
        let new_inner = unsafe { inner.__private_cast_dyn_with_fn(cast_fn) };

        PooledMut {
            inner: new_inner,
            pool,
        }
    }

    /// Erases the type information from this handle, converting it to `PooledMut<()>`.
    ///
    /// This is useful for extending the lifetime of an object in the pool without retaining
    /// type information. The type-erased handle prevents access to the object but ensures
    /// it remains in the pool.
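    ///
    /// A minimal usage sketch (illustrative only, not run as a doc test; it mirrors the
    /// `erase_extends_lifetime` test below):
    ///
    /// ```ignore
    /// let pool = OpaquePool::with_layout_of::<u32>();
    /// let handle = pool.insert(42);
    ///
    /// // The erased handle keeps the object alive without typed access.
    /// let erased = handle.erase();
    /// assert_eq!(pool.len(), 1);
    ///
    /// // Dropping the erased handle removes the object from the pool.
    /// drop(erased);
    /// assert_eq!(pool.len(), 0);
    /// ```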
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // All mutations unviable - save some time.
    pub fn erase(self) -> PooledMut<()> {
        let (inner, pool) = self.into_parts();

        PooledMut {
            inner: inner.erase(),
            pool,
        }
    }
}

impl<T> PooledMut<T>
where
    T: Unpin,
{
    #[doc = include_str!("../../doc/snippets/ref_counted_into_inner.md")]
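    ///
    /// A minimal usage sketch (illustrative only, not run as a doc test; pool setup is
    /// assumed to mirror the tests below):
    ///
    /// ```ignore
    /// let pool = OpaquePool::with_layout_of::<String>();
    /// let handle = pool.insert(String::from("hello"));
    ///
    /// // Consumes the handle and moves the object out of the pool (requires `T: Unpin`).
    /// let value = handle.into_inner();
    /// assert_eq!(value, "hello");
    /// assert_eq!(pool.len(), 0);
    /// ```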
    #[must_use]
    #[inline]
    pub fn into_inner(self) -> T {
        let (inner, pool) = self.into_parts();

        let mut pool = pool.lock();
        // SAFETY: We are a managed unique handle, so we are the only one who is allowed to remove
        // the object from the pool - as long as we exist, the object exists in the pool. We keep
        // the pool alive for as long as any handle to it exists, so the pool must still exist.
        unsafe { pool.remove_unpin(inner) }
    }
}

#[cfg_attr(coverage_nightly, coverage(off))] // No API contract to test.
impl<T: ?Sized> fmt::Debug for PooledMut<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct(type_name::<Self>())
            .field("inner", &self.inner)
            .field("pool", &self.pool)
            .finish()
    }
}

impl<T: ?Sized> Deref for PooledMut<T> {
    type Target = T;

    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is an unviable waste of time.
    fn deref(&self) -> &Self::Target {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself.
        // We guarantee liveness by being a reference counted handle.
        unsafe { self.ptr().as_ref() }
    }
}

impl<T> DerefMut for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is an unviable waste of time.
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself.
        // We guarantee liveness by being a reference counted handle.
        unsafe { self.ptr().as_mut() }
    }
}

impl<T: ?Sized> Borrow<T> for PooledMut<T> {
    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is an unviable waste of time.
    fn borrow(&self) -> &T {
        self
    }
}

impl<T> BorrowMut<T> for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is an unviable waste of time.
    fn borrow_mut(&mut self) -> &mut T {
        self
    }
}

impl<T: ?Sized> AsRef<T> for PooledMut<T> {
    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is an unviable waste of time.
    fn as_ref(&self) -> &T {
        self
    }
}

impl<T> AsMut<T> for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is an unviable waste of time.
    fn as_mut(&mut self) -> &mut T {
        self
    }
}

impl<T: ?Sized> Drop for PooledMut<T> {
    fn drop(&mut self) {
        // While `RawPooledMut` is technically not `Copy`, we rely on the insider knowledge
        // that it is really just a fat pointer, so we can safely copy it bit-for-bit.
        // The only reason it is not `Copy` is to enforce uniqueness, which we do not care
        // about here because the copy in `self` is going away. We simply do not want to
        // wrap the field in an `Option` that every method would then have to check.
        //
        // SAFETY: The target is valid for reads.
        let inner = unsafe { ptr::read(&raw const self.inner) };

        let mut pool = self.pool.lock();

        // SAFETY: We are a managed unique handle, so we are the only one who is allowed to remove
        // the object from the pool - as long as we exist, the object exists in the pool. We keep
        // the pool alive for as long as any handle to it exists, so the pool must still exist.
        unsafe {
            pool.remove(inner);
        }
    }
}

#[cfg(test)]
#[cfg_attr(coverage_nightly, coverage(off))]
mod tests {
    use std::cell::Cell;
    use std::thread;

    use static_assertions::{assert_impl_all, assert_not_impl_any};

    use super::*;
    use crate::{NotSendNotSync, NotSendSync, SendAndSync, SendNotSync};

    assert_impl_all!(PooledMut<SendAndSync>: Send, Sync);
    assert_impl_all!(PooledMut<SendNotSync>: Send, Sync);
    assert_impl_all!(PooledMut<NotSendNotSync>: Sync);
    assert_impl_all!(PooledMut<NotSendSync>: Sync);

    assert_not_impl_any!(PooledMut<NotSendNotSync>: Send);
    assert_not_impl_any!(PooledMut<NotSendSync>: Send);

    // This is a unique handle, so it must not be cloneable or copyable.
    assert_not_impl_any!(PooledMut<SendAndSync>: Clone, Copy);

    // We must have a destructor because we need to remove the object from the pool on drop.
    assert_impl_all!(PooledMut<SendAndSync>: Drop);

    // We expect no destructor because we treat it as `Copy` in our own Drop::drop().
    assert_not_impl_any!(RawPooledMut<()>: Drop);

    #[test]
    fn unique_handle_can_cross_threads_with_send_only() {
        use crate::OpaquePool;

        // A type that is Send but not Sync.
        struct Counter {
            value: Cell<i32>,
        }

        // SAFETY: Counter is designed to be Send but not Sync for testing.
        unsafe impl Send for Counter {}

        impl Counter {
            fn new(value: i32) -> Self {
                Self {
                    value: Cell::new(value),
                }
            }

            fn increment(&self) {
                self.value.set(self.value.get() + 1);
            }

            fn get(&self) -> i32 {
                self.value.get()
            }
        }

        let pool = OpaquePool::with_layout_of::<Counter>();
        let handle = pool.insert(Counter::new(0));

        // Increment in the main thread.
        handle.increment();
        assert_eq!(handle.get(), 1);

        // Move the handle to another thread (requires Send but not Sync).
        let handle_in_thread = thread::spawn(move || {
            handle.increment();
            assert_eq!(handle.get(), 2);
            handle
        })
        .join()
        .unwrap();

        // Back in the main thread.
        assert_eq!(handle_in_thread.get(), 2);
    }

    #[test]
    fn erase_extends_lifetime() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<u32>();
        let handle = pool.insert(42);

        // Erase the unique handle.
        let erased = handle.erase();

        // The object is still alive via the erased handle.
        assert_eq!(pool.len(), 1);

        // Dropping the erased handle removes the object.
        drop(erased);
        assert_eq!(pool.len(), 0);
    }

    #[test]
    fn erase_then_convert_to_shared() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<String>();
        let handle = pool.insert(String::from("test"));

        // Erase and convert to shared.
        let erased_mut = handle.erase();
        let erased_shared = erased_mut.into_shared();
        let erased_clone = erased_shared.clone();

        assert_eq!(pool.len(), 1);

        drop(erased_shared);
        assert_eq!(pool.len(), 1);

        drop(erased_clone);
        assert_eq!(pool.len(), 0);
    }
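
    // An illustrative sketch rather than an exhaustive test: unique handles expose the
    // target directly via `Deref`, and via `DerefMut` when `T: Unpin`.
    #[test]
    fn deref_and_deref_mut_access_target() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<u32>();
        let mut handle = pool.insert(10_u32);

        // Shared access through `Deref`.
        assert_eq!(*handle, 10);

        // Exclusive access through `DerefMut` (`u32` is `Unpin`).
        *handle += 5;
        assert_eq!(*handle, 15);
    }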
}