// infinity_pool/handles/blind_raw_mut.rs
use std::any::type_name;
use std::fmt;
use std::pin::Pin;
use std::ptr::NonNull;

use crate::{LayoutKey, RawBlindPooled, RawPooledMut};
7
/// A unique handle to an object in a [`RawBlindPool`][crate::RawBlindPool].
#[doc = include_str!("../../doc/snippets/raw_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_raw_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/nonlocal_handle_thread_safety.md")]
pub struct RawBlindPooledMut<T>
where
    // We support casting to trait objects, hence `?Sized`.
    T: ?Sized,
{
    // NOTE(review): presumably identifies the layout bucket of the blind pool
    // that holds the object — confirm against `RawBlindPool::insert`/`remove`.
    // It is carried through `into_shared`, `erase` and dyn casts unchanged.
    key: LayoutKey,

    // We inherit our thread-safety traits from this one (Send from T, Sync always).
    inner: RawPooledMut<T>,
}
23
impl<T: ?Sized> RawBlindPooledMut<T> {
    /// Creates a handle by pairing a layout key with the typed inner handle.
    /// Crate-internal: only the pool itself constructs these.
    #[must_use]
    pub(crate) fn new(key: LayoutKey, inner: RawPooledMut<T>) -> Self {
        Self { key, inner }
    }

    #[doc = include_str!("../../doc/snippets/handle_ptr.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn ptr(&self) -> NonNull<T> {
        // Pure forwarding; the inner handle owns the pointer.
        self.inner.ptr()
    }

    /// Transforms the unique handle into a shared handle that can be cloned and copied freely.
    ///
    /// A shared handle does not support the creation of exclusive references to the target object
    /// and requires the caller to guarantee that no further access is attempted through any handle
    /// after removing the object from the pool.
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub fn into_shared(self) -> RawBlindPooled<T> {
        // The layout key carries over unchanged — only the exclusivity of the
        // inner handle is downgraded.
        RawBlindPooled::new(self.key, self.inner.into_shared())
    }

    #[doc = include_str!("../../doc/snippets/raw_as_pin.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub unsafe fn as_pin(&self) -> Pin<&T> {
        // SAFETY: Forwarding safety guarantees from the caller.
        let as_ref = unsafe { self.as_ref() };

        // SAFETY: Pooled items are always pinned.
        unsafe { Pin::new_unchecked(as_ref) }
    }

    #[doc = include_str!("../../doc/snippets/raw_as_pin_mut.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub unsafe fn as_pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself. Pointer validity
        // requires pool to be alive, which is a safety requirement of this function.
        let as_mut = unsafe { self.ptr().as_mut() };

        // SAFETY: Pooled items are always pinned.
        unsafe { Pin::new_unchecked(as_mut) }
    }

    #[doc = include_str!("../../doc/snippets/raw_mut_as_ref.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
    pub unsafe fn as_ref(&self) -> &T {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself. Pointer validity
        // requires pool to be alive, which is a safety requirement of this function.
        unsafe { self.ptr().as_ref() }
    }

    /// Casts this handle to reference the target as a trait object.
    ///
    /// This method is only intended for use by the [`define_pooled_dyn_cast!`] macro
    /// for type-safe casting operations.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the provided closure's input and output references
    /// point to the same object.
    ///
    /// The caller must guarantee that the pool will remain alive for the duration the returned
    /// reference is used.
    #[doc(hidden)]
    #[must_use]
    #[inline]
    pub unsafe fn __private_cast_dyn_with_fn<U: ?Sized, F>(self, cast_fn: F) -> RawBlindPooledMut<U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        // SAFETY: Forwarding callback safety guarantees from the caller.
        // We are an exclusive handle, so we always have the right to create
        // exclusive references to the target of the handle, satisfying that requirement.
        let new_inner = unsafe { self.inner.__private_cast_dyn_with_fn(cast_fn) };

        // The cast changes only the type the handle views the object as; the
        // object itself does not move, so the layout key is retained as-is.
        RawBlindPooledMut {
            key: self.key,
            inner: new_inner,
        }
    }

    /// Erase the type information from this handle, converting it to `RawBlindPooledMut<()>`.
    ///
    /// This is useful for extending the lifetime of an object in the pool without retaining
    /// type information. The type-erased handle prevents access to the object but ensures
    /// it remains in the pool.
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // All mutations unviable - save some time.
    pub fn erase(self) -> RawBlindPooledMut<()>
    where
        T: Send,
    {
        // The layout key is retained so the pool can still locate and remove
        // the object through the erased handle.
        RawBlindPooledMut {
            key: self.key,
            // This is safe only because `RawBlindPool` is always `!Send`, requiring unsafe code
            // to send across threads (which implies the user manually guaranteeing only `T: Send`
            // are placed into the pool). This is a behavior that the inner handle relies upon.
            // It assumes (correctly) that the pool it is associated with cannot be sent to another
            // thread (via safe code) unless `T: Send`. In our case, we just blanket-disable it
            // for soundness, as we cannot programmatically know whether everything in the pool
            // is `Send` or not.
            inner: self.inner.erase(),
        }
    }
}
142
// `as_mut` hands out a bare `&mut T`, which would allow the caller to move the
// value out of its pool slot (e.g. via `mem::replace`). Pooled items are pinned,
// so this is only offered for `Unpin` types, which are exempt from pinning
// guarantees; `!Unpin` types must go through `as_pin_mut` instead.
impl<T: ?Sized + Unpin> RawBlindPooledMut<T> {
    #[doc = include_str!("../../doc/snippets/raw_as_mut.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
    pub unsafe fn as_mut(&mut self) -> &mut T {
        // SAFETY: This is a unique handle, so we guarantee borrow safety
        // of the target object by borrowing the handle itself. Pointer validity
        // requires pool to be alive, which is a safety requirement of this function.
        unsafe { self.ptr().as_mut() }
    }
}
155
156#[cfg_attr(coverage_nightly, coverage(off))] // No API contract to test.
157impl<T: ?Sized> fmt::Debug for RawBlindPooledMut<T> {
158    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
159        f.debug_struct(type_name::<Self>())
160            .field("key", &self.key)
161            .field("inner", &self.inner)
162            .finish()
163    }
164}
165
#[cfg(test)]
#[cfg_attr(coverage_nightly, coverage(off))]
mod tests {
    use std::cell::Cell;
    use std::thread;

    use static_assertions::{assert_impl_all, assert_not_impl_any};

    use super::*;
    use crate::{NotSendNotSync, NotSendSync, RawBlindPool, SendAndSync, SendNotSync};

    assert_impl_all!(RawBlindPooledMut<SendAndSync>: Send, Sync);
    assert_impl_all!(RawBlindPooledMut<SendNotSync>: Send, Sync);
    assert_impl_all!(RawBlindPooledMut<NotSendNotSync>: Sync);
    assert_impl_all!(RawBlindPooledMut<NotSendSync>: Sync);

    assert_not_impl_any!(RawBlindPooledMut<NotSendNotSync>: Send);
    assert_not_impl_any!(RawBlindPooledMut<NotSendSync>: Send);

    // A unique handle must never be duplicable in any form.
    assert_not_impl_any!(RawBlindPooledMut<SendAndSync>: Clone, Copy);

    // Not a strict requirement, merely an expectation: no destructor logic here.
    assert_not_impl_any!(RawBlindPooledMut<SendAndSync>: Drop);

    #[test]
    fn unique_handle_can_cross_threads_with_send_only() {
        // A payload that is deliberately Send but not Sync.
        struct Counter {
            value: Cell<i32>,
        }

        // SAFETY: Counter is designed to be Send but not Sync for testing.
        unsafe impl Send for Counter {}

        impl Counter {
            fn new(initial: i32) -> Self {
                Self {
                    value: Cell::new(initial),
                }
            }

            fn increment(&self) {
                let next = self.value.get() + 1;
                self.value.set(next);
            }

            fn get(&self) -> i32 {
                self.value.get()
            }
        }

        let mut pool = RawBlindPool::new();
        let handle = pool.insert(Counter::new(0));

        // First increment happens on the spawning thread.
        // SAFETY: Handle is valid and pool is still alive.
        unsafe { handle.ptr().as_ref() }.increment();
        // SAFETY: Handle is valid and pool is still alive.
        assert_eq!(unsafe { handle.ptr().as_ref() }.get(), 1);

        // Shipping the handle to a worker needs only Send, never Sync.
        let returned_handle = thread::spawn(move || {
            // SAFETY: Handle is valid and pool is still alive.
            unsafe { handle.ptr().as_ref() }.increment();
            // SAFETY: Handle is valid and pool is still alive.
            assert_eq!(unsafe { handle.ptr().as_ref() }.get(), 2);
            handle
        })
        .join()
        .unwrap();

        // The worker's increment is observable after the handle returns.
        // SAFETY: Handle is valid and pool is still alive.
        assert_eq!(unsafe { returned_handle.ptr().as_ref() }.get(), 2);
    }

    #[test]
    fn as_pin_returns_pinned_reference() {
        let mut pool = RawBlindPool::new();
        let handle = pool.insert(42_u32);

        // SAFETY: Handle is valid and pool is still alive.
        let pinned_view = unsafe { handle.as_pin() };
        assert_eq!(*pinned_view.get_ref(), 42);
    }

    #[test]
    fn as_pin_mut_returns_pinned_mutable_reference() {
        let mut pool = RawBlindPool::new();
        let mut handle = pool.insert(42_u32);

        // SAFETY: Handle is valid and pool is still alive.
        *unsafe { handle.as_pin_mut() }.get_mut() = 99;

        // SAFETY: Handle is valid and pool is still alive.
        assert_eq!(*unsafe { handle.as_ref() }, 99);
    }

    #[test]
    fn as_ref_returns_reference() {
        let mut pool = RawBlindPool::new();
        let handle = pool.insert(42_u32);

        // SAFETY: Handle is valid and pool is still alive.
        let borrowed = unsafe { handle.as_ref() };
        assert_eq!(*borrowed, 42);
    }

    #[test]
    fn as_mut_returns_mutable_reference() {
        let mut pool = RawBlindPool::new();
        let mut handle = pool.insert(42_u32);

        // SAFETY: Handle is valid and pool is still alive.
        *unsafe { handle.as_mut() } = 99;

        // SAFETY: Handle is valid and pool is still alive.
        assert_eq!(*unsafe { handle.as_ref() }, 99);
    }

    #[test]
    fn erase_creates_type_erased_handle() {
        let mut pool = RawBlindPool::new();
        let typed_handle = pool.insert(42_u32);

        let erased_handle = typed_handle.erase();

        // Erasure must not remove the object from the pool.
        assert_eq!(pool.len(), 1);

        // SAFETY: Handle is valid and pool is still alive.
        unsafe {
            pool.remove(erased_handle);
        }
        assert_eq!(pool.len(), 0);
    }

    #[test]
    fn into_shared_converts_to_shared() {
        let mut pool = RawBlindPool::new();
        let unique_handle = pool.insert(42_u32);

        let shared_handle = unique_handle.into_shared();

        // SAFETY: Handle is valid and pool is still alive.
        assert_eq!(*unsafe { shared_handle.as_ref() }, 42);
    }
}