// infinity_pool/handles/blind_local_mut.rs

1use std::any::type_name;
2use std::borrow::{Borrow, BorrowMut};
3use std::cell::RefCell;
4use std::ops::{Deref, DerefMut};
5use std::pin::Pin;
6use std::ptr::NonNull;
7use std::{fmt, mem, ptr};
8
9use crate::{LayoutKey, LocalBlindPoolCore, LocalBlindPooled, RawPooledMut};
10
/// A unique single-threaded reference-counting handle for a pooled object.
#[doc = include_str!("../../doc/snippets/ref_counted_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_handle_implications.md")]
///
/// # Thread safety
///
/// This type is single-threaded.
pub struct LocalBlindPooledMut<T: ?Sized> {
    // Raw handle into the slab storage; all access to the target object
    // (`ptr()`, `Deref`, removal on drop) goes through this.
    inner: RawPooledMut<T>,
    // Locates the inner pool within `core` that stores this object's layout;
    // used by `Drop` and `into_inner` to find the pool again.
    key: LayoutKey,

    // This gives us our thread-safety characteristics (single-threaded),
    // overriding those of `RawPooledMut<T>`. This is expected because we align
    // with the stricter constraints of the pool itself, even if the underlying
    // slab storage allows for more flexibility.
    core: LocalBlindPoolCore,
}
28
29impl<T: ?Sized> LocalBlindPooledMut<T> {
30    #[must_use]
31    pub(crate) fn new(inner: RawPooledMut<T>, key: LayoutKey, core: LocalBlindPoolCore) -> Self {
32        Self { inner, key, core }
33    }
34
35    #[doc = include_str!("../../doc/snippets/handle_ptr.md")]
36    #[must_use]
37    #[inline]
38    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
39    pub fn ptr(&self) -> NonNull<T> {
40        self.inner.ptr()
41    }
42
43    #[doc = include_str!("../../doc/snippets/handle_into_shared.md")]
44    #[must_use]
45    #[inline]
46    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
47    pub fn into_shared(self) -> LocalBlindPooled<T> {
48        let (inner, layout, core) = self.into_parts();
49
50        LocalBlindPooled::new(inner, layout, core)
51    }
52
53    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
54    fn into_parts(self) -> (RawPooledMut<T>, LayoutKey, LocalBlindPoolCore) {
55        // We transfer these fields to the caller, so we do not want the current handle
56        // to be dropped. Hence we perform raw reads to extract the fields directly.
57
58        // SAFETY: The target is valid for reads.
59        let inner = unsafe { ptr::read(&raw const self.inner) };
60        // SAFETY: The target is valid for reads.
61        let key = unsafe { ptr::read(&raw const self.key) };
62        // SAFETY: The target is valid for reads.
63        let core = unsafe { ptr::read(&raw const self.core) };
64
65        // We are just "destructuring with Drop" here.
66        mem::forget(self);
67
68        (inner, key, core)
69    }
70
71    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin.md")]
72    #[must_use]
73    #[inline]
74    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
75    pub fn as_pin(&self) -> Pin<&T> {
76        // SAFETY: LocalBlindPooled items are always pinned.
77        unsafe { Pin::new_unchecked(self) }
78    }
79
80    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin_mut.md")]
81    #[must_use]
82    #[inline]
83    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
84    pub fn as_pin_mut(&mut self) -> Pin<&mut T> {
85        // SAFETY: This is a unique handle, so we guarantee borrow safety
86        // of the target object by borrowing the handle itself.
87        let as_mut = unsafe { self.ptr().as_mut() };
88
89        // SAFETY: LocalBlindPooled items are always pinned.
90        unsafe { Pin::new_unchecked(as_mut) }
91    }
92
93    /// Casts this handle to reference the target as a trait object.
94    ///
95    /// This method is only intended for use by the [`define_pooled_dyn_cast!`] macro
96    /// for type-safe casting operations.
97    ///
98    /// # Safety
99    ///
100    /// The caller must guarantee that the provided closure's input and output references
101    /// point to the same object.
102    #[doc(hidden)]
103    #[must_use]
104    #[inline]
105    pub unsafe fn __private_cast_dyn_with_fn<U: ?Sized, F>(
106        self,
107        cast_fn: F,
108    ) -> LocalBlindPooledMut<U>
109    where
110        F: FnOnce(&mut T) -> &mut U,
111    {
112        let (inner, key, core) = self.into_parts();
113
114        // SAFETY: Forwarding callback safety guarantees from the caller.
115        // We are an exclusive handle, so we always have the right to create
116        // exclusive references to the target of the handle, satisfying that requirement.
117        let new_inner = unsafe { inner.__private_cast_dyn_with_fn(cast_fn) };
118
119        LocalBlindPooledMut {
120            inner: new_inner,
121            key,
122            core,
123        }
124    }
125
126    /// Erase the type information from this handle, converting it to `LocalBlindPooledMut<()>`.
127    ///
128    /// This is useful for extending the lifetime of an object in the pool without retaining
129    /// type information. The type-erased handle prevents access to the object but ensures
130    /// it remains in the pool.
131    #[must_use]
132    #[inline]
133    #[cfg_attr(test, mutants::skip)] // All mutations unviable - save some time.
134    pub fn erase(self) -> LocalBlindPooledMut<()> {
135        let (inner, key, core) = self.into_parts();
136
137        // SAFETY: This handle is single-threaded, no cross-thread access even if `T: Send`.
138        let slab_handle_erased = unsafe { inner.slab_handle().erase() };
139
140        let inner_erased = RawPooledMut::new(inner.slab_index(), slab_handle_erased);
141
142        LocalBlindPooledMut {
143            inner: inner_erased,
144            key,
145            core,
146        }
147    }
148}
149
150impl<T> LocalBlindPooledMut<T>
151where
152    T: Unpin,
153{
154    #[doc = include_str!("../../doc/snippets/ref_counted_into_inner.md")]
155    #[must_use]
156    #[inline]
157    #[cfg_attr(test, mutants::skip)] // cargo-mutants tries many unviable mutations, wasting precious build minutes.
158    pub fn into_inner(self) -> T {
159        let (inner, key, core) = self.into_parts();
160
161        let mut core = RefCell::borrow_mut(&core);
162
163        let pool = core
164            .get_mut(&key)
165            .expect("if the handle still exists, the inner pool must still exist");
166
167        // SAFETY: We are a managed unique handle, so we are the only one who is allowed to remove
168        // the object from the pool - as long as we exist, the object exists in the pool. We keep
169        // the pool alive for as long as any handle to it exists, so the pool must still exist.
170        unsafe { pool.remove_unpin(inner) }
171    }
172}
173
174#[cfg_attr(coverage_nightly, coverage(off))] // No API contract to test.
175impl<T: ?Sized> fmt::Debug for LocalBlindPooledMut<T> {
176    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
177        f.debug_struct(type_name::<Self>())
178            .field("inner", &self.inner)
179            .field("key", &self.key)
180            .field("core", &self.core)
181            .finish()
182    }
183}
184
185impl<T: ?Sized> Deref for LocalBlindPooledMut<T> {
186    type Target = T;
187
188    #[inline]
189    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
190    fn deref(&self) -> &Self::Target {
191        // SAFETY: This is a unique handle, so we guarantee borrow safety
192        // of the target object by borrowing the handle itself.
193        // We guarantee liveness by being a reference counted handle.
194        unsafe { self.ptr().as_ref() }
195    }
196}
197
198impl<T> DerefMut for LocalBlindPooledMut<T>
199where
200    T: ?Sized + Unpin,
201{
202    #[inline]
203    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
204    fn deref_mut(&mut self) -> &mut Self::Target {
205        // SAFETY: This is a unique handle, so we guarantee borrow safety
206        // of the target object by borrowing the handle itself.
207        // We guarantee liveness by being a reference counted handle.
208        unsafe { self.ptr().as_mut() }
209    }
210}
211
212impl<T: ?Sized> Borrow<T> for LocalBlindPooledMut<T> {
213    #[inline]
214    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
215    fn borrow(&self) -> &T {
216        self
217    }
218}
219
220impl<T> BorrowMut<T> for LocalBlindPooledMut<T>
221where
222    T: ?Sized + Unpin,
223{
224    #[inline]
225    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
226    fn borrow_mut(&mut self) -> &mut T {
227        self
228    }
229}
230
231impl<T: ?Sized> AsRef<T> for LocalBlindPooledMut<T> {
232    #[inline]
233    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
234    fn as_ref(&self) -> &T {
235        self
236    }
237}
238
239impl<T> AsMut<T> for LocalBlindPooledMut<T>
240where
241    T: ?Sized + Unpin,
242{
243    #[inline]
244    #[cfg_attr(test, mutants::skip)] // Cargo-mutants does not understand this signature - every mutation is unviable waste of time.
245    fn as_mut(&mut self) -> &mut T {
246        self
247    }
248}
249
impl<T: ?Sized> Drop for LocalBlindPooledMut<T> {
    // Dropping the unique handle removes the object from its pool (which in
    // turn drops the object itself).
    fn drop(&mut self) {
        // While `RawLocalBlindPooledMut` is technically not Copy, we use our insider knowledge
        // that actually it is in reality just a fat pointer, so we can actually copy it.
        // The only reason it is not Copy is to ensure uniqueness, which we do not care
        // about here because the copy in `self` is going away. We just do not want to
        // insert an Option that we have to check in every method.
        //
        // SAFETY: The target is valid for reads.
        let inner = unsafe { ptr::read(&raw const self.inner) };

        // Single-threaded core access; `borrow_mut` would panic only on
        // re-entrant borrowing, which handle drop does not perform.
        let mut core = RefCell::borrow_mut(&self.core);

        // `key` locates the inner pool that stores objects of this layout.
        let pool = core
            .get_mut(&self.key)
            .expect("if the handle still exists, the inner pool must still exist");

        // SAFETY: We are a managed unique handle, so we are the only one who is allowed to remove
        // the object from the pool - as long as we exist, the object exists in the pool. We keep
        // the pool alive for as long as any handle to it exists, so the pool must still exist.
        unsafe {
            pool.remove(inner);
        }
    }
}
275
#[cfg(test)]
#[cfg_attr(coverage_nightly, coverage(off))]
mod tests {
    use std::borrow::{Borrow, BorrowMut};

    use static_assertions::assert_not_impl_any;

    use super::*;
    use crate::{LocalBlindPool, NotSendNotSync, NotSendSync, SendAndSync, SendNotSync};

    // The handle stays single-threaded no matter what `T` itself allows.
    assert_not_impl_any!(LocalBlindPooledMut<SendAndSync>: Send, Sync);
    assert_not_impl_any!(LocalBlindPooledMut<SendNotSync>: Send, Sync);
    assert_not_impl_any!(LocalBlindPooledMut<NotSendNotSync>: Send, Sync);
    assert_not_impl_any!(LocalBlindPooledMut<NotSendSync>: Send, Sync);

    // This is a unique handle, must not be cloneable/copyable.
    assert_not_impl_any!(LocalBlindPooledMut<SendAndSync>: Clone, Copy);

    // We expect no destructor because we treat it as `Copy` in our own Drop::drop().
    assert_not_impl_any!(RawPooledMut<()>: Drop);

    #[test]
    fn as_pin_returns_pinned_reference() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        let pinned = unique.as_pin();
        assert_eq!(*pinned.get_ref(), 42);
    }

    #[test]
    fn as_pin_mut_returns_pinned_mutable_reference() {
        let pool = LocalBlindPool::new();
        let mut unique = pool.insert(42_u32);

        *unique.as_pin_mut().get_mut() = 99;
        assert_eq!(*unique, 99);
    }

    #[test]
    fn deref_returns_reference_to_value() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        assert_eq!(*unique, 42);
    }

    #[test]
    fn deref_mut_allows_mutation() {
        let pool = LocalBlindPool::new();
        let mut unique = pool.insert(42_u32);

        *unique = 99;
        assert_eq!(*unique, 99);
    }

    #[test]
    fn borrow_returns_reference() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        let view: &u32 = unique.borrow();
        assert_eq!(*view, 42);
    }

    #[test]
    fn borrow_mut_returns_mutable_reference() {
        let pool = LocalBlindPool::new();
        let mut unique = pool.insert(42_u32);

        let view: &mut u32 = unique.borrow_mut();
        *view = 99;
        assert_eq!(*unique, 99);
    }

    #[test]
    fn as_ref_returns_reference() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        let view: &u32 = unique.as_ref();
        assert_eq!(*view, 42);
    }

    #[test]
    fn as_mut_returns_mutable_reference() {
        let pool = LocalBlindPool::new();
        let mut unique = pool.insert(42_u32);

        let view: &mut u32 = unique.as_mut();
        *view = 99;
        assert_eq!(*unique, 99);
    }

    #[test]
    fn erase_extends_lifetime() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        // The erased handle keeps the object alive even without type info.
        let erased = unique.erase();

        assert_eq!(pool.len(), 1);

        drop(erased);
        assert_eq!(pool.len(), 0);
    }

    #[test]
    fn into_inner_returns_value() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        let extracted = unique.into_inner();
        assert_eq!(extracted, 42);
        assert_eq!(pool.len(), 0);
    }

    #[test]
    fn into_shared_converts_to_shared_handle() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        let shared = unique.into_shared();
        assert_eq!(*shared, 42);
        assert_eq!(pool.len(), 1);
    }

    #[test]
    fn from_converts_mut_to_shared() {
        let pool = LocalBlindPool::new();
        let unique = pool.insert(42_u32);

        let shared: LocalBlindPooled<u32> = LocalBlindPooled::from(unique);
        assert_eq!(*shared, 42);
        assert_eq!(pool.len(), 1);
    }
}