infinity_pool/handles/managed_mut.rs

use std::any::type_name;
use std::borrow::{Borrow, BorrowMut};
use std::ops::{Deref, DerefMut};
use std::pin::Pin;
use std::ptr::NonNull;
use std::sync::Arc;
use std::{fmt, mem, ptr};

use parking_lot::Mutex;

use crate::{Pooled, RawOpaquePoolThreadSafe, RawPooledMut};

#[doc = include_str!("../../doc/snippets/ref_counted_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/nonlocal_handle_thread_safety.md")]
pub struct PooledMut<T: ?Sized> {
    inner: RawPooledMut<T>,

    pool: Arc<Mutex<RawOpaquePoolThreadSafe>>,
}

impl<T: ?Sized> PooledMut<T> {
    #[must_use]
    pub(crate) unsafe fn new(
        inner: RawPooledMut<T>,
        pool: Arc<Mutex<RawOpaquePoolThreadSafe>>,
    ) -> Self {
        Self { inner, pool }
    }

    #[doc = include_str!("../../doc/snippets/handle_ptr.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn ptr(&self) -> NonNull<T> {
        self.inner.ptr()
    }

    #[doc = include_str!("../../doc/snippets/handle_into_shared.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn into_shared(self) -> Pooled<T> {
        let (inner, pool) = self.into_parts();

        unsafe { Pooled::new(inner, pool) }
    }

    #[cfg_attr(test, mutants::skip)]
    fn into_parts(self) -> (RawPooledMut<T>, Arc<Mutex<RawOpaquePoolThreadSafe>>) {
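        // Move both fields out of `self` bitwise, then forget `self` so that
        // `Drop for PooledMut` does not run; the caller takes over responsibility
        // for the pool slot via the returned parts.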
        let pool = unsafe { ptr::read(&raw const self.pool) };
        let inner = unsafe { ptr::read(&raw const self.inner) };

        mem::forget(self);

        (inner, pool)
    }

    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn as_pin(&self) -> Pin<&T> {
        unsafe { Pin::new_unchecked(self) }
    }

    #[doc = include_str!("../../doc/snippets/ref_counted_as_pin_mut.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn as_pin_mut(&mut self) -> Pin<&mut T> {
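        // The handle is unique and we hold it via `&mut self`, so the `&mut T`
        // formed from the pool pointer does not alias; the value itself stays
        // pinned in the pool, hence the `Pin` wrapper.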
        let as_mut = unsafe { self.ptr().as_mut() };

        unsafe { Pin::new_unchecked(as_mut) }
    }

    #[doc(hidden)]
    #[must_use]
    #[inline]
    pub unsafe fn __private_cast_dyn_with_fn<U: ?Sized, F>(self, cast_fn: F) -> PooledMut<U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
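        // `cast_fn` only re-derives the pointer to the stored value (typically an
        // unsizing cast such as `&mut T` -> `&mut dyn Trait`); the pool reference
        // is carried over unchanged.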
        let (inner, pool) = self.into_parts();

        let new_inner = unsafe { inner.__private_cast_dyn_with_fn(cast_fn) };

        PooledMut {
            inner: new_inner,
            pool,
        }
    }

    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn erase(self) -> PooledMut<()> {
        let (inner, pool) = self.into_parts();

        PooledMut {
            inner: inner.erase(),
            pool,
        }
    }
}

impl<T> PooledMut<T>
where
    T: Unpin,
{
    #[doc = include_str!("../../doc/snippets/ref_counted_into_inner.md")]
    #[must_use]
    #[inline]
    pub fn into_inner(self) -> T {
        let (inner, pool) = self.into_parts();

        let mut pool = pool.lock();
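        // With the pool locked, remove the slot and move the `Unpin` value out of it.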
        unsafe { pool.remove_unpin(inner) }
    }
}

#[cfg_attr(coverage_nightly, coverage(off))]
impl<T: ?Sized> fmt::Debug for PooledMut<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct(type_name::<Self>())
            .field("inner", &self.inner)
            .field("pool", &self.pool)
            .finish()
    }
}

impl<T: ?Sized> Deref for PooledMut<T> {
    type Target = T;

    #[inline]
    #[cfg_attr(test, mutants::skip)]
    fn deref(&self) -> &Self::Target {
        unsafe { self.ptr().as_ref() }
    }
}

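// Mutable access without `Pin` is only offered when `T: Unpin`; values that are
// not `Unpin` must be reached through `as_pin_mut` instead.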
impl<T> DerefMut for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { self.ptr().as_mut() }
    }
}

impl<T: ?Sized> Borrow<T> for PooledMut<T> {
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    fn borrow(&self) -> &T {
        self
    }
}

impl<T> BorrowMut<T> for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    fn borrow_mut(&mut self) -> &mut T {
        self
    }
}

impl<T: ?Sized> AsRef<T> for PooledMut<T> {
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    fn as_ref(&self) -> &T {
        self
    }
}

impl<T> AsMut<T> for PooledMut<T>
where
    T: ?Sized + Unpin,
{
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    fn as_mut(&mut self) -> &mut T {
        self
    }
}

impl<T: ?Sized> Drop for PooledMut<T> {
    fn drop(&mut self) {
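        // `drop` only receives `&mut self`, so the raw handle is read out bitwise;
        // it is consumed by `remove` below and the field is never used again.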
        let inner = unsafe { ptr::read(&raw const self.inner) };

        let mut pool = self.pool.lock();

        unsafe {
            pool.remove(inner);
        }
    }
}

#[cfg(test)]
#[cfg_attr(coverage_nightly, coverage(off))]
mod tests {
    use std::cell::Cell;
    use std::thread;

    use static_assertions::{assert_impl_all, assert_not_impl_any};

    use super::*;
    use crate::{NotSendNotSync, NotSendSync, SendAndSync, SendNotSync};

    assert_impl_all!(PooledMut<SendAndSync>: Send, Sync);
    assert_impl_all!(PooledMut<SendNotSync>: Send, Sync);
    assert_impl_all!(PooledMut<NotSendNotSync>: Sync);
    assert_impl_all!(PooledMut<NotSendSync>: Sync);

    assert_not_impl_any!(PooledMut<NotSendNotSync>: Send);
    assert_not_impl_any!(PooledMut<NotSendSync>: Send);

    assert_not_impl_any!(PooledMut<SendAndSync>: Clone, Copy);

    assert_impl_all!(PooledMut<SendAndSync>: Drop);

    assert_not_impl_any!(RawPooledMut<()>: Drop);

    #[test]
    fn unique_handle_can_cross_threads_with_send_only() {
        use crate::OpaquePool;

        struct Counter {
            value: Cell<i32>,
        }

        unsafe impl Send for Counter {}

        impl Counter {
            fn new(value: i32) -> Self {
                Self {
                    value: Cell::new(value),
                }
            }

            fn increment(&self) {
                self.value.set(self.value.get() + 1);
            }

            fn get(&self) -> i32 {
                self.value.get()
            }
        }

        let pool = OpaquePool::with_layout_of::<Counter>();
        let handle = pool.insert(Counter::new(0));

        handle.increment();
        assert_eq!(handle.get(), 1);

        let handle_in_thread = thread::spawn(move || {
            handle.increment();
            assert_eq!(handle.get(), 2);
            handle
        })
        .join()
        .unwrap();

        assert_eq!(handle_in_thread.get(), 2);
    }

    #[test]
    fn erase_extends_lifetime() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<u32>();
        let handle = pool.insert(42);

        let erased = handle.erase();

        assert_eq!(pool.len(), 1);

        drop(erased);
        assert_eq!(pool.len(), 0);
    }

    #[test]
    fn erase_then_convert_to_shared() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<String>();
        let handle = pool.insert(String::from("test"));

        let erased_mut = handle.erase();
        let erased_shared = erased_mut.into_shared();
        let erased_clone = erased_shared.clone();

        assert_eq!(pool.len(), 1);

        drop(erased_shared);
        assert_eq!(pool.len(), 1);

        drop(erased_clone);
        assert_eq!(pool.len(), 0);
    }
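
    // Sketch of an additional test for `into_inner`, written against the same
    // assumptions the tests above already rely on: `insert` hands back a
    // `PooledMut` handle and removing a value shrinks `pool.len()`.
    #[test]
    fn into_inner_moves_value_out_of_pool() {
        use crate::OpaquePool;

        let pool = OpaquePool::with_layout_of::<u32>();
        let handle = pool.insert(42_u32);

        assert_eq!(pool.len(), 1);

        // `into_inner` consumes the unique handle and moves the `Unpin` value out.
        let value = handle.into_inner();

        assert_eq!(value, 42);
        assert_eq!(pool.len(), 0);
    }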
}