// infinity_pool/handles/blind_raw_mut.rs
use std::any::type_name;
2use std::fmt;
3use std::pin::Pin;
4use std::ptr::NonNull;
5
6use crate::{LayoutKey, RawBlindPooled, RawPooledMut};
7
#[doc = include_str!("../../doc/snippets/raw_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/unique_raw_handle_implications.md")]
#[doc = include_str!("../../doc/snippets/nonlocal_handle_thread_safety.md")]
pub struct RawBlindPooledMut<T>
where
    T: ?Sized,
{
    // Layout identifier carried alongside the typed handle; preserved
    // verbatim through `into_shared()`, `erase()`, and dyn casts.
    // NOTE(review): presumably lets the blind pool locate the correct
    // storage for this item — confirm against `RawBlindPool`.
    key: LayoutKey,

    // The underlying typed unique handle; all pointer access and
    // conversions in this file delegate to it.
    inner: RawPooledMut<T>,
}
23
impl<T: ?Sized> RawBlindPooledMut<T> {
    /// Pairs a layout key with a typed unique handle.
    ///
    /// Crate-internal constructor; callers elsewhere in the crate are
    /// responsible for supplying a `key` that matches `inner`'s storage.
    #[must_use]
    pub(crate) fn new(key: LayoutKey, inner: RawPooledMut<T>) -> Self {
        Self { key, inner }
    }

    #[doc = include_str!("../../doc/snippets/handle_ptr.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn ptr(&self) -> NonNull<T> {
        // Pure delegation; validity of the returned pointer is governed by
        // the contract in the doc snippet above.
        self.inner.ptr()
    }

    /// Converts this unique handle into a shared (`RawBlindPooled`) handle,
    /// consuming `self` and carrying the layout key over unchanged.
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn into_shared(self) -> RawBlindPooled<T> {
        RawBlindPooled::new(self.key, self.inner.into_shared())
    }

    #[doc = include_str!("../../doc/snippets/raw_as_pin.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub unsafe fn as_pin(&self) -> Pin<&T> {
        // SAFETY: forwarded to the caller — same contract as `as_ref()`
        // (documented in the snippet above).
        let as_ref = unsafe { self.as_ref() };

        // SAFETY: sound only if the pooled item never moves while this
        // handle is live. NOTE(review): address stability is a pool
        // invariant established outside this file — confirm there.
        unsafe { Pin::new_unchecked(as_ref) }
    }

    #[doc = include_str!("../../doc/snippets/raw_as_pin_mut.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub unsafe fn as_pin_mut(&mut self) -> Pin<&mut T> {
        // SAFETY: forwarded to the caller — the doc snippet above states
        // the aliasing/liveness requirements for the mutable borrow.
        let as_mut = unsafe { self.ptr().as_mut() };

        // SAFETY: sound only if the pooled item never moves while this
        // handle is live. NOTE(review): address stability is a pool
        // invariant established outside this file — confirm there.
        unsafe { Pin::new_unchecked(as_mut) }
    }

    #[doc = include_str!("../../doc/snippets/raw_mut_as_ref.md")]
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub unsafe fn as_ref(&self) -> &T {
        // SAFETY: forwarded to the caller — the doc snippet above states
        // the liveness/aliasing requirements for the shared borrow.
        unsafe { self.ptr().as_ref() }
    }

    /// Support for macro-generated casts to trait objects: rebuilds the
    /// handle around a `U`-typed inner handle while keeping the same key.
    /// Hidden from docs — not part of the public API surface.
    #[doc(hidden)]
    #[must_use]
    #[inline]
    pub unsafe fn __private_cast_dyn_with_fn<U: ?Sized, F>(self, cast_fn: F) -> RawBlindPooledMut<U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        // SAFETY: forwarded to the caller — same contract as the inner
        // handle's `__private_cast_dyn_with_fn`.
        let new_inner = unsafe { self.inner.__private_cast_dyn_with_fn(cast_fn) };

        RawBlindPooledMut {
            key: self.key,
            inner: new_inner,
        }
    }

    /// Erases the item type, yielding a `RawBlindPooledMut<()>` (e.g. for
    /// later removal from the pool, as exercised in the tests below).
    // NOTE(review): the `T: Send` bound mirrors the inner handle's
    // `erase()`; rationale lives with `RawPooledMut` — confirm there.
    #[must_use]
    #[inline]
    #[cfg_attr(test, mutants::skip)]
    pub fn erase(self) -> RawBlindPooledMut<()>
    where
        T: Send,
    {
        RawBlindPooledMut {
            key: self.key,
            inner: self.inner.erase(),
        }
    }
}
142
143impl<T: ?Sized + Unpin> RawBlindPooledMut<T> {
144 #[doc = include_str!("../../doc/snippets/raw_as_mut.md")]
145 #[must_use]
146 #[inline]
147 #[cfg_attr(test, mutants::skip)] pub unsafe fn as_mut(&mut self) -> &mut T {
149 unsafe { self.ptr().as_mut() }
153 }
154}
155
156#[cfg_attr(coverage_nightly, coverage(off))] impl<T: ?Sized> fmt::Debug for RawBlindPooledMut<T> {
158 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
159 f.debug_struct(type_name::<Self>())
160 .field("key", &self.key)
161 .field("inner", &self.inner)
162 .finish()
163 }
164}
165
#[cfg(test)]
#[cfg_attr(coverage_nightly, coverage(off))]
mod tests {
    use std::cell::Cell;
    use std::thread;

    use static_assertions::{assert_impl_all, assert_not_impl_any};

    use super::*;
    use crate::{NotSendNotSync, NotSendSync, RawBlindPool, SendAndSync, SendNotSync};

    // Thread-safety matrix: per these assertions the handle is `Sync`
    // regardless of `T`, and `Send` only when `T: Send`.
    assert_impl_all!(RawBlindPooledMut<SendAndSync>: Send, Sync);
    assert_impl_all!(RawBlindPooledMut<SendNotSync>: Send, Sync);
    assert_impl_all!(RawBlindPooledMut<NotSendNotSync>: Sync);
    assert_impl_all!(RawBlindPooledMut<NotSendSync>: Sync);

    assert_not_impl_any!(RawBlindPooledMut<NotSendNotSync>: Send);
    assert_not_impl_any!(RawBlindPooledMut<NotSendSync>: Send);

    // Unique handles are move-only: no duplication via Clone/Copy.
    assert_not_impl_any!(RawBlindPooledMut<SendAndSync>: Clone, Copy);

    // No Drop impl: dropping the raw handle must not touch the pool.
    assert_not_impl_any!(RawBlindPooledMut<SendAndSync>: Drop);

    #[test]
    fn unique_handle_can_cross_threads_with_send_only() {
        // A `Send`-but-not-`Sync` type (interior mutability via `Cell`),
        // used to show the handle itself can move across threads.
        struct Counter {
            value: Cell<i32>,
        }

        // Asserted explicitly for the test; `Cell` makes Counter `!Sync`.
        unsafe impl Send for Counter {}

        impl Counter {
            fn new(value: i32) -> Self {
                Self {
                    value: Cell::new(value),
                }
            }

            fn increment(&self) {
                self.value.set(self.value.get() + 1);
            }

            fn get(&self) -> i32 {
                self.value.get()
            }
        }

        let mut pool = RawBlindPool::new();
        let handle = pool.insert(Counter::new(0));

        // Mutate through the shared ref on the origin thread first.
        unsafe { handle.ptr().as_ref() }.increment();
        assert_eq!(unsafe { handle.ptr().as_ref() }.get(), 1);

        // Move the handle into another thread, mutate there, and move it
        // back — state changes must be visible across the round trip.
        let handle_in_thread = thread::spawn(move || {
            unsafe { handle.ptr().as_ref() }.increment();
            assert_eq!(unsafe { handle.ptr().as_ref() }.get(), 2);
            handle
        })
        .join()
        .unwrap();

        assert_eq!(unsafe { handle_in_thread.ptr().as_ref() }.get(), 2);
    }

    #[test]
    fn as_pin_returns_pinned_reference() {
        let mut pool = RawBlindPool::new();
        let handle = pool.insert(42_u32);

        // `get_ref` is safe here because u32: Unpin.
        let pinned = unsafe { handle.as_pin() };
        assert_eq!(*pinned.get_ref(), 42);
    }

    #[test]
    fn as_pin_mut_returns_pinned_mutable_reference() {
        let mut pool = RawBlindPool::new();
        let mut handle = pool.insert(42_u32);

        // Write through the pinned mutable ref, then read back via as_ref.
        *unsafe { handle.as_pin_mut() }.get_mut() = 99;

        assert_eq!(*unsafe { handle.as_ref() }, 99);
    }

    #[test]
    fn as_ref_returns_reference() {
        let mut pool = RawBlindPool::new();
        let handle = pool.insert(42_u32);

        let reference = unsafe { handle.as_ref() };
        assert_eq!(*reference, 42);
    }

    #[test]
    fn as_mut_returns_mutable_reference() {
        let mut pool = RawBlindPool::new();
        let mut handle = pool.insert(42_u32);

        *unsafe { handle.as_mut() } = 99;

        assert_eq!(*unsafe { handle.as_ref() }, 99);
    }

    #[test]
    fn erase_creates_type_erased_handle() {
        let mut pool = RawBlindPool::new();
        let handle = pool.insert(42_u32);

        let erased = handle.erase();

        // Erasure does not remove the item…
        assert_eq!(pool.len(), 1);

        // …but the erased handle is still sufficient to remove it.
        unsafe {
            pool.remove(erased);
        }
        assert_eq!(pool.len(), 0);
    }

    #[test]
    fn into_shared_converts_to_shared() {
        let mut pool = RawBlindPool::new();
        let handle = pool.insert(42_u32);

        let shared = handle.into_shared();

        assert_eq!(*unsafe { shared.as_ref() }, 42);
    }
}