// intuicio_data/managed_box.rs

#![allow(clippy::arc_with_non_send_sync)]

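//! Shared, clone-able boxes over lifetime-tracked values: [`ManagedBox`]
//! wraps a statically typed [`Managed`] value, while [`DynamicManagedBox`]
//! wraps a type-erased [`DynamicManaged`] one.
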
use crate::{
    Finalize,
    lifetime::{
        Lifetime, LifetimeLazy, LifetimeRef, LifetimeRefMut, ValueReadAccess, ValueWriteAccess,
    },
    managed::{
        DynamicManaged, DynamicManagedLazy, DynamicManagedRef, DynamicManagedRefMut, Managed,
        ManagedLazy, ManagedRef, ManagedRefMut,
    },
    type_hash::TypeHash,
};
use std::{alloc::Layout, cell::UnsafeCell, sync::Arc};

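/// Shared handle to a lifetime-tracked, statically typed value.
///
/// Clones share the same underlying [`Managed`] value; reads and writes are
/// checked at runtime through the value's [`Lifetime`]. A minimal sketch,
/// mirroring the unit tests below:
///
/// ```ignore
/// let a = ManagedBox::new(42usize);
/// let mut b = a.clone();
/// *b.write().unwrap() = 10;
/// assert_eq!(*a.read().unwrap(), 10);
/// ```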
pub struct ManagedBox<T> {
    inner: Arc<UnsafeCell<Managed<T>>>,
}

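// SAFETY (an assumption this module makes, not something the compiler checks):
// safe access to the inner value is mediated by its `Lifetime` borrow
// tracking, which is what makes sharing the `UnsafeCell` across threads sound
// even though `Arc<UnsafeCell<_>>` is not `Send`/`Sync` by itself.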
unsafe impl<T> Send for ManagedBox<T> {}
unsafe impl<T> Sync for ManagedBox<T> {}

impl<T: Default> Default for ManagedBox<T> {
    fn default() -> Self {
        Self::new(T::default())
    }
}

impl<T> ManagedBox<T> {
    pub fn new(value: T) -> Self
    where
        T: Finalize,
    {
        Self {
            inner: Arc::new(UnsafeCell::new(Managed::new(value))),
        }
    }

    pub fn new_raw(data: T, lifetime: Lifetime) -> Self {
        Self {
            inner: Arc::new(UnsafeCell::new(Managed::new_raw(data, lifetime))),
        }
    }

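    /// Converts into a type-erased [`DynamicManagedBox`]. Returns `None` when
    /// other clones of this box exist (the inner `Arc` must be unwrapped to
    /// take ownership) or when the inner conversion fails.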
    pub fn into_dynamic(self) -> Option<DynamicManagedBox> {
        Arc::try_unwrap(self.inner).ok().and_then(|inner| {
            inner
                .into_inner()
                .into_dynamic()
                .map(|result| DynamicManagedBox {
                    inner: Arc::new(UnsafeCell::new(result)),
                })
        })
    }

    pub fn instances_count(&self) -> usize {
        Arc::strong_count(&self.inner)
    }

    pub fn does_share_reference(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.inner, &other.inner)
    }

    pub fn type_hash(&self) -> TypeHash {
        TypeHash::of::<T>()
    }

    pub fn lifetime_borrow(&self) -> Option<LifetimeRef> {
        unsafe { (&*self.inner.get()).lifetime().borrow() }
    }

    pub fn lifetime_borrow_mut(&self) -> Option<LifetimeRefMut> {
        unsafe { (&*self.inner.get()).lifetime().borrow_mut() }
    }

    pub fn lifetime_lazy(&self) -> LifetimeLazy {
        unsafe { (&*self.inner.get()).lifetime().lazy() }
    }

    pub fn read(&'_ self) -> Option<ValueReadAccess<'_, T>> {
        unsafe { (&*self.inner.get()).read() }
    }

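    /// Async counterpart of [`read`](Self::read): resolves once read access
    /// can be acquired instead of returning `None`.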
    pub async fn read_async(&'_ self) -> ValueReadAccess<'_, T> {
        unsafe { (&*self.inner.get()).read_async().await }
    }

    pub fn write(&'_ mut self) -> Option<ValueWriteAccess<'_, T>> {
        unsafe { (&mut *self.inner.get()).write() }
    }

    pub async fn write_async(&'_ mut self) -> ValueWriteAccess<'_, T> {
        unsafe { (&mut *self.inner.get()).write_async().await }
    }

    pub fn borrow(&self) -> Option<ManagedRef<T>> {
        unsafe { (&*self.inner.get()).borrow() }
    }

    pub async fn borrow_async(&self) -> ManagedRef<T> {
        unsafe { (&*self.inner.get()).borrow_async().await }
    }

    pub fn borrow_mut(&mut self) -> Option<ManagedRefMut<T>> {
        unsafe { (&mut *self.inner.get()).borrow_mut() }
    }

    pub async fn borrow_mut_async(&mut self) -> ManagedRefMut<T> {
        unsafe { (&mut *self.inner.get()).borrow_mut_async().await }
    }

    pub fn lazy(&mut self) -> ManagedLazy<T> {
        unsafe { (&mut *self.inner.get()).lazy() }
    }

    /// # Safety
    /// Creates a lazy handle from a shared reference; the caller must ensure
    /// it is not used to obtain mutable access that aliases other live
    /// borrows of this value.
    pub unsafe fn lazy_immutable(&self) -> ManagedLazy<T> {
        unsafe { (&*self.inner.get()).lazy_immutable() }
    }

    /// # Safety
    /// The returned pointer bypasses lifetime tracking; the caller must not
    /// dereference it after the box (and all of its clones) is dropped, nor
    /// while a mutable borrow of the value is live.
    pub unsafe fn as_ptr(&self) -> *const T {
        unsafe { (&*self.inner.get()).as_ptr() }
    }

    /// # Safety
    /// The returned pointer bypasses lifetime tracking; the caller must
    /// ensure exclusive access for as long as it is used and must not
    /// dereference it after the box is dropped.
    pub unsafe fn as_mut_ptr(&mut self) -> *mut T {
        unsafe { (&mut *self.inner.get()).as_mut_ptr() }
    }
}

impl<T> Clone for ManagedBox<T> {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

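/// Shared handle to a lifetime-tracked, type-erased value.
///
/// The stored type is identified by its [`TypeHash`] and checked on access.
/// A minimal sketch, mirroring the unit tests below:
///
/// ```ignore
/// let a = DynamicManagedBox::new(42usize).ok().unwrap();
/// assert!(a.is::<usize>());
/// let mut b = a.clone();
/// *b.write::<usize>().unwrap() = 10;
/// assert_eq!(*a.read::<usize>().unwrap(), 10);
/// ```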
pub struct DynamicManagedBox {
    inner: Arc<UnsafeCell<DynamicManaged>>,
}

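// SAFETY: same reasoning as for `ManagedBox` above.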
unsafe impl Send for DynamicManagedBox {}
unsafe impl Sync for DynamicManagedBox {}

impl DynamicManagedBox {
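    /// Wraps a value in a type-erased box; on failure the original value is
    /// handed back to the caller in the `Err` variant.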
    pub fn new<T: Finalize>(data: T) -> Result<Self, T> {
        Ok(Self {
            inner: Arc::new(UnsafeCell::new(DynamicManaged::new(data)?)),
        })
    }

    pub fn new_raw(
        type_hash: TypeHash,
        lifetime: Lifetime,
        memory: *mut u8,
        layout: Layout,
        finalizer: unsafe fn(*mut ()),
    ) -> Option<Self> {
        Some(Self {
            inner: Arc::new(UnsafeCell::new(DynamicManaged::new_raw(
                type_hash, lifetime, memory, layout, finalizer,
            )?)),
        })
    }

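    /// Allocates storage for the given layout (padded to its alignment)
    /// without initializing it; the caller is responsible for writing a
    /// valid value before use.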
    pub fn new_uninitialized(
        type_hash: TypeHash,
        layout: Layout,
        finalizer: unsafe fn(*mut ()),
    ) -> Self {
        Self {
            inner: Arc::new(UnsafeCell::new(DynamicManaged::new_uninitialized(
                type_hash,
                layout.pad_to_align(),
                finalizer,
            ))),
        }
    }

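    /// Converts back into a statically typed [`ManagedBox<T>`]. Fails,
    /// returning `self`, when other clones of this box exist or when the
    /// stored value is not a `T`.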
    pub fn into_typed<T>(self) -> Result<ManagedBox<T>, Self> {
        match Arc::try_unwrap(self.inner) {
            Ok(inner) => match inner.into_inner().into_typed() {
                Ok(result) => Ok(ManagedBox {
                    inner: Arc::new(UnsafeCell::new(result)),
                }),
                Err(dynamic) => Err(Self {
                    inner: Arc::new(UnsafeCell::new(dynamic)),
                }),
            },
            Err(result) => Err(Self { inner: result }),
        }
    }

    pub fn instances_count(&self) -> usize {
        Arc::strong_count(&self.inner)
    }

    pub fn does_share_reference(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.inner, &other.inner)
    }

    pub fn type_hash(&self) -> TypeHash {
        unsafe { *(&*self.inner.get()).type_hash() }
    }

    pub fn lifetime_borrow(&self) -> Option<LifetimeRef> {
        unsafe { (&*self.inner.get()).lifetime().borrow() }
    }

    pub fn lifetime_borrow_mut(&self) -> Option<LifetimeRefMut> {
        unsafe { (&*self.inner.get()).lifetime().borrow_mut() }
    }

    pub fn lifetime_lazy(&self) -> LifetimeLazy {
        unsafe { (&*self.inner.get()).lifetime().lazy() }
    }

    pub fn is<T>(&self) -> bool {
        unsafe { (&*self.inner.get()).is::<T>() }
    }

    pub fn borrow(&self) -> Option<DynamicManagedRef> {
        unsafe { (&*self.inner.get()).borrow() }
    }

    pub async fn borrow_async(&self) -> DynamicManagedRef {
        unsafe { (&*self.inner.get()).borrow_async().await }
    }

    pub fn borrow_mut(&mut self) -> Option<DynamicManagedRefMut> {
        unsafe { (&mut *self.inner.get()).borrow_mut() }
    }

    pub async fn borrow_mut_async(&mut self) -> DynamicManagedRefMut {
        unsafe { (&mut *self.inner.get()).borrow_mut_async().await }
    }

    pub fn lazy(&self) -> DynamicManagedLazy {
        unsafe { (&*self.inner.get()).lazy() }
    }

    pub fn read<T>(&'_ self) -> Option<ValueReadAccess<'_, T>> {
        unsafe { (&*self.inner.get()).read() }
    }

    pub async fn read_async<'a, T: 'a>(&'a self) -> Option<ValueReadAccess<'a, T>> {
        unsafe { (&*self.inner.get()).read_async().await }
    }

    pub fn write<T>(&'_ mut self) -> Option<ValueWriteAccess<'_, T>> {
        unsafe { (&mut *self.inner.get()).write() }
    }

    pub async fn write_async<'a, T: 'a>(&'a mut self) -> Option<ValueWriteAccess<'a, T>> {
        unsafe { (&mut *self.inner.get()).write_async().await }
    }

    /// # Safety
    /// Exposes the raw bytes of the stored value; the caller must not rely on
    /// them beyond the lifetime of this box.
    pub unsafe fn memory(&self) -> &[u8] {
        unsafe { (&*self.inner.get()).memory() }
    }

    /// # Safety
    /// Exposes the raw bytes of the stored value mutably; the caller must
    /// keep the value in a state that is valid for its type.
    pub unsafe fn memory_mut(&mut self) -> &mut [u8] {
        unsafe { (&mut *self.inner.get()).memory_mut() }
    }

    /// # Safety
    /// The returned pointer bypasses lifetime tracking; the caller must not
    /// dereference it after the box is dropped, nor while a mutable borrow of
    /// the value is live. Returns `None` on type mismatch.
    pub unsafe fn as_ptr<T>(&self) -> Option<*const T> {
        unsafe { (&*self.inner.get()).as_ptr() }
    }

    /// # Safety
    /// The returned pointer bypasses lifetime tracking; the caller must
    /// ensure exclusive access for as long as it is used. Returns `None` on
    /// type mismatch.
    pub unsafe fn as_mut_ptr<T>(&mut self) -> Option<*mut T> {
        unsafe { (&mut *self.inner.get()).as_mut_ptr() }
    }

    /// # Safety
    /// Untyped variant of [`as_ptr`](Self::as_ptr); the same aliasing and
    /// lifetime requirements apply.
    pub unsafe fn as_ptr_raw(&self) -> *const u8 {
        unsafe { (&*self.inner.get()).as_ptr_raw() }
    }

    /// # Safety
    /// Untyped variant of [`as_mut_ptr`](Self::as_mut_ptr); the same aliasing
    /// and lifetime requirements apply.
    pub unsafe fn as_mut_ptr_raw(&mut self) -> *mut u8 {
        unsafe { (&mut *self.inner.get()).as_mut_ptr_raw() }
    }
}

impl Clone for DynamicManagedBox {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Compile-time assertion that a type is `Send + Sync`.
    fn is_async<T: Send + Sync>() {}

    #[test]
    fn test_managed_box() {
        is_async::<ManagedBox<i32>>();

        let a = ManagedBox::new(42usize);
        assert_eq!(*a.read().unwrap(), 42);
        assert_eq!(a.instances_count(), 1);
        let mut b = a.clone();
        assert_eq!(a.instances_count(), 2);
        assert_eq!(b.instances_count(), 2);
        assert!(a.does_share_reference(&b));
        assert_eq!(*b.read().unwrap(), 42);
        *b.write().unwrap() = 10;
        assert_eq!(*a.read().unwrap(), 10);
        assert_eq!(*b.read().unwrap(), 10);
        drop(a);
        assert_eq!(b.instances_count(), 1);
        drop(b);
    }

    #[test]
    fn test_dynamic_managed_box() {
        is_async::<DynamicManagedBox>();

        let a = DynamicManagedBox::new(42usize).ok().unwrap();
        assert!(a.is::<usize>());
        assert_eq!(*a.read::<usize>().unwrap(), 42);
        assert_eq!(a.instances_count(), 1);
        let mut b = a.clone();
        assert!(b.is::<usize>());
        assert_eq!(a.instances_count(), 2);
        assert_eq!(b.instances_count(), 2);
        assert!(a.does_share_reference(&b));
        assert_eq!(*b.read::<usize>().unwrap(), 42);
        *b.write::<usize>().unwrap() = 10;
        assert_eq!(*a.read::<usize>().unwrap(), 10);
        assert_eq!(*b.read::<usize>().unwrap(), 10);
        drop(a);
        assert_eq!(b.instances_count(), 1);
        drop(b);
    }

    #[test]
    fn test_managed_box_borrows() {
        let v = ManagedBox::new(42usize);
        let r = v.borrow().unwrap();
        drop(v);
        assert!(r.read().is_none());
    }

    #[test]
    fn test_fuzz_managed_box() {
        let builders = [
            || DynamicManagedBox::new(1u8).ok().unwrap(),
            || DynamicManagedBox::new(2u16).ok().unwrap(),
            || DynamicManagedBox::new(3u32).ok().unwrap(),
            || DynamicManagedBox::new(4u64).ok().unwrap(),
            || DynamicManagedBox::new(5u128).ok().unwrap(),
            || DynamicManagedBox::new([42u8; 1000]).ok().unwrap(),
            || DynamicManagedBox::new([42u8; 10000]).ok().unwrap(),
            || DynamicManagedBox::new([42u8; 100000]).ok().unwrap(),
        ];
        let mut boxes = std::array::from_fn::<_, 50, _>(|_| None);
        for index in 0..100 {
            let source = index % builders.len();
            let target = index % boxes.len();
            boxes[target] = Some((builders[source])());
        }
    }
}