//! attachable_slab_allocator/slab_box.rs — owning smart pointer for slab-allocated values.
use crate::locks::LockTrait;
8use crate::mem_lay::get_layout;
9use crate::slab::Slab;
10use crate::utils::NonNullEx;
11use core::alloc::Layout;
12use core::marker::PhantomData;
13use core::ops::{Deref, DerefMut};
14use core::ptr::{NonNull, drop_in_place};
15
/// An owning smart pointer to a `T` living in a slot of a [`Slab`] arena.
///
/// Behaves like `Box<T>`: it dereferences to `T` and, on drop, runs `T`'s
/// destructor and returns the slot to the slab (see the `Drop` impl below).
///
/// `SLAB_SIZE` is the byte size fed to `get_layout` to recover the slab's
/// `Layout` when freeing — NOTE(review): presumably it must match the size
/// the slot was allocated with; confirm against the allocating code path.
pub struct SlabBox<T, LOCK, const SLAB_SIZE: usize>
where
    LOCK: LockTrait,
{
    // Pointer to the live `T` inside a slab slot; never null.
    ptr: NonNull<T>,
    // Carries the `LOCK` type parameter without storing a value, so the
    // matching `Slab<T, LOCK>` can be named when the slot is freed.
    _marker: PhantomData<LOCK>,
}
28
// SAFETY: moving a `SlabBox` to another thread moves ownership of the boxed
// `T`, so `T: Send` is required (the destination thread may drop it). Dropping
// frees the slot through the shared `Slab<T, LOCK>`, so the lock type must be
// usable from multiple threads, hence `LOCK: Sync` — NOTE(review): assumes
// `Slab::free_slot` synchronizes via `LOCK`; confirm in slab.rs.
unsafe impl<T, LOCK, const SLAB_SIZE: usize> Send for SlabBox<T, LOCK, SLAB_SIZE>
where
    T: Send,
    LOCK: LockTrait + Sync,
{
}
// SAFETY: `&SlabBox<T, ..>` only exposes `&T` (via `Deref`) and the raw
// pointer, so sharing it across threads is sound exactly when `T: Sync`.
// `LOCK: Sync` is required for the same reason as in the `Send` impl: the
// eventual free goes through the shared slab lock.
unsafe impl<T, LOCK, const SLAB_SIZE: usize> Sync for SlabBox<T, LOCK, SLAB_SIZE>
where
    T: Sync,
    LOCK: LockTrait + Sync,
{
}
41
impl<T, LOCK, const SLAB_SIZE: usize> SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    // Layout of the backing slab, computed from `SLAB_SIZE` at compile time;
    // passed back to `Slab::free_slot` so it can locate the slot's slab.
    const SLAB_LAYOUT: Layout = get_layout(SLAB_SIZE);

    /// Wraps a raw slot pointer in an owning `SlabBox`.
    ///
    /// NOTE(review): this is effectively an unsafe constructor even though it
    /// is not marked `unsafe` — `ptr` must point to an initialized `T` in a
    /// slot obtained from the matching `Slab<T, LOCK>` with `SLAB_LAYOUT`, and
    /// ownership must not be duplicated, because `Drop` will run `T`'s
    /// destructor and free the slot. Confirm all callers uphold this.
    pub fn new(ptr: NonNull<T>) -> Self {
        Self {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Destroys the pointee and returns its slot to the slab.
    ///
    /// Order matters: the value is dropped in place first, then the slot is
    /// handed back to the slab. Called exactly once, from `Drop`.
    fn free_ptr(&mut self) {
        // SAFETY: `ptr` points to a live `T` exclusively owned by this box
        // (invariant established at construction); after `drop_in_place` the
        // slot holds no live value and may be returned to the slab.
        unsafe {
            drop_in_place(self.ptr.as_ptr());
            // A failure here would mean corrupted slab bookkeeping — an
            // internal invariant violation, hence the `expect`.
            Slab::<T, LOCK>::free_slot(self.ptr, Self::SLAB_LAYOUT).expect("internal error");
        }
    }

    /// Returns the raw pointer to the pointee without giving up ownership.
    ///
    /// The pointer is valid only while `self` is alive; the caller must not
    /// free it or use it after the box is dropped.
    pub fn as_ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }
}
75
impl<T, LOCK, const SLAB_SIZE: usize> Drop for SlabBox<T, LOCK, SLAB_SIZE>
where
    LOCK: LockTrait,
{
    /// Runs `T`'s destructor and returns the slot to the owning slab
    /// (delegates to `free_ptr`, which documents the ordering invariants).
    fn drop(&mut self) {
        self.free_ptr();
    }
}
85
86impl<T, LOCK, const SLAB_SIZE: usize> Deref for SlabBox<T, LOCK, SLAB_SIZE>
87where
88 LOCK: LockTrait,
89{
90 type Target = T;
91
92 fn deref(&self) -> &Self::Target {
94 self.ptr.unsafe_ref()
95 }
96}
97
98impl<T, LOCK, const SLAB_SIZE: usize> DerefMut for SlabBox<T, LOCK, SLAB_SIZE>
99where
100 LOCK: LockTrait,
101{
102 fn deref_mut(&mut self) -> &mut Self::Target {
104 self.ptr.unsafe_mut_ref()
105 }
106}