// byte_slab/slab_arc.rs
//! A reference counted allocation
//!
//! A `SlabArc` is obtained by consuming a `SlabBox`. As the underlying allocation
//! may be shared, a `SlabArc` does not allow for the contents to be modified.
//! `SlabArc`s may be cheaply cloned (which increases the reference count), allowing
//! for multiple (immutable) access to the same data. The underlying memory is freed
//! for reuse automatically when the reference count reaches zero.

use crate::slab_slice_arc::SlabSliceArc;
use core::{
    ops::Deref,
    sync::atomic::Ordering,
};
use core::marker::PhantomData;

use crate::byte_slab::BSlab;
17
// TODO: This doesn't HAVE to be 'static, but it makes my life easier
// if you want not-that, I guess open an issue and let me know?
/// A reference counted, BSlab allocated chunk of bytes.
///
/// `SlabArc`s implement the `Deref` trait for access to the underlying allocation
///
/// ## Example
///
/// ```rust
/// use byte_slab::BSlab;
/// use std::thread::spawn;
///
/// static SLAB: BSlab<4, 128> = BSlab::new();
///
/// fn main() {
///     // Initialize the byte slab
///     SLAB.init().unwrap();
///
///     let mut box_1 = SLAB.alloc_box().unwrap();
///
///     // Fill
///     assert_eq!(box_1.len(), 128);
///     box_1.iter_mut().enumerate().for_each(|(i, x)| *x = i as u8);
///
///     // Convert the Box into an Arc for sharing
///     let arc_1 = box_1.into_arc();
///
///     // And we can cheaply clone by increasing the reference count
///     let arc_1_1 = arc_1.clone();
///
///     // We can now send the arc to another thread
///     let hdl = spawn(move || {
///         arc_1.iter().enumerate().for_each(|(i, x)| assert_eq!(i as u8, *x));
///     });
///
///     // ... while still retaining a local handle to the same data
///     arc_1_1.iter().enumerate().for_each(|(i, x)| assert_eq!(i as u8, *x));
///
///     hdl.join();
/// }
/// ```
pub struct SlabArc<const N: usize, const SZ: usize> {
    // The slab this allocation was taken from; `'static` per the TODO above.
    pub(crate) slab: &'static BSlab<N, SZ>,
    // Index of this allocation's slot within the slab.
    pub(crate) idx: usize,
}
63
/// A borrowed description of the memory region backing a `SlabArc`.
///
/// Produced by [`SlabArc::rerooter_key`]: it captures the start and
/// one-past-the-end pointers of the allocation, a type-erased pointer to the
/// owning slab, and the slot index. The `'a` lifetime ties the key to the
/// `SlabArc` it was created from, so the pointers cannot outlive the handle.
///
/// NOTE(review): the consumer of this key lives elsewhere in the crate —
/// presumably it is used to recover ("re-root") an arc from a raw slice;
/// confirm against the call site.
pub struct RerooterKey<'a> {
    // First byte of the allocation.
    pub(crate) start: *const u8,
    // One past the last byte of the allocation.
    pub(crate) end: *const u8,
    // Borrow marker: keeps the source `SlabArc` alive for 'a.
    pub(crate) pdlt: PhantomData<&'a ()>,
    // Type-erased `*const BSlab<N, SZ>` of the owning slab.
    pub(crate) slab: *const (),
    // Slot index within the slab.
    pub(crate) idx: usize,
}
71
72impl<const N: usize, const SZ: usize> SlabArc<N, SZ> {
73 /// Create a `SlabSliceArc` from this `SlabArc`, with a full view
74 /// of the underlying data
75 pub fn full_sub_slice_arc(&self) -> SlabSliceArc<N, SZ> {
76 SlabSliceArc {
77 arc: self.clone(),
78 start: 0,
79 len: self.len(),
80 }
81 }
82
83 pub fn rerooter_key<'a>(&'a self) -> RerooterKey<'a> {
84 let slice = self.deref();
85
86 RerooterKey {
87 start: slice.as_ptr(),
88 end: unsafe { slice.as_ptr().add(slice.len()) },
89 pdlt: PhantomData,
90 slab: (self.slab as *const BSlab<N, SZ>).cast(),
91 idx: self.idx,
92 }
93 }
94
95 /// Create a `SlabSliceArc` from this `SlabArc`, with a partial view
96 /// of the underlying data.
97 ///
98 /// This function will fail if `start` and `len` do not describe a valid
99 /// region of the `SlabArc`.
100 pub fn sub_slice_arc(&self, start: usize, len: usize) -> Result<SlabSliceArc<N, SZ>, ()> {
101 let new_arc = self.clone();
102
103 let good_start = start < SZ;
104 let good_len = (start + len) <= SZ;
105
106 if good_start && good_len {
107 let new_slice_arc = SlabSliceArc {
108 arc: new_arc,
109 start,
110 len,
111 };
112 Ok(new_slice_arc)
113 } else {
114 Err(())
115 }
116 }
117}
118
impl<const N: usize, const SZ: usize> Drop for SlabArc<N, SZ> {
    /// Decrement the allocation's reference count; when this handle was the
    /// last one, return the slot index to the slab's free queue.
    fn drop(&mut self) {
        // drop refct
        // SAFETY: a live SlabArc implies `idx` refers to a valid, allocated
        // slot in `slab` — presumably established at allocation; confirm
        // against `get_idx_unchecked`'s contract.
        let arc = unsafe { self.slab.get_idx_unchecked(self.idx).arc };
        // fetch_sub returns the PREVIOUS value, hence the `== 1` test below.
        let refct = arc.fetch_sub(1, Ordering::SeqCst);

        // We just dropped the refct to zero. Release the structure
        if refct == 1 {
            if let Ok(q) = self.slab.get_q() {
                // Spin until the index is accepted back into the free queue.
                // NOTE(review): this assumes enqueue failure is transient
                // (queue momentarily contended/full) — verify, since an
                // unrecoverable error here would loop forever.
                while let Err(_) = q.enqueue(self.idx) {}
            }

        }
    }
}
134
135impl<const N: usize, const SZ: usize> Deref for SlabArc<N, SZ> {
136 type Target = [u8; SZ];
137
138 fn deref(&self) -> &Self::Target {
139 let buf = unsafe { self.slab.get_idx_unchecked(self.idx).buf };
140
141 unsafe { &*buf.get() }
142 }
143}
144
145impl<const N: usize, const SZ: usize> Clone for SlabArc<N, SZ> {
146 fn clone(&self) -> Self {
147 let arc = unsafe { self.slab.get_idx_unchecked(self.idx).arc };
148
149 let old_ct = arc.fetch_add(1, Ordering::SeqCst);
150 assert!(old_ct >= 1);
151
152 Self {
153 slab: self.slab,
154 idx: self.idx,
155 }
156 }
157}