1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
use crate::slab_slice_arc::SlabSliceArc;
use core::{
ops::Deref,
sync::atomic::Ordering,
};
use crate::byte_slab::BSlab;
/// A reference-counted handle to one fixed-size buffer slot of a [`BSlab`].
///
/// Cloning bumps the slot's atomic reference count; dropping the last
/// handle returns the slot's index to the slab's free queue (see the
/// `Drop` impl below). `N` is the number of slots in the slab and `SZ`
/// is the size in bytes of each slot's buffer.
pub struct SlabArc<const N: usize, const SZ: usize> {
    // The slab that owns the backing storage. `'static`: the slab is
    // expected to live for the remainder of the program.
    pub(crate) slab: &'static BSlab<N, SZ>,
    // Index of the slot within `slab` that this handle keeps alive.
    pub(crate) idx: usize,
}
impl<const N: usize, const SZ: usize> SlabArc<N, SZ> {
    /// Create a [`SlabSliceArc`] covering the entire `SZ`-byte buffer.
    ///
    /// The returned slice shares ownership with `self` (the refcount is
    /// incremented via `clone`).
    pub fn full_sub_slice_arc(&self) -> SlabSliceArc<N, SZ> {
        SlabSliceArc {
            arc: self.clone(),
            start: 0,
            // `len()` comes from `Deref<Target = [u8; SZ]>`, so this is SZ.
            len: self.len(),
        }
    }

    /// Create a [`SlabSliceArc`] viewing `len` bytes starting at `start`.
    ///
    /// The returned slice shares ownership with `self`.
    ///
    /// # Errors
    ///
    /// Returns `Err(())` when `start >= SZ`, or when `start + len`
    /// exceeds `SZ` (including the case where the sum overflows `usize`).
    pub fn sub_slice_arc(&self, start: usize, len: usize) -> Result<SlabSliceArc<N, SZ>, ()> {
        let good_start = start < SZ;
        // Use checked_add: a plain `start + len` wraps in release builds,
        // which could wrongly accept an out-of-bounds slice.
        let good_len = start.checked_add(len).map_or(false, |end| end <= SZ);
        if good_start && good_len {
            // Clone (refcount bump) only after validation succeeds; the
            // error path performs no atomic traffic.
            Ok(SlabSliceArc {
                arc: self.clone(),
                start,
                len,
            })
        } else {
            Err(())
        }
    }
}
impl<const N: usize, const SZ: usize> Drop for SlabArc<N, SZ> {
    /// Release one reference to the slot; the last holder recycles the
    /// slot index back into the slab's free queue.
    fn drop(&mut self) {
        // SAFETY(review): relies on `idx` being a valid, live slot of
        // `slab` for this handle's lifetime — an invariant established
        // at construction, which is not visible in this chunk; confirm
        // at the allocation sites.
        let arc = unsafe { self.slab.get_idx_unchecked(self.idx).arc };
        // fetch_sub returns the PREVIOUS count, so a result of 1 means
        // this handle was the last outstanding reference.
        let refct = arc.fetch_sub(1, Ordering::SeqCst);
        if refct == 1 {
            if let Ok(q) = self.slab.get_q() {
                // Spin until the index is enqueued; presumably the free
                // queue has capacity for every slot, so this should not
                // loop for long — TODO confirm against BSlab's queue size.
                while let Err(_) = q.enqueue(self.idx) {}
            }
        }
    }
}
impl<const N: usize, const SZ: usize> Deref for SlabArc<N, SZ> {
    type Target = [u8; SZ];

    /// Borrow the slot's full `SZ`-byte buffer.
    fn deref(&self) -> &Self::Target {
        // SAFETY(review): `idx` is assumed valid for `slab` (invariant
        // from construction, outside this chunk — confirm at call sites).
        let buf = unsafe { self.slab.get_idx_unchecked(self.idx).buf };
        // SAFETY(review): dereferencing the cell's raw pointer presumes
        // no `&mut` access to this buffer can coexist with shared arcs —
        // verify against the slab's writer-exclusivity guarantees.
        unsafe { &*buf.get() }
    }
}
impl<const N: usize, const SZ: usize> Clone for SlabArc<N, SZ> {
    /// Increment the slot's refcount and hand out another handle to the
    /// same slab slot.
    fn clone(&self) -> Self {
        // SAFETY(review): `idx` is assumed to be a valid, live slot of
        // `slab` (invariant from construction, not visible in this chunk).
        let arc = unsafe { self.slab.get_idx_unchecked(self.idx).arc };
        // fetch_add returns the PREVIOUS count. It must be >= 1 here,
        // because `self` itself holds a reference; observing 0 would mean
        // the slot was already freed (use-after-free), hence the assert.
        let old_ct = arc.fetch_add(1, Ordering::SeqCst);
        assert!(old_ct >= 1);
        Self {
            slab: self.slab,
            idx: self.idx,
        }
    }
}