// buddy_slab_allocator/slab/slab_byte_allocator.rs

//! Slab byte allocator implementation for Axvisor.
//!
//! This module implements an improved slab allocator for small object allocation
//! with pooled linked lists, inspired by asterinas design.

use core::alloc::Layout;
use core::ptr::NonNull;

#[cfg(feature = "log")]
use log::warn;

use crate::{AllocError, AllocResult, ByteAllocator};

// Re-export public types from sibling modules
pub use super::slab_cache::SlabCache;
pub use super::slab_node::SlabNode;
/// Size classes for slab allocation
///
/// Each variant's discriminant is its object size in bytes, so converting
/// with `as usize` yields the size directly (see [`SizeClass::size`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(usize)]
pub enum SizeClass {
    Bytes8 = 8,
    Bytes16 = 16,
    Bytes32 = 32,
    Bytes64 = 64,
    Bytes128 = 128,
    Bytes256 = 256,
    Bytes512 = 512,
    Bytes1024 = 1024,
    Bytes2048 = 2048,
}

impl SizeClass {
    /// Number of size classes (must match the variant count above).
    pub const COUNT: usize = 9;
    /// Largest request the slab layer serves; bigger layouts yield `None`.
    const MAX_OBJ_SIZE: usize = 2048;

    /// Select size class from memory layout
    ///
    /// The effective request is `max(size, align)`, so the chosen class is
    /// large enough for both dimensions of the layout. Returns `None` when
    /// the request exceeds `MAX_OBJ_SIZE`.
    pub fn from_layout(layout: Layout) -> Option<Self> {
        let required_size = layout.size().max(layout.align());

        if required_size > Self::MAX_OBJ_SIZE {
            // BUGFIX: `warn!` is only imported under the `log` feature
            // (`#[cfg(feature = "log")] use log::warn;` at the top of the
            // file), so the call must be gated the same way or the build
            // breaks when the feature is disabled.
            #[cfg(feature = "log")]
            warn!(
                "Invalid layout: size={}, align={}",
                layout.size(),
                layout.align()
            );
            return None;
        }

        Some(match required_size {
            0..=8 => SizeClass::Bytes8,
            9..=16 => SizeClass::Bytes16,
            17..=32 => SizeClass::Bytes32,
            33..=64 => SizeClass::Bytes64,
            65..=128 => SizeClass::Bytes128,
            129..=256 => SizeClass::Bytes256,
            257..=512 => SizeClass::Bytes512,
            513..=1024 => SizeClass::Bytes1024,
            1025..=2048 => SizeClass::Bytes2048,
            // Unreachable: `required_size <= MAX_OBJ_SIZE` was checked above,
            // but the compiler cannot see that the ranges cover all of `usize`.
            _ => unreachable!(
                "Invalid layout: size={}, align={}",
                layout.size(),
                layout.align()
            ),
        })
    }

    /// Object size in bytes (the enum discriminant).
    pub fn size(&self) -> usize {
        *self as usize
    }

    /// Dense index in `[0, COUNT)`, used to address the per-class cache array.
    pub fn to_index(&self) -> usize {
        match self {
            SizeClass::Bytes8 => 0,
            SizeClass::Bytes16 => 1,
            SizeClass::Bytes32 => 2,
            SizeClass::Bytes64 => 3,
            SizeClass::Bytes128 => 4,
            SizeClass::Bytes256 => 5,
            SizeClass::Bytes512 => 6,
            SizeClass::Bytes1024 => 7,
            SizeClass::Bytes2048 => 8,
        }
    }

    /// Inverse of [`Self::to_index`]; `None` for out-of-range indices.
    pub fn from_index(index: usize) -> Option<Self> {
        match index {
            0 => Some(SizeClass::Bytes8),
            1 => Some(SizeClass::Bytes16),
            2 => Some(SizeClass::Bytes32),
            3 => Some(SizeClass::Bytes64),
            4 => Some(SizeClass::Bytes128),
            5 => Some(SizeClass::Bytes256),
            6 => Some(SizeClass::Bytes512),
            7 => Some(SizeClass::Bytes1024),
            8 => Some(SizeClass::Bytes2048),
            _ => None,
        }
    }
}
/// Page allocator trait for slab allocator
///
/// Implemented by the underlying page-level allocator; the slab layer calls
/// into it to grow and shrink its backing storage.
pub trait PageAllocatorForSlab {
    /// Allocate `num_pages` contiguous pages with the given `alignment`,
    /// returning the start address on success.
    /// NOTE(review): presumably `alignment` is in bytes — confirm against
    /// the implementing page allocator.
    fn alloc_pages(&mut self, num_pages: usize, alignment: usize) -> AllocResult<usize>;
    /// Return the `num_pages` pages starting at address `pos` to the
    /// page allocator.
    fn dealloc_pages(&mut self, pos: usize, num_pages: usize);
}
/// Slab byte allocator with pooled linked lists
pub struct SlabByteAllocator<const PAGE_SIZE: usize = { crate::DEFAULT_PAGE_SIZE }> {
    // One cache per size class, indexed by `SizeClass::to_index()`.
    caches: [SlabCache; SizeClass::COUNT],
    // Backing page allocator; `None` until `set_page_allocator` is called.
    // Stored as a raw pointer: the registrant must keep it alive and
    // serialize access (see the Send/Sync impls in this file).
    page_allocator: Option<*mut dyn PageAllocatorForSlab>,
    // Total bytes of pages currently held by the slab caches.
    total_bytes: usize,
    // Bytes currently handed out to callers, accounted as
    // `layout.size().max(layout.align())` per allocation.
    allocated_bytes: usize,
}
// SAFETY: SlabByteAllocator is used behind SpinNoIrq locks
// The raw `page_allocator` pointer is what makes this type !Send/!Sync by
// default; these impls are sound only if every user serializes access via
// external locking — TODO(review): confirm all call sites wrap the
// allocator in a lock as the comment above claims.
unsafe impl<const PAGE_SIZE: usize> Send for SlabByteAllocator<PAGE_SIZE> {}
unsafe impl<const PAGE_SIZE: usize> Sync for SlabByteAllocator<PAGE_SIZE> {}
impl<const PAGE_SIZE: usize> SlabByteAllocator<PAGE_SIZE> {
    /// Create an empty allocator: one empty cache per size class, no
    /// backing page allocator, zeroed statistics. `const fn` so it can
    /// initialize statics.
    pub const fn new() -> Self {
        Self {
            // Order must match `SizeClass::to_index()` (Bytes8 at 0 … Bytes2048 at 8).
            caches: [
                SlabCache::new(SizeClass::Bytes8),
                SlabCache::new(SizeClass::Bytes16),
                SlabCache::new(SizeClass::Bytes32),
                SlabCache::new(SizeClass::Bytes64),
                SlabCache::new(SizeClass::Bytes128),
                SlabCache::new(SizeClass::Bytes256),
                SlabCache::new(SizeClass::Bytes512),
                SlabCache::new(SizeClass::Bytes1024),
                SlabCache::new(SizeClass::Bytes2048),
            ],
            page_allocator: None,
            total_bytes: 0,
            allocated_bytes: 0,
        }
    }

    /// Initialize the allocator
    // No-op here; presumably kept for API symmetry with sibling
    // allocators — confirm against the other allocator modules.
    pub fn init(&mut self) {}

    /// Register the backing page allocator used to grow the slab caches.
    /// The pointee must outlive this allocator; `alloc`/`dealloc` fail or
    /// no-op until this has been called.
    pub fn set_page_allocator(&mut self, page_allocator: *mut dyn PageAllocatorForSlab) {
        self.page_allocator = Some(page_allocator);
    }
}
impl<const PAGE_SIZE: usize> Default for SlabByteAllocator<PAGE_SIZE> {
    /// Equivalent to [`SlabByteAllocator::new`].
    fn default() -> Self {
        Self::new()
    }
}
154impl<const PAGE_SIZE: usize> ByteAllocator for SlabByteAllocator<PAGE_SIZE> {
155    fn alloc(&mut self, layout: Layout) -> AllocResult<NonNull<u8>> {
156        let size_class = SizeClass::from_layout(layout).ok_or(AllocError::InvalidParam)?;
157
158        let Some(page_allocator_ptr) = self.page_allocator else {
159            return Err(AllocError::NoMemory);
160        };
161
162        let page_allocator = unsafe { &mut *page_allocator_ptr };
163        let cache = &mut self.caches[size_class.to_index()];
164
165        let (obj_addr, page_bytes) = cache.alloc_object(page_allocator, PAGE_SIZE)?;
166        self.allocated_bytes += layout.size().max(layout.align());
167        self.total_bytes += page_bytes;
168
169        Ok(unsafe { NonNull::new_unchecked(obj_addr as *mut u8) })
170    }
171
172    fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
173        let size_class = SizeClass::from_layout(layout).expect("Invalid layout");
174        let obj_addr = ptr.as_ptr() as usize;
175
176        let Some(page_allocator_ptr) = self.page_allocator else {
177            return;
178        };
179
180        let page_allocator = unsafe { &mut *page_allocator_ptr };
181        let cache = &mut self.caches[size_class.to_index()];
182
183        let (freed_bytes, actually_freed) =
184            cache.dealloc_object(obj_addr, page_allocator, PAGE_SIZE);
185
186        // Only update allocated_bytes if this was not a double-free
187        if actually_freed {
188            self.allocated_bytes = self
189                .allocated_bytes
190                .saturating_sub(layout.size().max(layout.align()));
191        }
192        self.total_bytes = self.total_bytes.saturating_sub(freed_bytes);
193    }
194
195    fn total_bytes(&self) -> usize {
196        self.total_bytes
197    }
198
199    fn used_bytes(&self) -> usize {
200        self.allocated_bytes
201    }
202
203    fn available_bytes(&self) -> usize {
204        self.total_bytes.saturating_sub(self.allocated_bytes)
205    }
206}
207
#[cfg(test)]
mod tests {
    use super::*;

    /// `from_layout` picks the smallest fitting class and rejects oversize.
    #[test]
    fn test_size_class() {
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(8, 8).unwrap()),
            Some(SizeClass::Bytes8)
        );
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(16, 8).unwrap()),
            Some(SizeClass::Bytes16)
        );
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(2048, 8).unwrap()),
            Some(SizeClass::Bytes2048)
        );
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(2049, 8).unwrap()),
            None
        );
    }

    #[test]
    fn test_size_class_boundaries() {
        // Test all size class boundaries
        assert_eq!(SizeClass::Bytes8.size(), 8);
        assert_eq!(SizeClass::Bytes16.size(), 16);
        assert_eq!(SizeClass::Bytes32.size(), 32);
        assert_eq!(SizeClass::Bytes64.size(), 64);
        assert_eq!(SizeClass::Bytes128.size(), 128);
        assert_eq!(SizeClass::Bytes256.size(), 256);
        assert_eq!(SizeClass::Bytes512.size(), 512);
        assert_eq!(SizeClass::Bytes1024.size(), 1024);
        assert_eq!(SizeClass::Bytes2048.size(), 2048);
    }

    /// Requests one byte past a class boundary fall into the next class.
    #[test]
    fn test_size_class_transitions() {
        for (size, expected) in [
            (9, SizeClass::Bytes16),
            (17, SizeClass::Bytes32),
            (33, SizeClass::Bytes64),
            (65, SizeClass::Bytes128),
            (129, SizeClass::Bytes256),
            (257, SizeClass::Bytes512),
            (513, SizeClass::Bytes1024),
            (1025, SizeClass::Bytes2048),
        ] {
            assert_eq!(
                SizeClass::from_layout(Layout::from_size_align(size, 1).unwrap()),
                Some(expected)
            );
        }
        // Zero-size layouts still map to the smallest class.
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(0, 1).unwrap()),
            Some(SizeClass::Bytes8)
        );
    }

    /// `to_index` and `from_index` are inverses over all valid indices.
    #[test]
    fn test_index_roundtrip() {
        for i in 0..SizeClass::COUNT {
            let class = SizeClass::from_index(i).expect("index in range");
            assert_eq!(class.to_index(), i);
        }
        assert_eq!(SizeClass::from_index(SizeClass::COUNT), None);
    }

    #[test]
    fn test_size_class_alignment_limits() {
        // Alignment too large should return None
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(64, 4096).unwrap()),
            None
        );
        // Alignment larger than size dominates class selection.
        assert_eq!(
            SizeClass::from_layout(Layout::from_size_align(8, 128).unwrap()),
            Some(SizeClass::Bytes128)
        );
    }
}