1use crate::*;
4
/// Compile-time `min` for `usize`.
///
/// Builds a two-element array and indexes it with the comparison result
/// (`a > b` casts to `1`, selecting `b`). This indexing trick avoids an
/// `if` expression, which was not permitted in `const fn` bodies on the
/// older compilers this `unstable`-gated variant targets.
#[cfg(feature = "unstable")]
const fn cmin(a: usize, b: usize) -> usize {
 [a, b][(a > b) as usize]
}
19
/// Runtime `min` for `usize` — the stable fallback of the `const fn`
/// variant above; same result as `core::cmp::min` for any pair of values.
#[cfg(not(feature = "unstable"))]
fn cmin(a: usize, b: usize) -> usize {
    if a <= b {
        a
    } else {
        b
    }
}
25
/// A slab allocator for objects of a single size class.
///
/// Pages donated via `refill` are tracked on three intrusive lists
/// according to their fill state: empty, partially used, and full.
pub struct SCAllocator<'a, P: AllocablePage> {
    /// Size (in bytes) of the objects this allocator hands out.
    pub(crate) size: usize,
    /// Allocations served from partial pages since the last rebalance
    /// (compared against `REBALANCE_COUNT` in `try_allocate_from_pagelist`).
    pub(crate) allocation_count: usize,
    /// How many objects fit in one page (capped by `new_sc_allocator!`).
    pub(crate) obj_per_page: usize,
    /// Pages with no live allocations.
    pub(crate) empty_slabs: PageList<'a, P>,
    /// Pages that are partially allocated.
    pub(crate) slabs: PageList<'a, P>,
    /// Pages with no free slots left.
    pub(crate) full_slabs: PageList<'a, P>,
}
61
/// Expands to an `SCAllocator` literal for objects of `$size` bytes.
///
/// A macro rather than a helper function so the exact same body can back
/// both the `const fn new` (feature `unstable`) and the plain `fn new`
/// constructor. Expects `P` (the page type) to be in scope at the
/// expansion site.
///
/// `obj_per_page` is the usable page bytes divided by the object size,
/// capped at 8 * 64 = 512 — presumably the number of slots the page's
/// allocation bitfield can track; TODO confirm against the
/// `AllocablePage` bitfield implementation.
macro_rules! new_sc_allocator {
    ($size:expr) => {
        SCAllocator {
            size: $size,
            allocation_count: 0,
            obj_per_page: cmin((P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD) / $size, 8 * 64),
            empty_slabs: PageList::new(),
            slabs: PageList::new(),
            full_slabs: PageList::new(),
        }
    };
}
76
77impl<'a, P: AllocablePage> SCAllocator<'a, P> {
78 const REBALANCE_COUNT: usize = 64;
79
80 #[cfg(feature = "unstable")]
82 pub const fn new(size: usize) -> SCAllocator<'a, P> {
83 new_sc_allocator!(size)
84 }
85
86 #[cfg(not(feature = "unstable"))]
87 pub fn new(size: usize) -> SCAllocator<'a, P> {
88 new_sc_allocator!(size)
89 }
90
91 pub fn size(&self) -> usize {
93 self.size
94 }
95
96 fn insert_partial_slab(&mut self, new_head: &'a mut P) {
98 self.slabs.insert_front(new_head);
99 }
100
101 fn insert_empty(&mut self, new_head: &'a mut P) {
103 assert_eq!(
104 new_head as *const P as usize % P::SIZE,
105 0,
106 "Inserted page is not aligned to page-size."
107 );
108 self.empty_slabs.insert_front(new_head);
109 }
110
111 pub(crate) fn check_page_assignments(&mut self) {
115 for slab_page in self.full_slabs.iter_mut() {
116 if !slab_page.is_full() {
117 trace!("move {:p} full -> partial", slab_page);
119 self.move_full_to_partial(slab_page);
120 }
121 }
122
123 for slab_page in self.slabs.iter_mut() {
124 if slab_page.is_empty(self.obj_per_page) {
125 trace!("move {:p} partial -> empty", slab_page);
127 self.move_to_empty(slab_page);
128 }
129 }
130 }
131
132 fn move_to_empty(&mut self, page: &'a mut P) {
134 let page_ptr = page as *const P;
135
136 debug_assert!(self.slabs.contains(page_ptr));
137 debug_assert!(
138 !self.empty_slabs.contains(page_ptr),
139 "Page {:p} already in emtpy_slabs",
140 page_ptr
141 );
142
143 self.slabs.remove_from_list(page);
144 self.empty_slabs.insert_front(page);
145
146 debug_assert!(!self.slabs.contains(page_ptr));
147 debug_assert!(self.empty_slabs.contains(page_ptr));
148 }
149
150 fn move_partial_to_full(&mut self, page: &'a mut P) {
152 let page_ptr = page as *const P;
153
154 debug_assert!(self.slabs.contains(page_ptr));
155 debug_assert!(!self.full_slabs.contains(page_ptr));
156
157 self.slabs.remove_from_list(page);
158 self.full_slabs.insert_front(page);
159
160 debug_assert!(!self.slabs.contains(page_ptr));
161 debug_assert!(self.full_slabs.contains(page_ptr));
162 }
163
164 fn move_full_to_partial(&mut self, page: &'a mut P) {
166 let page_ptr = page as *const P;
167
168 debug_assert!(!self.slabs.contains(page_ptr));
169 debug_assert!(self.full_slabs.contains(page_ptr));
170
171 self.full_slabs.remove_from_list(page);
172 self.slabs.insert_front(page);
173
174 debug_assert!(self.slabs.contains(page_ptr));
175 debug_assert!(!self.full_slabs.contains(page_ptr));
176 }
177
178 fn try_allocate_from_pagelist(&mut self, sc_layout: Layout) -> *mut u8 {
186 for slab_page in self.slabs.iter_mut() {
191 let ptr = slab_page.allocate(sc_layout);
192 if !ptr.is_null() {
193 if slab_page.is_full() {
194 trace!("move {:p} partial -> full", slab_page);
195 self.move_partial_to_full(slab_page);
196 }
197 self.allocation_count += 1;
198 return ptr;
199 } else {
200 continue;
201 }
202 }
203
204 if self.allocation_count > SCAllocator::<P>::REBALANCE_COUNT {
206 self.check_page_assignments();
207 self.allocation_count = 0;
208 }
209
210 ptr::null_mut()
211 }
212
213 pub fn try_reclaim_pages<F>(&mut self, to_reclaim: usize, dealloc: &mut F) -> usize
214 where
215 F: FnMut(*mut P),
216 {
217 self.check_page_assignments();
218 let mut reclaimed = 0;
219 while reclaimed < to_reclaim {
220 if let Some(page) = self.empty_slabs.pop() {
221 dealloc(page as *mut P);
222 reclaimed += 1;
223 } else {
224 break;
225 }
226 }
227
228 reclaimed
229 }
230
231 pub unsafe fn refill(&mut self, page: &'a mut P) {
236 page.bitfield_mut()
237 .initialize(self.size, P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD);
238 *page.prev() = Rawlink::none();
239 *page.next() = Rawlink::none();
240 trace!("adding page to SCAllocator {:p}", page);
241 self.insert_empty(page);
242 }
243
244 pub fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocationError> {
252 trace!(
253 "SCAllocator({}) is trying to allocate {:?}",
254 self.size,
255 layout
256 );
257 assert!(layout.size() <= self.size);
258 assert!(self.size <= (P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD));
259 let new_layout = unsafe { Layout::from_size_align_unchecked(self.size, layout.align()) };
260 assert!(new_layout.size() >= layout.size());
261
262 let ptr = {
263 let ptr = self.try_allocate_from_pagelist(new_layout);
266 if ptr.is_null() && self.empty_slabs.head.is_some() {
267 let empty_page = self.empty_slabs.pop().expect("We checked head.is_some()");
269 debug_assert!(!self.empty_slabs.contains(empty_page));
270
271 let ptr = empty_page.allocate(layout);
272 debug_assert!(!ptr.is_null(), "Allocation must have succeeded here.");
273
274 trace!(
275 "move {:p} empty -> partial empty count {}",
276 empty_page,
277 self.empty_slabs.elements
278 );
279 self.insert_partial_slab(empty_page);
281 ptr
282 } else {
283 ptr
284 }
285 };
286
287 let res = NonNull::new(ptr).ok_or(AllocationError::OutOfMemory);
288
289 if !ptr.is_null() {
290 trace!(
291 "SCAllocator({}) allocated ptr=0x{:x}",
292 self.size,
293 ptr as usize
294 );
295 }
296
297 res
298 }
299
300 pub fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) -> Result<(), AllocationError> {
306 assert!(layout.size() <= self.size);
307 assert!(self.size <= (P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD));
308 trace!(
309 "SCAllocator({}) is trying to deallocate ptr = {:p} layout={:?} P.size= {}",
310 self.size,
311 ptr,
312 layout,
313 P::SIZE
314 );
315
316 let page = (ptr.as_ptr() as usize) & !(P::SIZE - 1) as usize;
317
318 let slab_page = unsafe { mem::transmute::<VAddr, &'a mut P>(page) };
321 let new_layout = unsafe { Layout::from_size_align_unchecked(self.size, layout.align()) };
322
323 let ret = slab_page.deallocate(ptr, new_layout);
324 debug_assert!(ret.is_ok(), "Slab page deallocate won't fail at the moment");
325 ret
326 }
327}