1use crate::{MemAlloc, SIZE_64K};
2use core::ptr::null_mut;
3
/// Largest request the slab allocator can serve: a whole `Slab65512`
/// payload minus its 8-byte back-pointer header.
pub(crate) const MAX_SLAB_SIZE: usize = 65512 - 8;
5
/// Slab allocator layered on top of a 64KiB page allocator.
///
/// For every size class there are two intrusive doubly linked lists of
/// 64KiB slabs: a `*_partial` list (slabs with at least one free slot)
/// and a `*_full` list (slabs with no free slot). A null head means the
/// list is empty.
pub(crate) struct SlabAllocator<PAGEALLOC: MemAlloc> {
    // Underlying page-level allocator; every slab is one 64KiB page.
    pub(crate) page_alloc: PAGEALLOC,

    // Heads of the partially used slab lists, one per size class.
    slab16_partial: *mut Slab16,
    slab32_partial: *mut Slab32,
    slab64_partial: *mut Slab64,
    slab128_partial: *mut Slab128,
    slab256_partial: *mut Slab256,
    slab512_partial: *mut Slab512,
    slab1024_partial: *mut Slab1024,
    slab2040_partial: *mut Slab2040,
    slab4088_partial: *mut Slab4088,
    slab8184_partial: *mut Slab8184,
    slab16376_partial: *mut Slab16376,
    slab32752_partial: *mut Slab32752,
    slab65512_partial: *mut Slab65512,

    // Heads of the completely used slab lists, one per size class.
    slab16_full: *mut Slab16,
    slab32_full: *mut Slab32,
    slab64_full: *mut Slab64,
    slab128_full: *mut Slab128,
    slab256_full: *mut Slab256,
    slab512_full: *mut Slab512,
    slab1024_full: *mut Slab1024,
    slab2040_full: *mut Slab2040,
    slab4088_full: *mut Slab4088,
    slab8184_full: *mut Slab8184,
    slab16376_full: *mut Slab16376,
    slab32752_full: *mut Slab32752,
    slab65512_full: *mut Slab65512,
}
37
38unsafe fn alloc_memory<PAGEALLOC: MemAlloc, SLAB: Slab>(
39 page_alloc: &mut PAGEALLOC,
40 slab_partial: &mut *mut SLAB,
41 slab_full: &mut *mut SLAB,
42) -> Option<*mut u8> {
43 let slab_partial_top = slab_partial;
44 let slab_partial = *slab_partial_top;
45
46 let slab_full_top = slab_full;
47 let slab_full = *slab_full_top;
48
49 match slab_partial.as_mut() {
50 Some(partial) => {
51 let ret = partial.alloc(); if partial.is_full() {
54 if let Some(next) = partial.next().as_mut() {
55 next.set_prev(null_mut());
56 }
57
58 *slab_partial_top = partial.next();
59
60 if let Some(full) = slab_full.as_mut() {
61 full.set_prev(slab_partial);
62 }
63
64 partial.set_next(slab_full);
65 }
66
67 Some(ret)
68 }
69 None => {
70 if let Some(addr) = page_alloc.alloc(SIZE_64K) {
71 let slab_ptr = addr as *mut SLAB;
72
73 if let Some(slab) = slab_ptr.as_mut() {
74 slab.init();
75
76 let ret = slab.alloc();
77
78 if slab.is_full() {
79 if let Some(full) = slab_full.as_mut() {
81 full.set_prev(slab_ptr);
82 }
83
84 slab.set_next(slab_full);
85 *slab_full_top = slab_ptr;
86 } else {
87 *slab_partial_top = slab_ptr;
88 }
89
90 Some(ret)
91 } else {
92 None
93 }
94 } else {
95 None
96 }
97 }
98 }
99}
100
/// Return `ptr` to the slab at `addr_slab`, maintaining the partial and
/// full lists. When the slab becomes empty its whole 64KiB page is
/// released back to `page_alloc` and `Some(addr_slab)` is returned;
/// otherwise `None`.
///
/// # Safety
///
/// `ptr` must have been produced by this slab's `alloc`, `addr_slab`
/// must be the slab's base address, and the list heads must be valid.
unsafe fn dealloc_memory<PAGEALLOC: MemAlloc, SLAB: Slab>(
    ptr: *mut u8,
    addr_slab: usize,
    page_alloc: &mut PAGEALLOC,
    slab_partial: &mut *mut SLAB,
    slab_full: &mut *mut SLAB,
) -> Option<usize> {
    if let Some(slab) = (addr_slab as *mut SLAB).as_mut() {
        // Remember which list the slab was on before freeing the slot.
        let is_full = slab.is_full();
        slab.free(ptr);
        if is_full {
            // Unlink from the full list.
            if let Some(prev) = slab.prev().as_mut() {
                prev.set_next(slab.next());
            } else {
                *slab_full = slab.next();
            }

            if let Some(next) = slab.next().as_mut() {
                next.set_prev(slab.prev());
            }

            if slab.is_empty() {
                // Full -> empty in a single free (the single-slot 65512
                // class): release the page immediately.
                page_alloc.free(addr_slab as *mut u8);
                Some(addr_slab)
            } else {
                // Now partially used: push onto the front of the partial
                // list.
                if let Some(partial) = slab_partial.as_mut() {
                    partial.set_prev(slab);
                    slab.set_next(partial);
                } else {
                    slab.set_next(null_mut());
                }

                slab.set_prev(null_mut());
                *slab_partial = slab;

                None
            }
        } else if slab.is_empty() {
            // Last object freed: unlink from the partial list and release
            // the page.
            if let Some(prev) = slab.prev().as_mut() {
                prev.set_next(slab.next());
            } else {
                *slab_partial = slab.next();
            }

            if let Some(next) = slab.next().as_mut() {
                next.set_prev(slab.prev());
            }

            page_alloc.free(addr_slab as *mut u8);
            Some(addr_slab)
        } else {
            // Still partially used and already on the partial list.
            None
        }
    } else {
        // Null slab address: nothing to do.
        None
    }
}
158
impl<PAGEALLOC: MemAlloc> SlabAllocator<PAGEALLOC> {
    /// Allocate `size` bytes from the smallest slab class that can hold
    /// `size` plus the per-slot header (8 bytes for the power-of-two
    /// classes up to 1024 and for 65512; 16 bytes for 2040..=32752).
    ///
    /// Returns `None` when `size` exceeds `MAX_SLAB_SIZE` or the page
    /// allocator is out of memory.
    ///
    /// # Safety
    ///
    /// The slab lists in `self` must be well formed, and the returned
    /// pointer must eventually be passed to `slab_dealloc`.
    pub(crate) unsafe fn slab_alloc(&mut self, size: usize) -> Option<*mut u8> {
        // n = leading_zeros(size + 7): every doubling of (size + 7)
        // decrements n by one, so n maps straight to a power-of-two
        // class (n == 60 -> 16-byte slots, ..., n == 54 -> 1024-byte).
        let n = (size as u64 + 8 - 1).leading_zeros();

        match n {
            // n == 61 only occurs for size == 0; serve it from the
            // 16-byte class as well.
            61 | 60 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab16_partial,
                &mut self.slab16_full,
            ),
            59 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab32_partial,
                &mut self.slab32_full,
            ),
            58 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab64_partial,
                &mut self.slab64_full,
            ),
            57 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab128_partial,
                &mut self.slab128_full,
            ),
            56 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab256_partial,
                &mut self.slab256_full,
            ),
            55 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab512_partial,
                &mut self.slab512_full,
            ),
            54 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab1024_partial,
                &mut self.slab1024_full,
            ),
            _ => {
                // Larger classes are not powers of two (they lose space
                // to headers/metadata), so compare against the usable
                // payload sizes directly: class size minus the 16-byte
                // header (8 bytes for the single-slot 65512 class).
                if size <= 4088 - 16 {
                    if size <= 2040 - 16 {
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab2040_partial,
                            &mut self.slab2040_full,
                        )
                    } else {
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab4088_partial,
                            &mut self.slab4088_full,
                        )
                    }
                } else if size <= 16376 - 16 {
                    if size <= 8184 - 16 {
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab8184_partial,
                            &mut self.slab8184_full,
                        )
                    } else {
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab16376_partial,
                            &mut self.slab16376_full,
                        )
                    }
                } else if size <= 32752 - 16 {
                    alloc_memory(
                        &mut self.page_alloc,
                        &mut self.slab32752_partial,
                        &mut self.slab32752_full,
                    )
                } else if size <= 65512 - 8 {
                    alloc_memory(
                        &mut self.page_alloc,
                        &mut self.slab65512_partial,
                        &mut self.slab65512_full,
                    )
                } else {
                    // Request larger than MAX_SLAB_SIZE.
                    None
                }
            }
        }
    }

    /// Return `ptr` (obtained from `slab_alloc`) to its slab. Returns
    /// the slab's base address when the whole 64KiB page was released
    /// back to the page allocator, `None` otherwise.
    ///
    /// # Safety
    ///
    /// `ptr` must be a live pointer previously returned by
    /// `slab_alloc`; double frees corrupt the bitmaps and lists.
    pub(crate) unsafe fn slab_dealloc(&mut self, ptr: *mut u8) -> Option<usize> {
        // Every allocation stores its slab's base address in the 8
        // bytes immediately before the user pointer.
        let addr_slab = *((ptr as usize - 8) as *const u64);
        // All slab layouts keep their slot size in the final 4 bytes of
        // the 64KiB page (offset 65532); use it to pick the size class.
        let size = *((addr_slab + 65532) as *const u32);

        match size {
            16 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab16_partial,
                &mut self.slab16_full,
            ),
            32 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab32_partial,
                &mut self.slab32_full,
            ),
            64 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab64_partial,
                &mut self.slab64_full,
            ),
            128 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab128_partial,
                &mut self.slab128_full,
            ),
            256 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab256_partial,
                &mut self.slab256_full,
            ),
            512 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab512_partial,
                &mut self.slab512_full,
            ),
            1024 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab1024_partial,
                &mut self.slab1024_full,
            ),
            2040 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab2040_partial,
                &mut self.slab2040_full,
            ),
            4088 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab4088_partial,
                &mut self.slab4088_full,
            ),
            8184 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab8184_partial,
                &mut self.slab8184_full,
            ),
            16376 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab16376_partial,
                &mut self.slab16376_full,
            ),
            32752 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab32752_partial,
                &mut self.slab32752_full,
            ),
            65512 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab65512_partial,
                &mut self.slab65512_full,
            ),
            // Unrecognized size field: the header is corrupt or the
            // pointer did not come from this allocator.
            _ => None,
        }
    }

    /// Create a slab allocator whose page allocator manages the region
    /// starting at `addr` with length `size`; all slab lists start
    /// empty.
    pub(crate) fn new(addr: usize, size: usize) -> Self {
        Self {
            page_alloc: PAGEALLOC::new(addr, size),
            slab16_partial: null_mut(),
            slab32_partial: null_mut(),
            slab64_partial: null_mut(),
            slab128_partial: null_mut(),
            slab256_partial: null_mut(),
            slab512_partial: null_mut(),
            slab1024_partial: null_mut(),
            slab2040_partial: null_mut(),
            slab4088_partial: null_mut(),
            slab8184_partial: null_mut(),
            slab16376_partial: null_mut(),
            slab32752_partial: null_mut(),
            slab65512_partial: null_mut(),
            slab16_full: null_mut(),
            slab32_full: null_mut(),
            slab64_full: null_mut(),
            slab128_full: null_mut(),
            slab256_full: null_mut(),
            slab512_full: null_mut(),
            slab1024_full: null_mut(),
            slab2040_full: null_mut(),
            slab4088_full: null_mut(),
            slab8184_full: null_mut(),
            slab16376_full: null_mut(),
            slab32752_full: null_mut(),
            slab65512_full: null_mut(),
        }
    }
}
398
/// Common interface implemented by every slab size class.
trait Slab {
    /// Carve out one slot and return the user pointer (just past the
    /// per-slot header).
    fn alloc(&mut self) -> *mut u8;
    /// Return the slot containing `ptr` to this slab.
    fn free(&mut self, ptr: *mut u8);
    /// True when no slot is free.
    fn is_full(&self) -> bool;
    /// True when no slot is in use.
    fn is_empty(&self) -> bool;
    /// Reset bitmaps, list links and counters for a fresh 64KiB page.
    fn init(&mut self);
    /// Next slab in the intrusive list (null when last).
    fn next(&self) -> *mut Self;
    /// Previous slab in the intrusive list (null when first).
    fn prev(&self) -> *mut Self;
    fn set_next(&mut self, next: *mut Self);
    fn set_prev(&mut self, prev: *mut Self);
}
411
// Generates a 64KiB slab type for small slot sizes (16..=1024 bytes).
//   $id:    type name
//   $n:     number of 64-bit l2 bitmap words
//   $shift: log2 of the slot size (used to recover slot indices on free)
//   $l1val: initial l1 bitmap (pre-marks nonexistent l2 words as full)
//   $l2val: initial last l2 word (pre-marks slots that spill past `buf`)
//   $size:  slot size in bytes, including the 8-byte header
macro_rules! SlabSmall {
    ($id:ident, $n:expr, $shift:expr, $l1val:expr, $l2val:expr, $size:expr) => {
        // Free slots are tracked by a two-level MSB-first bitmap:
        // bit (63 - i) of `l1_bitmap` is set when `l2_bitmap[i]` has no
        // free slot, and bit (63 - j) of `l2_bitmap[i]` is set when slot
        // (i * 64 + j) is in use. Each slot begins with an 8-byte header
        // holding the slab base address. The struct occupies exactly
        // 65536 bytes, with `size` in the final 4 bytes (offset 65532)
        // where `slab_dealloc` reads it.
        #[repr(C)]
        struct $id {
            buf: [u8; 65536 - 32 - 8 * $n],
            l1_bitmap: u64,
            l2_bitmap: [u64; $n],
            prev: *mut $id,
            next: *mut $id,
            num: u32,
            size: u32,
        }

        impl Slab for $id {
            fn next(&self) -> *mut Self {
                self.next
            }

            fn set_next(&mut self, next: *mut Self) {
                self.next = next;
            }

            fn prev(&self) -> *mut Self {
                self.prev
            }

            fn set_prev(&mut self, prev: *mut Self) {
                self.prev = prev;
            }

            fn alloc(&mut self) -> *mut u8 {
                // First l2 word with a free slot, then the first free slot
                // inside it (bitmaps are MSB-first, hence leading_zeros of
                // the inverted word).
                let idx1 = (!self.l1_bitmap).leading_zeros() as usize;
                let idx2 = (!self.l2_bitmap[idx1]).leading_zeros() as usize;

                self.l2_bitmap[idx1] |= 1 << (63 - idx2);
                if self.l2_bitmap[idx1] == !0 {
                    self.l1_bitmap |= 1 << (63 - idx1);
                }

                let size = self.size as usize;
                let idx = idx1 * size * 64 + idx2 * size;

                // Defensive check: correctly initialized bitmaps never
                // yield an offset past the end of `buf`.
                if idx >= 65536 - 32 - 8 * $n {
                    panic!("allocation error");
                }

                let ptr = &mut (self.buf[idx]) as *mut u8;
                let ptr64 = ptr as *mut usize;

                // Store the slab base address in the slot header so that
                // `slab_dealloc` can locate this slab from the user pointer.
                unsafe {
                    *ptr64 = self as *mut $id as usize;
                }

                self.num += 1;

                // User data starts after the 8-byte header.
                &mut (self.buf[idx + 8]) as *mut u8
            }

            fn free(&mut self, ptr: *mut u8) {
                // Recover the slot index from the header's byte offset
                // inside the slab.
                let addr = ptr as usize - 8;
                let org = self as *mut $id as usize;
                let len = addr - org;
                let idx = (len >> $shift) as usize;

                let idx1 = idx >> 6;
                let idx2 = idx & 0b111111;

                // The l2 word now has a free slot, so its l1 bit clears too.
                self.l1_bitmap &= !(1 << (63 - idx1));
                self.l2_bitmap[idx1] &= !(1 << (63 - idx2));
                self.num -= 1;
            }

            fn is_full(&self) -> bool {
                self.l1_bitmap == !0
            }

            fn is_empty(&self) -> bool {
                self.num == 0
            }

            fn init(&mut self) {
                // Pre-mark nonexistent l2 words and out-of-range slots as
                // permanently allocated.
                self.l1_bitmap = $l1val;
                for it in self.l2_bitmap.iter_mut() {
                    *it = 0;
                }
                self.l2_bitmap[$n - 1] = $l2val;
                self.prev = null_mut();
                self.next = null_mut();
                self.num = 0;
                self.size = $size;
            }
        }
    };
}
545
// SlabSmall!(name, l2 words, log2(slot), initial l1, initial last l2, slot).
// `buf` holds 65536 - 32 - 8 * $n bytes, which is not always a multiple of
// the slot size, so the initial bitmap values mark the tail slots that
// spill past `buf` — and, in l1, any nonexistent l2 words — as used.
SlabSmall!(Slab16, 64, 4, 0, 0xFFFFFFFF | (0b11 << 32), 16);

SlabSmall!(Slab32, 32, 5, 0xFFFFFFFF, 0b111111111, 32);

SlabSmall!(Slab64, 16, 6, 0xFFFFFFFFFFFF, 0b111, 64);

SlabSmall!(Slab128, 8, 7, 0xFFFFFFFFFFFFFF, 1, 128);

SlabSmall!(Slab256, 4, 8, 0xFFFFFFFFFFFFFFF, 1, 256);

SlabSmall!(Slab512, 2, 9, 0x3FFFFFFFFFFFFFFF, 1, 512);

SlabSmall!(Slab1024, 1, 10, 0x7FFFFFFFFFFFFFFF, 1, 1024);
580
/// 16-byte header stored at the start of every large-slab slot:
/// the slot index and the owning slab's base address.
#[repr(C)]
struct SlabMemory {
    idx1: usize,
    slab: usize,
}
586
// Generates a 64KiB slab type for large slot sizes (2040..=32752 bytes).
//   $id:    type name
//   $l1val: initial bitmap, pre-marking the slots that do not fit in `buf`
//   $size:  slot size in bytes, including the 16-byte SlabMemory header
macro_rules! SlabLarge {
    ($id:ident, $l1val:expr, $size:expr) => {
        // A single MSB-first 64-bit bitmap tracks usage: bit (63 - i) set
        // means slot i is in use. Each slot starts with a 16-byte
        // `SlabMemory` header (slot index + slab base address). The struct
        // occupies exactly 65536 bytes, with `size` in the final 4 bytes
        // (offset 65532) where `slab_dealloc` reads it.
        #[repr(C)]
        struct $id {
            buf: [u8; 65504],
            prev: *mut $id,
            next: *mut $id,
            l1_bitmap: u64,
            num: u32,
            size: u32,
        }

        impl Slab for $id {
            fn next(&self) -> *mut Self {
                self.next
            }

            fn set_next(&mut self, next: *mut Self) {
                self.next = next;
            }

            fn prev(&self) -> *mut Self {
                self.prev
            }

            fn set_prev(&mut self, prev: *mut Self) {
                self.prev = prev;
            }

            fn alloc(&mut self) -> *mut u8 {
                // First free slot (bitmap is MSB-first).
                let idx1 = (!self.l1_bitmap).leading_zeros() as usize;
                self.l1_bitmap |= 1 << (63 - idx1);

                let idx = idx1 * self.size as usize;
                let ptr = &mut (self.buf[idx]) as *mut u8;
                let mem = ptr as *mut SlabMemory;

                // Fill the header: slot index (read back on free) and slab
                // base address (read by `slab_dealloc`).
                unsafe {
                    (*mem).idx1 = idx1;
                    (*mem).slab = self as *mut $id as usize;
                }

                self.num += 1;

                // User data starts after the 16-byte header.
                &mut (self.buf[idx + 16]) as *mut u8
            }

            fn free(&mut self, ptr: *mut u8) {
                // The slot index was stored 16 bytes before the user pointer.
                let addr = ptr as usize;
                let idx1 = unsafe { *((addr - 16) as *mut usize) };

                self.l1_bitmap &= !(1 << (63 - idx1));
                self.num -= 1;
            }

            fn is_full(&self) -> bool {
                self.l1_bitmap == !0
            }

            fn is_empty(&self) -> bool {
                self.num == 0
            }

            fn init(&mut self) {
                self.prev = null_mut();
                self.next = null_mut();
                // Pre-mark the slots that do not fit in `buf` as used.
                self.l1_bitmap = $l1val;
                self.size = $size;
                self.num = 0;
            }
        }
    };
}
687
// SlabLarge!(name, initial bitmap, slot size). `buf` is 65504 bytes, so
// each $l1val pre-marks the slots that do not fit (e.g. Slab2040 fits
// 32 slots of 2040 bytes, so the low 32 bits start set).
SlabLarge!(Slab2040, 0xFFFFFFFF, 2040);

SlabLarge!(Slab4088, 0xFFFFFFFFFFFF, 4088);

SlabLarge!(Slab8184, 0xFFFFFFFFFFFFFF, 8184);

SlabLarge!(Slab16376, 0xFFFFFFFFFFFFFFF, 16376);

SlabLarge!(Slab32752, 0x3FFFFFFFFFFFFFFF, 32752);
707
/// 64KiB slab holding a single 65512-byte slot (8-byte header + payload).
/// As with the other layouts, `size` occupies the final 4 bytes of the
/// page (offset 65532) so `slab_dealloc` can identify the class.
#[repr(C)]
struct Slab65512 {
    buf: [u8; 65512],
    prev: *mut Slab65512,
    next: *mut Slab65512,
    num: u32,
    size: u32,
}
716
717impl Slab for Slab65512 {
718 fn next(&self) -> *mut Self {
719 self.next
720 }
721
722 fn set_next(&mut self, next: *mut Self) {
723 self.next = next;
724 }
725
726 fn prev(&self) -> *mut Self {
727 self.prev
728 }
729
730 fn set_prev(&mut self, prev: *mut Self) {
731 self.prev = prev;
732 }
733
734 fn alloc(&mut self) -> *mut u8 {
743 let ptr = &mut (self.buf[0]) as *mut u8;
744 let ptr64 = ptr as *mut usize;
745
746 unsafe {
748 *ptr64 = self as *mut Slab65512 as usize;
749 }
750
751 self.num = 1;
752
753 &mut (self.buf[8]) as *mut u8
754 }
755
756 fn free(&mut self, _ptr: *mut u8) {
757 self.num = 0;
758 }
759
760 fn is_full(&self) -> bool {
761 true
762 }
763
764 fn is_empty(&self) -> bool {
765 self.num == 0
766 }
767
768 fn init(&mut self) {
769 self.next = null_mut();
770 self.prev = null_mut();
771 self.size = 65512;
772 self.num = 0;
773 }
774
775 }
786
787