memac/slab.rs

use crate::{MemAlloc, SIZE_64K};
use core::ptr::null_mut;

pub(crate) const MAX_SLAB_SIZE: usize = 65512 - 8;

pub(crate) struct SlabAllocator<PAGEALLOC: MemAlloc> {
    pub(crate) page_alloc: PAGEALLOC,

    slab16_partial: *mut Slab16,
    slab32_partial: *mut Slab32,
    slab64_partial: *mut Slab64,
    slab128_partial: *mut Slab128,
    slab256_partial: *mut Slab256,
    slab512_partial: *mut Slab512,
    slab1024_partial: *mut Slab1024,
    slab2040_partial: *mut Slab2040,
    slab4088_partial: *mut Slab4088,
    slab8184_partial: *mut Slab8184,
    slab16376_partial: *mut Slab16376,
    slab32752_partial: *mut Slab32752,
    slab65512_partial: *mut Slab65512, // always null

    slab16_full: *mut Slab16,
    slab32_full: *mut Slab32,
    slab64_full: *mut Slab64,
    slab128_full: *mut Slab128,
    slab256_full: *mut Slab256,
    slab512_full: *mut Slab512,
    slab1024_full: *mut Slab1024,
    slab2040_full: *mut Slab2040,
    slab4088_full: *mut Slab4088,
    slab8184_full: *mut Slab8184,
    slab16376_full: *mut Slab16376,
    slab32752_full: *mut Slab32752,
    slab65512_full: *mut Slab65512,
}

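// Design overview (summarizing the code below): every slab is a single
// 64 KiB page obtained from `page_alloc`. For each size class the allocator
// keeps two intrusive doubly-linked lists: a "partial" list of slabs with at
// least one free object and a "full" list of slabs with none. Each allocated
// object is prefixed by a header (8 bytes for the small classes and
// Slab65512, 16 bytes for the large classes) that records the owning slab,
// and every slab layout places its object size in the last 4 bytes of the
// page (offset 65532), which is how `slab_dealloc` recovers the size class
// from a raw pointer.
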
unsafe fn alloc_memory<PAGEALLOC: MemAlloc, SLAB: Slab>(
    page_alloc: &mut PAGEALLOC,
    slab_partial: &mut *mut SLAB,
    slab_full: &mut *mut SLAB,
) -> Option<*mut u8> {
    let slab_partial_top = slab_partial;
    let slab_partial = *slab_partial_top;

    let slab_full_top = slab_full;
    let slab_full = *slab_full_top;

    match slab_partial.as_mut() {
        Some(partial) => {
            let ret = partial.alloc(); // Allocate a memory region.

            // If the slab just became full, move it from the partial list
            // to the head of the full list.
            if partial.is_full() {
                if let Some(next) = partial.next().as_mut() {
                    next.set_prev(null_mut());
                }

                *slab_partial_top = partial.next();

                if let Some(full) = slab_full.as_mut() {
                    full.set_prev(slab_partial);
                }

                partial.set_next(slab_full);
                *slab_full_top = slab_partial; // Update the head of the full list.
            }

            Some(ret)
        }
        None => {
            // No partial slab is available; get a fresh 64 KiB page.
            if let Some(addr) = page_alloc.alloc(SIZE_64K) {
                let slab_ptr = addr as *mut SLAB;

                if let Some(slab) = slab_ptr.as_mut() {
                    slab.init();

                    let ret = slab.alloc();

                    if slab.is_full() {
                        // Only Slab65512 becomes full after a single allocation.
                        if let Some(full) = slab_full.as_mut() {
                            full.set_prev(slab_ptr);
                        }

                        slab.set_next(slab_full);
                        *slab_full_top = slab_ptr;
                    } else {
                        *slab_partial_top = slab_ptr;
                    }

                    Some(ret)
                } else {
                    None
                }
            } else {
                None
            }
        }
    }
}

unsafe fn dealloc_memory<PAGEALLOC: MemAlloc, SLAB: Slab>(
    ptr: *mut u8,
    addr_slab: usize,
    page_alloc: &mut PAGEALLOC,
    slab_partial: &mut *mut SLAB,
    slab_full: &mut *mut SLAB,
) -> Option<usize> {
    if let Some(slab) = (addr_slab as *mut SLAB).as_mut() {
        let is_full = slab.is_full();
        slab.free(ptr);
        if is_full {
            // The slab was on the full list; unlink it.
            if let Some(prev) = slab.prev().as_mut() {
                prev.set_next(slab.next());
            } else {
                *slab_full = slab.next();
            }

            if let Some(next) = slab.next().as_mut() {
                next.set_prev(slab.prev());
            }

            if slab.is_empty() {
                page_alloc.free(addr_slab as *mut u8);
                Some(addr_slab) // Should unmap this page.
            } else {
                // Push the slab onto the head of the partial list.
                if let Some(partial) = slab_partial.as_mut() {
                    partial.set_prev(slab);
                    slab.set_next(partial);
                } else {
                    slab.set_next(null_mut());
                }

                slab.set_prev(null_mut());
                *slab_partial = slab;

                None
            }
        } else if slab.is_empty() {
            // The slab was on the partial list and is now empty; unlink it
            // and return the page to the page allocator.
            if let Some(prev) = slab.prev().as_mut() {
                prev.set_next(slab.next());
            } else {
                *slab_partial = slab.next();
            }

            if let Some(next) = slab.next().as_mut() {
                next.set_prev(slab.prev());
            }

            page_alloc.free(addr_slab as *mut u8);
            Some(addr_slab) // Should unmap this page.
        } else {
            None
        }
    } else {
        None
    }
}

impl<PAGEALLOC: MemAlloc> SlabAllocator<PAGEALLOC> {
    pub(crate) unsafe fn slab_alloc(&mut self, size: usize) -> Option<*mut u8> {
        // Map the request (plus the 8-byte header) to a power-of-two size
        // class by counting leading zeros. For example, size = 100 gives
        // 100 + 8 - 1 = 107, which has 57 leading zeros as a u64, so the
        // request is served by Slab128.
        let n = (size as u64 + 8 - 1).leading_zeros();

        match n {
            61 | 60 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab16_partial,
                &mut self.slab16_full,
            ),
            59 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab32_partial,
                &mut self.slab32_full,
            ),
            58 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab64_partial,
                &mut self.slab64_full,
            ),
            57 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab128_partial,
                &mut self.slab128_full,
            ),
            56 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab256_partial,
                &mut self.slab256_full,
            ),
            55 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab512_partial,
                &mut self.slab512_full,
            ),
            54 => alloc_memory(
                &mut self.page_alloc,
                &mut self.slab1024_partial,
                &mut self.slab1024_full,
            ),
            _ => {
                // Larger requests go to the slabs with a 16-byte header
                // (Slab2040 .. Slab32752) or to Slab65512.
                if size <= 4088 - 16 {
                    if size <= 2040 - 16 {
                        // Slab2040
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab2040_partial,
                            &mut self.slab2040_full,
                        )
                    } else {
                        // Slab4088
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab4088_partial,
                            &mut self.slab4088_full,
                        )
                    }
                } else if size <= 16376 - 16 {
                    if size <= 8184 - 16 {
                        // Slab8184
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab8184_partial,
                            &mut self.slab8184_full,
                        )
                    } else {
                        // Slab16376
                        alloc_memory(
                            &mut self.page_alloc,
                            &mut self.slab16376_partial,
                            &mut self.slab16376_full,
                        )
                    }
                } else if size <= 32752 - 16 {
                    // Slab32752
                    alloc_memory(
                        &mut self.page_alloc,
                        &mut self.slab32752_partial,
                        &mut self.slab32752_full,
                    )
                } else if size <= 65512 - 8 {
                    // Slab65512
                    alloc_memory(
                        &mut self.page_alloc,
                        &mut self.slab65512_partial,
                        &mut self.slab65512_full,
                    )
                } else {
                    None
                }
            }
        }
    }

    /// Return a 64 KiB page address if the page should be unmapped.
    pub(crate) unsafe fn slab_dealloc(&mut self, ptr: *mut u8) -> Option<usize> {
        let addr_slab = *((ptr as usize - 8) as *const u64);
        let size = *((addr_slab + 65532) as *const u32);

        /*
                driver::uart::puts("dealloc:\n");
                driver::uart::puts("  ptr: 0x");
                driver::uart::hex(ptr as u64);
                driver::uart::puts("\n");
                driver::uart::puts("  addr_slab: 0x");
                driver::uart::hex(addr_slab);
                driver::uart::puts("\n");
                driver::uart::puts("  size: ");
                driver::uart::decimal(size as u64);
                driver::uart::puts("\n");
        */
        match size {
            16 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab16_partial,
                &mut self.slab16_full,
            ),
            32 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab32_partial,
                &mut self.slab32_full,
            ),
            64 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab64_partial,
                &mut self.slab64_full,
            ),
            128 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab128_partial,
                &mut self.slab128_full,
            ),
            256 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab256_partial,
                &mut self.slab256_full,
            ),
            512 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab512_partial,
                &mut self.slab512_full,
            ),
            1024 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab1024_partial,
                &mut self.slab1024_full,
            ),
            2040 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab2040_partial,
                &mut self.slab2040_full,
            ),
            4088 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab4088_partial,
                &mut self.slab4088_full,
            ),
            8184 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab8184_partial,
                &mut self.slab8184_full,
            ),
            16376 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab16376_partial,
                &mut self.slab16376_full,
            ),
            32752 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab32752_partial,
                &mut self.slab32752_full,
            ),
            65512 => dealloc_memory(
                ptr,
                addr_slab as usize,
                &mut self.page_alloc,
                &mut self.slab65512_partial,
                &mut self.slab65512_full,
            ),
            _ => None,
        }
    }

    pub(crate) fn new(addr: usize, size: usize) -> Self {
        Self {
            page_alloc: PAGEALLOC::new(addr, size),
            slab16_partial: null_mut(),
            slab32_partial: null_mut(),
            slab64_partial: null_mut(),
            slab128_partial: null_mut(),
            slab256_partial: null_mut(),
            slab512_partial: null_mut(),
            slab1024_partial: null_mut(),
            slab2040_partial: null_mut(),
            slab4088_partial: null_mut(),
            slab8184_partial: null_mut(),
            slab16376_partial: null_mut(),
            slab32752_partial: null_mut(),
            slab65512_partial: null_mut(),
            slab16_full: null_mut(),
            slab32_full: null_mut(),
            slab64_full: null_mut(),
            slab128_full: null_mut(),
            slab256_full: null_mut(),
            slab512_full: null_mut(),
            slab1024_full: null_mut(),
            slab2040_full: null_mut(),
            slab4088_full: null_mut(),
            slab8184_full: null_mut(),
            slab16376_full: null_mut(),
            slab32752_full: null_mut(),
            slab65512_full: null_mut(),
        }
    }
}

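// A minimal usage sketch (not from the original file): `PageAlloc`,
// `heap_start`, and `heap_size` are hypothetical names, and only the
// `MemAlloc` methods used in this file (`new`, `alloc`, `free`) are assumed.
//
// unsafe {
//     let mut slab = SlabAllocator::<PageAlloc>::new(heap_start, heap_size);
//
//     // 100 + 8 bytes fits in the 128-byte class, so this is served by Slab128.
//     if let Some(ptr) = slab.slab_alloc(100) {
//         // Freeing the only object empties the slab, so the backing 64 KiB
//         // page is handed back to `page_alloc` and its address is returned
//         // so the caller can unmap it.
//         if let Some(page) = slab.slab_dealloc(ptr) {
//             let _ = page;
//         }
//     }
// }
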
trait Slab {
    fn alloc(&mut self) -> *mut u8;
    fn free(&mut self, ptr: *mut u8);
    fn is_full(&self) -> bool;
    fn is_empty(&self) -> bool;
    fn init(&mut self);
    fn next(&self) -> *mut Self;
    fn prev(&self) -> *mut Self;
    fn set_next(&mut self, next: *mut Self);
    fn set_prev(&mut self, prev: *mut Self);
    // fn print(&self);
}

macro_rules! SlabSmall {
    ($id:ident, $n:expr, $shift:expr, $l1val:expr, $l2val:expr, $size:expr) => {
        #[repr(C)]
        struct $id {
            buf: [u8; 65536 - 32 - 8 * $n],
            l1_bitmap: u64,
            l2_bitmap: [u64; $n],
            prev: *mut $id,
            next: *mut $id,
            num: u32,
            size: u32,
        }

        impl Slab for $id {
            fn next(&self) -> *mut Self {
                self.next
            }

            fn set_next(&mut self, next: *mut Self) {
                self.next = next;
            }

            fn prev(&self) -> *mut Self {
                self.prev
            }

            fn set_prev(&mut self, prev: *mut Self) {
                self.prev = prev;
            }

            // +------------------+
            // | pointer to slab  |
            // |    (8 bytes)     |
            // +------------------+ <- return value
            // |       data       |
            // | (size - 8 bytes) |
            // |                  |
            /// Allocate a memory region whose size is `self.size` - 8 bytes.
            fn alloc(&mut self) -> *mut u8 {
                let idx1 = (!self.l1_bitmap).leading_zeros() as usize;
                let idx2 = (!self.l2_bitmap[idx1]).leading_zeros() as usize;

                self.l2_bitmap[idx1] |= 1 << (63 - idx2);
                if self.l2_bitmap[idx1] == !0 {
                    self.l1_bitmap |= 1 << (63 - idx1);
                }

                let size = self.size as usize;
                let idx = idx1 * size * 64 + idx2 * size;

                if idx >= 65536 - 32 - 8 * $n {
                    panic!("allocation error");
                }

                let ptr = &mut (self.buf[idx]) as *mut u8;
                let ptr64 = ptr as *mut usize;

                // The first 64 bits point to the owning slab.
                unsafe {
                    *ptr64 = self as *mut $id as usize;
                }

                self.num += 1;

                &mut (self.buf[idx + 8]) as *mut u8
            }

            /// Deallocate the memory region pointed to by `ptr`, which was returned by `alloc`.
            fn free(&mut self, ptr: *mut u8) {
                let addr = ptr as usize - 8;
                let org = self as *mut $id as usize;
                let len = addr - org;
                let idx = (len >> $shift) as usize;

                let idx1 = idx >> 6; // divide by 64
                let idx2 = idx & 0b111111;

                self.l1_bitmap &= !(1 << (63 - idx1));
                self.l2_bitmap[idx1] &= !(1 << (63 - idx2));
                self.num -= 1;
            }

            fn is_full(&self) -> bool {
                self.l1_bitmap == !0
            }

            fn is_empty(&self) -> bool {
                self.num == 0
            }

            fn init(&mut self) {
                self.l1_bitmap = $l1val;
                for it in self.l2_bitmap.iter_mut() {
                    *it = 0;
                }
                self.l2_bitmap[$n - 1] = $l2val;
                self.prev = null_mut();
                self.next = null_mut();
                self.num = 0;
                self.size = $size;
            }

            // fn print(&self) {
            //     driver::uart::puts("num: ");
            //     driver::uart::decimal(self.num as u64);
            //     driver::uart::puts("\nL1 bitmap: 0x");
            //     driver::uart::hex(self.l1_bitmap);
            //     driver::uart::puts("\n");

            //     driver::uart::puts("L2 bitmap:\n");
            //     let mut i = 0;
            //     for v in self.l2_bitmap.iter() {
            //         driver::uart::puts(" 0x");
            //         driver::uart::hex(*v);
            //         if i == 3 {
            //             driver::uart::puts("\n");
            //             i = 0;
            //         } else {
            //             i += 1;
            //         }
            //     }

            //     match unsafe { self.next.as_ref() } {
            //         Some(next) => {
            //             driver::uart::puts("\n");
            //             next.print();
            //         }
            //         None => {}
            //     }
            // }
        }
    };
}

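// How the initial bitmap values below are derived (Slab16 as an example):
// with $n = 64 the buffer holds 65536 - 32 - 8 * 64 = 64992 bytes, i.e.
// 64992 / 16 = 4062 objects, while the two-level bitmap addresses
// 64 * 64 = 4096 slots. The 34 surplus slots are pre-marked as used, which
// is exactly l2_bitmap[63] = 0xFFFF_FFFF | (0b11 << 32); the other size
// classes follow the same pattern.
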
// l1_bitmap = 0 (initial value)
// l2_bitmap[63] = 0xFFFF FFFF | 0b11 << 32 (initial value)
// size = 16
SlabSmall!(Slab16, 64, 4, 0, 0xFFFFFFFF | (0b11 << 32), 16);

// l1_bitmap = 0xFFFF FFFF (initial value)
// l2_bitmap[31] = 0b111111111 (initial value)
// size = 32
SlabSmall!(Slab32, 32, 5, 0xFFFFFFFF, 0b111111111, 32);

// l1_bitmap = 0xFFFF FFFF FFFF (initial value)
// l2_bitmap[15] = 0b111 (initial value)
// size = 64
SlabSmall!(Slab64, 16, 6, 0xFFFFFFFFFFFF, 0b111, 64);

// l1_bitmap = 0xFFFF FFFF FFFF FF (initial value)
// l2_bitmap[7] = 0b1 (initial value)
// size = 128
SlabSmall!(Slab128, 8, 7, 0xFFFFFFFFFFFFFF, 1, 128);

// l1_bitmap = 0xFFFF FFFF FFFF FFF (initial value)
// l2_bitmap[3] = 0b1 (initial value)
// size = 256
SlabSmall!(Slab256, 4, 8, 0xFFFFFFFFFFFFFFF, 1, 256);

// l1_bitmap = 0x3FFF FFFF FFFF FFFF (initial value)
// l2_bitmap[1] = 0b1 (initial value)
// size = 512
SlabSmall!(Slab512, 2, 9, 0x3FFFFFFFFFFFFFFF, 1, 512);

// l1_bitmap = 0x7FFF FFFF FFFF FFFF (initial value)
// l2_bitmap[0] = 0b1 (initial value)
// size = 1024
SlabSmall!(Slab1024, 1, 10, 0x7FFFFFFFFFFFFFFF, 1, 1024);

#[repr(C)]
struct SlabMemory {
    idx1: usize,
    slab: usize,
}

macro_rules! SlabLarge {
    ($id:ident, $l1val:expr, $size:expr) => {
        #[repr(C)]
        struct $id {
            buf: [u8; 65504],
            prev: *mut $id,
            next: *mut $id,
            l1_bitmap: u64,
            num: u32,
            size: u32,
        }

        impl Slab for $id {
            fn next(&self) -> *mut Self {
                self.next
            }

            fn set_next(&mut self, next: *mut Self) {
                self.next = next;
            }

            fn prev(&self) -> *mut Self {
                self.prev
            }

            fn set_prev(&mut self, prev: *mut Self) {
                self.prev = prev;
            }

            // +-------------------+
            // |       index       |
            // |     (8 bytes)     |
            // +-------------------+
            // |  pointer to slab  |
            // |     (8 bytes)     |
            // +-------------------+ <- return value
            // |       data        |
            // | (size - 16 bytes) |
            // |                   |
            /// Allocate a memory region whose size is `self.size` - 16 bytes.
            fn alloc(&mut self) -> *mut u8 {
                let idx1 = (!self.l1_bitmap).leading_zeros() as usize;
                self.l1_bitmap |= 1 << (63 - idx1);

                let idx = idx1 * self.size as usize;
                let ptr = &mut (self.buf[idx]) as *mut u8;
                let mem = ptr as *mut SlabMemory;

                // The first 128 bits contain the meta information.
                unsafe {
                    (*mem).idx1 = idx1;
                    (*mem).slab = self as *mut $id as usize;
                }

                self.num += 1;

                &mut (self.buf[idx + 16]) as *mut u8
            }

            /// Deallocate the memory region pointed to by `ptr`, which was returned by `alloc`.
            fn free(&mut self, ptr: *mut u8) {
                let addr = ptr as usize;
                let idx1 = unsafe { *((addr - 16) as *mut usize) };

                self.l1_bitmap &= !(1 << (63 - idx1));
                self.num -= 1;
            }

            fn is_full(&self) -> bool {
                self.l1_bitmap == !0
            }

            fn is_empty(&self) -> bool {
                self.num == 0
            }

            fn init(&mut self) {
                self.prev = null_mut();
                self.next = null_mut();
                self.l1_bitmap = $l1val;
                self.size = $size;
                self.num = 0;
            }

            // fn print(&self) {
            //     driver::uart::puts("L1 bitmap: 0x");
            //     driver::uart::hex(self.l1_bitmap);
            //     driver::uart::puts("\n");

            //     match unsafe { self.next.as_ref() } {
            //         Some(next) => {
            //             driver::uart::puts("\n");
            //             next.print();
            //         }
            //         None => {}
            //     }
            // }
        }
    };
}

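// How the initial l1_bitmap values below are derived (Slab2040 as an
// example): the 65504-byte buffer holds 65504 / 2040 = 32 objects, so the
// 32 unusable slot indices (32..63) are pre-marked as used, which is
// l1_bitmap = 0xFFFF_FFFF. Unlike the small slabs, a large slab records the
// slot index in its 16-byte header because the object size is not a power
// of two, so `free` cannot recover the index with a shift.
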
// l1_bitmap = 0xFFFF FFFF (initial value)
// size = 2040
SlabLarge!(Slab2040, 0xFFFFFFFF, 2040);

// l1_bitmap = 0xFFFF FFFF FFFF (initial value)
// size = 4088
SlabLarge!(Slab4088, 0xFFFFFFFFFFFF, 4088);

// l1_bitmap = 0xFFFF FFFF FFFF FF (initial value)
// size = 8184
SlabLarge!(Slab8184, 0xFFFFFFFFFFFFFF, 8184);

// l1_bitmap = 0xFFFF FFFF FFFF FFF (initial value)
// size = 16376
SlabLarge!(Slab16376, 0xFFFFFFFFFFFFFFF, 16376);

// l1_bitmap = 0x3FFF FFFF FFFF FFFF (initial value)
// size = 32752
SlabLarge!(Slab32752, 0x3FFFFFFFFFFFFFFF, 32752);

#[repr(C)]
struct Slab65512 {
    buf: [u8; 65512],
    prev: *mut Slab65512,
    next: *mut Slab65512,
    num: u32,
    size: u32, // must be 65512
}

impl Slab for Slab65512 {
    fn next(&self) -> *mut Self {
        self.next
    }

    fn set_next(&mut self, next: *mut Self) {
        self.next = next;
    }

    fn prev(&self) -> *mut Self {
        self.prev
    }

    fn set_prev(&mut self, prev: *mut Self) {
        self.prev = prev;
    }

    // +------------------+
    // | pointer to slab  |
    // |    (8 bytes)     |
    // +------------------+ <- return value
    // |       data       |
    // | (size - 8 bytes) |
    // |                  |
    /// Allocate a memory region whose size is 65504 bytes.
    fn alloc(&mut self) -> *mut u8 {
        let ptr = &mut (self.buf[0]) as *mut u8;
        let ptr64 = ptr as *mut usize;

        // The first 64 bits point to the owning slab.
        unsafe {
            *ptr64 = self as *mut Slab65512 as usize;
        }

        self.num = 1;

        &mut (self.buf[8]) as *mut u8
    }

    fn free(&mut self, _ptr: *mut u8) {
        self.num = 0;
    }

    fn is_full(&self) -> bool {
        true
    }

    fn is_empty(&self) -> bool {
        self.num == 0
    }

    fn init(&mut self) {
        self.next = null_mut();
        self.prev = null_mut();
        self.size = 65512;
        self.num = 0;
    }

    // fn print(&self) {
    //     driver::uart::puts("1");

    //     match unsafe { self.next.as_ref() } {
    //         Some(next) => {
    //             next.print();
    //         }
    //         None => {}
    //     }
    // }
}

// macro_rules! print_slabs {
//     ($s:literal, $slab_partial:ident, $slab_full:ident) => {
//         driver::uart::puts("\n");
//         driver::uart::puts($s);
//         driver::uart::puts("_partial:\n");
//         match unsafe { SLAB_ALLOC.$slab_partial.as_ref() } {
//             Some(slab) => {
//                 slab.print();
//             }
//             None => {}
//         }

//         driver::uart::puts("\n");
//         driver::uart::puts($s);
//         driver::uart::puts("_full:\n");
//         match unsafe { SLAB_ALLOC.$slab_full.as_ref() } {
//             Some(slab) => {
//                 slab.print();
//             }
//             None => {}
//         }
//     };
// }

// pub fn print_slabs() {
//     print_slabs!("slab16", slab16_partial, slab16_full);
//     print_slabs!("slab32", slab32_partial, slab32_full);
//     print_slabs!("slab64", slab64_partial, slab64_full);
//     print_slabs!("slab128", slab128_partial, slab128_full);
//     print_slabs!("slab256", slab256_partial, slab256_full);
//     print_slabs!("slab512", slab512_partial, slab512_full);
//     print_slabs!("slab1024", slab1024_partial, slab1024_full);
//     print_slabs!("slab2040", slab2040_partial, slab2040_full);
//     print_slabs!("slab4088", slab4088_partial, slab4088_full);
//     print_slabs!("slab8184", slab8184_partial, slab8184_full);
//     print_slabs!("slab16376", slab16376_partial, slab16376_full);
//     print_slabs!("slab32752", slab32752_partial, slab32752_full);
//     print_slabs!("slab65512", slab65512_partial, slab65512_full);
//     driver::uart::puts("\n");
// }