//! Fast allocator.
//! Optimized for fixed-size small memory blocks.
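//!
//! # Example
//!
//! A minimal usage sketch (the buffer size here is illustrative; the
//! backing memory must satisfy the safety contract of [`FastAlloc::new`]):
//!
//! ```ignore
//! let mut buf = [0u8; 16 * BLOCK_SIZE];
//! let param = FastAllocParam::new(buf.as_mut_ptr(), buf.len());
//! let mut alloc = unsafe { FastAlloc::new(param) };
//!
//! let p = alloc.malloc(32);
//! assert!(!p.is_null());
//! alloc.free(p);
//! ```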

/// Fixed block size in bytes; every allocation occupies exactly one block.
pub const BLOCK_SIZE: usize = 64;

/// A node of the intrusive, circular doubly-linked free list, embedded at
/// the start of each free block. An empty list is a sentinel linked to
/// itself.
struct Node {
    next: *mut Node,
    prev: *mut Node,
}

impl Node {
    /// Initialize `list` as an empty circular list: the sentinel links to itself.
    fn init(list: *mut Node) {
        unsafe {
            (*list).next = list;
            (*list).prev = list;
        }
    }

    /// Unlink `list` from its neighbours.
    fn remove(list: *mut Node) {
        unsafe {
            (*(*list).prev).next = (*list).next;
            (*(*list).next).prev = (*list).prev;
        }
    }

    /// Detach and return the node after `list`. On an empty list this returns
    /// `list` itself; `malloc` relies on that to hand out the sentinel block last.
    fn pop(list: *mut Node) -> *mut Node {
        let n_list: *mut Node = unsafe { (*list).next };
        Self::remove(n_list);
        n_list
    }

    /// Insert the block at `p` right after the sentinel `list`.
    fn push(list: *mut Node, p: *mut u8) {
        let p = p.cast::<Node>();
        unsafe {
            let n_list: Node = Node {
                prev: list,
                next: (*list).next,
            };
            p.write_unaligned(n_list);
            (*(*list).next).prev = p;
            (*list).next = p;
        }
    }

    /// A list is empty when the sentinel links back to itself.
    fn is_empty(list: *const Node) -> bool {
        unsafe { (*list).next as *const Node == list }
    }
}

/// Construction parameters for [`FastAlloc`]: the base address and byte
/// length of the memory region to manage.
#[derive(Clone, Copy)]
pub struct FastAllocParam {
    base_addr: *const u8,
    len: usize,
}

impl FastAllocParam {
    pub const fn new(base_addr: *const u8, len: usize) -> Self {
        FastAllocParam { base_addr, len }
    }
}

pub struct FastAlloc {
    /// start address of the managed memory
    base_addr: usize,
    /// end address of the managed memory (exclusive)
    end_addr: usize,
    /// sentinel of the free-block list; null once every block is handed out
    free: *mut Node,
}

impl FastAlloc {
    /// # Safety
    ///
    /// The range `base_addr..(base_addr + len)` must be valid memory reserved
    /// for this allocator before use, and nothing else may write to it;
    /// otherwise the behavior is undefined. `len` must be a non-zero multiple
    /// of `BLOCK_SIZE`, and `base_addr` must be aligned for pointer-sized
    /// writes, since free blocks store list links in place.
    pub unsafe fn new(param: FastAllocParam) -> Self {
        let FastAllocParam { base_addr, len } = param;
        let base_addr = base_addr as usize;
        let end_addr = base_addr + len;
        debug_assert_eq!(len % BLOCK_SIZE, 0);
        debug_assert_eq!(base_addr % core::mem::align_of::<Node>(), 0);

        let nblocks = len / BLOCK_SIZE;
        debug_assert!(nblocks > 0);

        // The first block doubles as the sentinel of the free list.
        let free = base_addr as *mut Node;
        Node::init(free);

        // Push every remaining block onto the free list.
        let mut addr = base_addr;
        for _ in 0..(nblocks - 1) {
            addr += BLOCK_SIZE;
            Node::push(free, addr as *mut u8);
        }

        FastAlloc {
            base_addr,
            end_addr,
            free,
        }
    }

    /// Whether `p` points into the memory range managed by this allocator.
    pub fn contains_ptr(&self, p: *mut u8) -> bool {
        let addr = p as usize;
        addr >= self.base_addr && addr < self.end_addr
    }

    /// Allocate one block. Returns a null pointer when `nbytes` exceeds
    /// `BLOCK_SIZE` or every block is already in use.
    pub fn malloc(&mut self, nbytes: usize) -> *mut u8 {
        if nbytes > BLOCK_SIZE || self.free.is_null() {
            return core::ptr::null_mut();
        }

        // When only the sentinel remains, `pop` returns the sentinel itself:
        // it is handed out as the final block and the head is cleared so
        // later calls fail fast.
        let is_last = Node::is_empty(self.free);
        let p = Node::pop(self.free) as *mut u8;
        if is_last {
            self.free = core::ptr::null_mut();
        }
        p
    }

    /// Return the block at `p` to the pool. `p` must have come from `malloc`
    /// on this allocator and must not have been freed already.
    pub fn free(&mut self, p: *mut u8) {
        debug_assert!(self.contains_ptr(p));
        if self.free.is_null() {
            // The pool was exhausted: the freed block becomes the new sentinel.
            let n = p.cast();
            Node::init(n);
            self.free = n;
        } else {
            Node::push(self.free, p);
        }
    }
}
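
// A minimal sanity-check sketch: the test module, names, and buffer size
// below are illustrative additions, not part of the original API.
#[cfg(test)]
mod tests {
    use super::*;

    /// Backing storage aligned to the block size (64) so that the in-place
    /// free-list writes are properly aligned.
    #[repr(align(64))]
    struct Buf([u8; 4 * BLOCK_SIZE]);

    #[test]
    fn malloc_free_roundtrip() {
        let mut buf = Buf([0; 4 * BLOCK_SIZE]);
        let param = FastAllocParam::new(buf.0.as_mut_ptr(), buf.0.len());
        let mut alloc = unsafe { FastAlloc::new(param) };

        // Requests larger than one block are rejected.
        assert!(alloc.malloc(BLOCK_SIZE + 1).is_null());

        // All four blocks can be handed out and lie inside the pool.
        let mut ptrs = [core::ptr::null_mut::<u8>(); 4];
        for slot in ptrs.iter_mut() {
            let p = alloc.malloc(BLOCK_SIZE);
            assert!(!p.is_null());
            assert!(alloc.contains_ptr(p));
            *slot = p;
        }

        // The pool is now exhausted.
        assert!(alloc.malloc(1).is_null());

        // Freeing one block makes exactly one allocation succeed again.
        alloc.free(ptrs[0]);
        assert!(!alloc.malloc(1).is_null());
        assert!(alloc.malloc(1).is_null());
    }
}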