linear_malloc/
lib.rs

//! An ultra simple single-threaded linear allocator.
//!
//! Useful for applications running under cachegrind/callgrind.
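//!
//! One possible setup (an assumption, not something this file prescribes):
//! build the crate as a `cdylib` and preload the resulting shared object, so
//! the profiled program's `malloc`/`calloc`/`realloc`/`free` calls resolve to
//! the functions below. The library file name follows from the crate name;
//! `liblinear_malloc.so` is assumed here:
//!
//! ```text
//! cargo build --release
//! LD_PRELOAD=target/release/liblinear_malloc.so \
//!     valgrind --tool=callgrind ./your-application
//! ```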

use core::ffi::c_void;
use core::ptr;

const HEAP_SIZE: usize = 1024 * 1024 * 128; // 128 MiB
const MAX_ALIGN: usize = 16;

// An aligned wrapper keeps the heap base (and every bump position) MAX_ALIGN-aligned.
#[repr(align(16))] // 16 = MAX_ALIGN
struct Heap([u8; HEAP_SIZE]);

static mut HEAP: Heap = Heap([0; HEAP_SIZE]);
static mut CURRENT: *mut u8 = unsafe { &HEAP.0[0] as *const _ as *mut _ };

#[no_mangle]
pub unsafe extern "C" fn malloc(size: usize) -> *mut c_void {
    let prev = CURRENT;

    // Reserve MAX_ALIGN bytes for a size header plus the allocation itself,
    // rounded up so that the next allocation stays MAX_ALIGN-aligned. Using
    // `wrapping_add` and integer comparison avoids forming an out-of-bounds
    // pointer (which would be UB) before the capacity check.
    let new_pos = CURRENT.wrapping_add(MAX_ALIGN).wrapping_add(size);
    let new_pos = new_pos.wrapping_add(new_pos.align_offset(MAX_ALIGN));
    let heap_start = &HEAP.0[0] as *const u8 as usize;
    if new_pos as usize - heap_start > HEAP_SIZE {
        // TODO: Better error handling
        libc::exit(1);
    }
    CURRENT = new_pos;

    // Stash the size just before the returned block so realloc can find it.
    ptr::write(prev as *mut usize, size);
    prev.add(MAX_ALIGN) as *mut c_void
}

#[no_mangle]
pub unsafe extern "C" fn free(_ptr: *mut c_void) {
    // Noop: the linear allocator never reclaims or reuses memory.
}

#[no_mangle]
pub unsafe extern "C" fn calloc(count: usize, size: usize) -> *mut c_void {
    // The heap starts zeroed and is never reused, so plain malloc already
    // hands out zero-filled memory.
    malloc(count * size)
}

#[no_mangle]
pub unsafe extern "C" fn realloc(old: *mut c_void, new_size: usize) -> *mut c_void {
    let new = malloc(new_size);
    if !old.is_null() {
        // The old block's size lives in the MAX_ALIGN-sized header that malloc
        // wrote just in front of the pointer it handed out.
        let old_size = ptr::read((old as *mut u8).sub(MAX_ALIGN) as *mut usize);
        ptr::copy_nonoverlapping(old as *mut u8, new as *mut u8, old_size.min(new_size));
        free(old);
    }
    new
}
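
linear_malloc/
Cargo.toml

# A manifest sketch to go with the usage note in lib.rs (not taken from the
# original project; the crate name, versions, and cdylib setting are all
# assumptions). `cdylib` produces the preloadable shared object, and the
# `libc` crate supplies the `libc::exit` call used by `malloc`.
[package]
name = "linear_malloc"
version = "0.1.0"
edition = "2021"

[lib]
path = "lib.rs" # matches the flat layout shown above
crate-type = ["cdylib"]

[dependencies]
libc = "0.2"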