//! Implementation of the HermitCore Allocator for dynamically allocating heap memory
//! in the kernel.
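//!
//! A minimal usage sketch; the actual registration and heap setup live elsewhere
//! in the kernel, and `heap_start`/`heap_size` are placeholders:
//!
//! ```ignore
//! #[global_allocator]
//! static ALLOCATOR: LockedAllocator = LockedAllocator::new();
//!
//! // Once the heap region has been reserved (placeholder bounds):
//! // unsafe { ALLOCATOR.init(heap_start, heap_size) };
//! ```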

use core::alloc::{GlobalAlloc, Layout};

use align_address::Align;
use hermit_sync::RawInterruptTicketMutex;
use talc::{ErrOnOom, Span, Talc, Talck};

pub struct LockedAllocator(Talck<RawInterruptTicketMutex, ErrOnOom>);

impl LockedAllocator {
	pub const fn new() -> Self {
		Self(Talc::new(ErrOnOom).lock())
	}

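	/// Rounds the layout up so that both its size and alignment cover at least one
	/// cache line (as given by `crossbeam_utils::CachePadded<u8>`), so that separate
	/// allocations never share a cache line.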
	#[inline]
	fn align_layout(layout: Layout) -> Layout {
		let size = layout
			.size()
			.align_up(core::mem::size_of::<crossbeam_utils::CachePadded<u8>>());
		let align = layout
			.align()
			.max(core::mem::align_of::<crossbeam_utils::CachePadded<u8>>());
		Layout::from_size_align(size, align).unwrap()
	}

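	/// Initializes the allocator with the heap starting at `heap_bottom` and
	/// spanning `heap_size` bytes.
	///
	/// # Safety
	///
	/// The described memory region must be valid for reads and writes and must not
	/// be used by anything other than this allocator.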
	pub unsafe fn init(&self, heap_bottom: *mut u8, heap_size: usize) {
		let arena = Span::from_base_size(heap_bottom, heap_size);
		unsafe {
			self.0.talc().init(arena);
		}
	}

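	/// Extends the allocator's arena with the memory region starting at `heap_bottom`
	/// and spanning `heap_size` bytes.
	///
	/// # Safety
	///
	/// The described memory region must be valid for reads and writes, must not be
	/// used by anything other than this allocator, and must satisfy the requirements
	/// of [`Talc::extend`].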
	pub unsafe fn extend(&self, heap_bottom: *mut u8, heap_size: usize) {
		let arena = Span::from_base_size(heap_bottom, heap_size);
		unsafe {
			self.0.talc().extend(arena);
		}
	}
}

/// To avoid false sharing, the global memory allocator aligns
/// all requests to a cache line.
unsafe impl GlobalAlloc for LockedAllocator {
	unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
		let layout = Self::align_layout(layout);
		unsafe { self.0.alloc(layout) }
	}

	unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
		let layout = Self::align_layout(layout);
		unsafe { self.0.dealloc(ptr, layout) }
	}
}

#[cfg(all(test, not(target_os = "none")))]
mod tests {
	use core::mem;

	use super::*;

	#[test]
	fn empty() {
		const ARENA_SIZE: usize = 0x1000;
		let mut arena: [u8; ARENA_SIZE] = [0; ARENA_SIZE];
		let allocator: LockedAllocator = LockedAllocator::new();
		unsafe {
			allocator.init(arena.as_mut_ptr(), ARENA_SIZE);
		}

		let layout = Layout::from_size_align(1, 1).unwrap();
		// The arena provides 4 KiB of memory, so this small allocation should succeed.
		assert!(unsafe { !allocator.alloc(layout).is_null() });

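		// A second request for 0x1000 bytes cannot fit: the first allocation and the
		// allocator's metadata already consume part of the 4 KiB arena, so it must fail.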
		let layout = Layout::from_size_align(0x1000, mem::align_of::<usize>()).unwrap();
		let addr = unsafe { allocator.alloc(layout) };
		assert!(addr.is_null());
	}
}