//! A fixed-size bump allocator that never frees, registered as the crate's
//! `#[global_allocator]` so that `alloc` collections work without an OS allocator.
#![no_std]

extern crate alloc;

use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::ptr::null_mut;
use core::sync::atomic::{AtomicUsize, Ordering::SeqCst};

/// Total size of the static arena: 8192 * 8192 bytes = 64 MiB.
const ARENA_SIZE: usize = 8192 * 8192;
/// Largest alignment the allocator supports; matches the `repr(align)` of `FAlloc`.
const MAX_SUPPORTED_ALIGN: usize = 4096;
/// A byte arena plus an atomic cursor; allocations bump the cursor downwards.
#[repr(C, align(4096))] // 4096 == MAX_SUPPORTED_ALIGN
struct FAlloc {
    arena: UnsafeCell<[u8; ARENA_SIZE]>,
    remaining: AtomicUsize,
}

#[global_allocator]
static ALLOCATOR: FAlloc = FAlloc {
    arena: UnsafeCell::new([0x55; ARENA_SIZE]),
    remaining: AtomicUsize::new(ARENA_SIZE),
};

// SAFETY: the arena is only handed out as non-overlapping ranges reserved
// through the atomic `remaining` counter, so sharing `FAlloc` between threads
// is sound even though it contains an `UnsafeCell`.
unsafe impl Sync for FAlloc {}

unsafe impl GlobalAlloc for FAlloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let size = layout.size();
        let align = layout.align();

        // `Layout` guarantees that `align` is a non-zero power of two, so this
        // mask rounds an offset down to the nearest multiple of `align`.
        let align_mask_to_round_down = !(align - 1);

        if align > MAX_SUPPORTED_ALIGN {
            return null_mut();
        }

        let mut allocated = 0;
        if self
            .remaining
            .fetch_update(SeqCst, SeqCst, |mut remaining| {
                if size > remaining {
                    return None;
                }
                // Bump downwards: reserve `size` bytes from the top of what is
                // left, then round the new offset down so it is aligned.
                remaining -= size;
                remaining &= align_mask_to_round_down;
                allocated = remaining;
                Some(remaining)
            })
            .is_err()
        {
            return null_mut();
        };
        self.arena.get().cast::<u8>().add(allocated)
    }

    // This is a bump allocator: memory is never reclaimed, so `dealloc` is a no-op.
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
}

#[cfg(test)]
mod tests {
    use super::*;
    use alloc::vec;

    #[test]
    fn it_works() {
        // A plain allocation should succeed and respect the requested alignment.
        let ptr = unsafe { ALLOCATOR.alloc(Layout::from_size_align(1024, 4).unwrap()) };
        assert!(!ptr.is_null());
        assert_eq!(ptr as usize % 4, 0);
    }

    #[test]
    fn vec_test() {
        // `vec!` goes through the global allocator; check the contents survive.
        let v = vec![1, 2, 3];
        assert_eq!(v, [1, 2, 3]);
    }
    #[test]
    fn vec_test_2() {
        // Repeated pushes force the `Vec` to reallocate several times; the old
        // blocks are never reclaimed because `dealloc` is a no-op.
        let mut v = vec![1, 2, 3];
        for _ in 0..10000 {
            v.push(1)
        }
        assert_eq!(v.len(), 10003);
    }
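
    // Illustrative sketch of how the alignment handling can be exercised: the
    // returned pointer should honour the requested alignment, and requests above
    // `MAX_SUPPORTED_ALIGN` are refused with a null pointer. The test name and the
    // concrete sizes used here are arbitrary choices, not part of the allocator itself.
    #[test]
    fn alignment_limits() {
        // 64-byte alignment is within MAX_SUPPORTED_ALIGN, so this should succeed.
        let ptr = unsafe { ALLOCATOR.alloc(Layout::from_size_align(64, 64).unwrap()) };
        assert!(!ptr.is_null());
        assert_eq!(ptr as usize % 64, 0);

        // Anything above MAX_SUPPORTED_ALIGN is rejected.
        let oversized = unsafe {
            ALLOCATOR.alloc(Layout::from_size_align(64, MAX_SUPPORTED_ALIGN * 2).unwrap())
        };
        assert!(oversized.is_null());
    }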
}