composable_allocators/stacked.rs

use crate::base::*;
use core::alloc::{self, AllocError, Allocator};
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use core::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

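/// A fixed-buffer, stack-like bump allocator.
///
/// Allocations are carved from the front of a caller-provided byte buffer by
/// advancing an atomic offset. Only the most recently allocated block can be
/// reclaimed, grown, or shrunk in place; freeing any other block merely
/// decrements the live-allocation counter and its bytes are not reused.
/// The `Drop` impl asserts that every allocation has been returned.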
pub struct Stacked {
    buf_ptr: AtomicPtr<u8>,
    buf_len: usize,
    allocated: AtomicUsize,
    allocations_count: AtomicUsize,
}

impl Drop for Stacked {
    fn drop(&mut self) {
        assert!(self.allocations_count.load(Ordering::Relaxed) == 0, "memory leaks in Stacked allocator");
    }
}

unsafe impl NonUnwinding for Stacked { }

impl Stacked {
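    /// Creates a `Stacked` allocator that takes exclusive ownership of the given
    /// `'static` byte buffer.
    ///
    /// A minimal construction sketch (hypothetical, not from this crate; assumes
    /// `std` is available so a `'static` buffer can be obtained by leaking a heap
    /// allocation):
    ///
    /// ```ignore
    /// let buf: &'static mut [MaybeUninit<u8>] =
    ///     Box::leak(vec![MaybeUninit::uninit(); 4096].into_boxed_slice());
    /// let alloc = Stacked::from_static_slice(buf);
    /// ```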
    pub const fn from_static_slice(
        buf: &'static mut [MaybeUninit<u8>],
    ) -> Self {
        Stacked {
            buf_ptr: AtomicPtr::new(buf.as_mut_ptr() as *mut u8),
            buf_len: buf.len(),
            allocated: AtomicUsize::new(0),
            allocations_count: AtomicUsize::new(0),
        }
    }

    pub const fn from_static_array<const BUF_LEN: usize>(
        buf: &'static mut [MaybeUninit<u8>; BUF_LEN],
    ) -> Self {
        Stacked {
            buf_ptr: AtomicPtr::new(buf.as_mut_ptr() as *mut u8),
            buf_len: BUF_LEN,
            allocated: AtomicUsize::new(0),
            allocations_count: AtomicUsize::new(0),
        }
    }

    /// Builds a temporary `Stacked` allocator over the given raw buffer and passes
    /// it to `f`.
    ///
    /// # Safety
    ///
    /// `buf_ptr` must be a valid, unique pointer to a buffer of `buf_len` bytes.
    ///
    /// The arguments must satisfy
    /// `buf_len <= isize::MAX as usize`
    /// and
    /// `(isize::MAX as usize) - buf_len >= buf_ptr as usize`.
    pub unsafe fn with_buf_raw<T>(
        buf_ptr: NonNull<MaybeUninit<u8>>,
        buf_len: usize,
        f: impl for<'a> FnOnce(&'a Stacked) -> T
    ) -> T {
        let stacked = Stacked {
            buf_ptr: AtomicPtr::new(buf_ptr.as_ptr() as *mut u8),
            buf_len,
            allocated: AtomicUsize::new(0),
            allocations_count: AtomicUsize::new(0),
        };
        f(&stacked)
    }

    unsafe fn grow_raw(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // In-place growth cannot raise the alignment of an existing block.
        if new_layout.align() > old_layout.align() { return Err(AllocError); }
        let start_offset = ptr.as_ptr().offset_from(self.buf_ptr.load(Ordering::Relaxed)) as usize;
        if new_layout.size() > self.buf_len - start_offset { return Err(AllocError); }
        let old_end_offset = start_offset + old_layout.size();
        let new_end_offset = start_offset + new_layout.size();
        // Growth succeeds only if this block is the most recent allocation,
        // i.e. its end is the current top of the stack.
        self.allocated.compare_exchange(old_end_offset, new_end_offset, Ordering::Relaxed, Ordering::Relaxed)
            .map_err(|_| AllocError)?;
        if zeroed {
            ptr.as_ptr().add(old_layout.size()).write_bytes(0, new_layout.size() - old_layout.size());
        }
        Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
    }
}

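/// Runs `f` with a `Stacked` allocator backed by a `BUF_LEN`-byte buffer placed on
/// the call stack.
///
/// A minimal usage sketch (hypothetical, not from this crate; assumes a nightly
/// toolchain with the `allocator_api` feature so that `Vec::new_in` is available):
///
/// ```ignore
/// with_size::<1024, _>(|alloc| {
///     let mut v = Vec::new_in(alloc);
///     v.push(1u8);
///     v.push(2);
///     assert_eq!(v[0] + v[1], 3);
/// });
/// ```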
pub fn with_size<const BUF_LEN: usize, T>(
    f: impl for<'a> FnOnce(&'a Stacked) -> T
) -> T {
    let mut buf: [MaybeUninit<u8>; BUF_LEN] = [MaybeUninit::uninit(); BUF_LEN];
    let buf_ptr = unsafe { NonNull::new_unchecked(buf.as_mut_ptr()) };
    assert!((isize::MAX as usize) - BUF_LEN >= buf_ptr.as_ptr() as usize);
    unsafe { Stacked::with_buf_raw(buf_ptr, BUF_LEN, f) }
}

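/// Runs `f` with a `Stacked` allocator backed by the caller-provided buffer `buf`.
///
/// A minimal usage sketch (hypothetical, not from this crate; assumes a nightly
/// toolchain with the `allocator_api` feature so that `Box::new_in` is available):
///
/// ```ignore
/// let mut buf = [MaybeUninit::<u8>::uninit(); 256];
/// with_buf(&mut buf, |alloc| {
///     let b = Box::new_in(7u32, alloc);
///     assert_eq!(*b, 7);
/// });
/// ```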
pub fn with_buf<T>(
    buf: &mut [MaybeUninit<u8>],
    f: impl for<'a> FnOnce(&'a Stacked) -> T
) -> T {
    let buf_len = buf.len();
    assert!(buf_len <= isize::MAX as usize && (isize::MAX as usize) - buf_len >= buf.as_ptr() as usize);
    let buf_ptr = unsafe { NonNull::new_unchecked(buf.as_mut_ptr()) };
    unsafe { Stacked::with_buf_raw(buf_ptr, buf_len, f) }
}

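// `Fallbackable` comes from `crate::base`; as used here it lets a composing allocator
// ask whether a pointer was produced by this allocator (so deallocation can be routed
// correctly) and whether falling back to another allocator is permitted for a layout.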
unsafe impl Fallbackable for Stacked {
    unsafe fn has_allocated(&self, ptr: NonNull<u8>, _layout: alloc::Layout) -> bool {
        // The pointer belongs to this allocator iff it lies within the managed buffer.
        if let Some(offset) = (ptr.as_ptr() as usize).checked_sub(self.buf_ptr.load(Ordering::Relaxed) as usize) {
            offset < self.buf_len && self.buf_ptr.load(Ordering::Relaxed).add(offset) == ptr.as_ptr()
        } else {
            false
        }
    }

    fn allows_fallback(&self, _layout: alloc::Layout) -> bool {
        true
    }
}

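// The `Allocator` methods below bump `allocated` forward on `allocate` and use a
// compare-exchange against the current top offset, so only the most recent block
// can be freed, grown, or shrunk in place.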
unsafe impl Allocator for Stacked {
    fn allocate(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        let mut padding = MaybeUninit::uninit();
        let allocated = self.allocated.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |allocated| {
            let ptr = unsafe { self.buf_ptr.load(Ordering::Relaxed).add(allocated) };
            // Pad the current top of the stack up to the requested alignment.
            let padding = padding.write((layout.align() - (ptr as usize) % layout.align()) % layout.align());
            let size = padding.checked_add(layout.size())?;
            if size > self.buf_len - allocated { return None; }
            Some(allocated + size)
        }).map_err(|_| AllocError)?;
        let ptr = unsafe { self.buf_ptr.load(Ordering::Relaxed).add(allocated) };
        let padding = unsafe { padding.assume_init() };
        self.allocations_count.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |allocations_count|
            allocations_count.checked_add(1)
        ).map_err(|_| AllocError)?;
        let res = NonNull::slice_from_raw_parts(
            unsafe { NonNull::new_unchecked(ptr.add(padding)) },
            layout.size()
        );
        Ok(res)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: alloc::Layout) {
        let start_offset = ptr.as_ptr().offset_from(self.buf_ptr.load(Ordering::Relaxed)) as usize;
        let end_offset = start_offset + layout.size();
        // Only the topmost block is actually reclaimed; for any other block the
        // compare-exchange fails and its bytes simply remain unused.
        let _ = self.allocated.compare_exchange(end_offset, start_offset, Ordering::Relaxed, Ordering::Relaxed);
        self.allocations_count.fetch_sub(1, Ordering::Relaxed);
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        self.grow_raw(ptr, old_layout, new_layout, false)
    }

    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        self.grow_raw(ptr, old_layout, new_layout, true)
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if new_layout.align() > old_layout.align() { return Err(AllocError); }
        let start_offset = ptr.as_ptr().offset_from(self.buf_ptr.load(Ordering::Relaxed)) as usize;
        let old_end_offset = start_offset + old_layout.size();
        let new_end_offset = start_offset + new_layout.size();
        // If this block is on top of the stack, give the freed tail back; otherwise
        // keep the old extent and report the original (larger) block, which is still
        // a valid result for `shrink`.
        let size = match self.allocated.compare_exchange(old_end_offset, new_end_offset, Ordering::Relaxed, Ordering::Relaxed) {
            Ok(_) => new_layout.size(),
            Err(_) => old_layout.size(),
        };
        Ok(NonNull::slice_from_raw_parts(ptr, size))
    }
}