//! Allocation utilities for `oxigdal_embedded` (`alloc_utils.rs`): bump,
//! stack (LIFO), and arena allocators for `no_std` targets.

#[cfg(feature = "alloc")]
use alloc::alloc::{GlobalAlloc, Layout};
use core::ptr::NonNull;

use crate::error::{EmbeddedError, Result};
use crate::memory_pool::MemoryPool;
/// Bump allocator that delegates all bookkeeping to an underlying
/// [`MemoryPool`]. Individual deallocation is a no-op (see the
/// `GlobalAlloc` impl below); memory is reclaimed wholesale via `reset`.
pub struct BumpAllocator<P: MemoryPool> {
    /// Backing pool that performs the actual allocation bookkeeping.
    pool: P,
}
19
impl<P: MemoryPool> BumpAllocator<P> {
    /// Wraps `pool` in a new bump allocator.
    pub const fn new(pool: P) -> Self {
        Self { pool }
    }

    /// Allocates `size` bytes with alignment `align` from the pool.
    ///
    /// NOTE(review): unlike `StackAllocator`/`Arena`, no local validation of
    /// `size`/`align` happens here — presumably the pool implementation
    /// validates; confirm against the `MemoryPool` contract.
    pub fn allocate(&self, size: usize, align: usize) -> Result<NonNull<u8>> {
        self.pool.allocate(size, align)
    }

    /// Total capacity of the backing pool, in bytes.
    pub fn capacity(&self) -> usize {
        self.pool.capacity()
    }

    /// Bytes currently in use, as reported by the backing pool.
    pub fn used(&self) -> usize {
        self.pool.used()
    }

    /// Bytes still available, as reported by the backing pool.
    pub fn available(&self) -> usize {
        self.pool.available()
    }

    /// Resets the backing pool, reclaiming all allocations at once.
    ///
    /// # Safety
    /// The caller must ensure that no pointer previously returned by
    /// [`allocate`](Self::allocate) is dereferenced after the reset.
    pub unsafe fn reset(&self) -> Result<()> {
        unsafe { self.pool.reset() }
    }
}
60
#[cfg(feature = "alloc")]
// SAFETY: allocation is delegated to the pool; the `P: Sync` bound is
// required because a global allocator is shared across threads.
unsafe impl<P: MemoryPool + Sync> GlobalAlloc for BumpAllocator<P> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Translate the pool's `Result` into the null-on-failure convention
        // required by the `GlobalAlloc` contract.
        match self.pool.allocate(layout.size(), layout.align()) {
            Ok(ptr) => ptr.as_ptr(),
            Err(_) => core::ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // Bump allocation: individual frees are intentionally a no-op;
        // memory is only reclaimed wholesale via `reset`.
        let _ = ptr;
        let _ = layout;
    }
}
76
77pub struct StackAllocator<const N: usize> {
81 buffer: [u8; N],
82 offset: core::cell::Cell<usize>,
83}
84
85impl<const N: usize> StackAllocator<N> {
86 pub const fn new() -> Self {
88 Self {
89 buffer: [0u8; N],
90 offset: core::cell::Cell::new(0),
91 }
92 }
93
94 pub fn allocate(&self, size: usize, align: usize) -> Result<NonNull<u8>> {
100 if size == 0 {
101 return Err(EmbeddedError::InvalidParameter);
102 }
103
104 if !align.is_power_of_two() {
105 return Err(EmbeddedError::InvalidAlignment {
106 required: align,
107 actual: 0,
108 });
109 }
110
111 let current_offset = self.offset.get();
112 let base_addr = self.buffer.as_ptr() as usize;
113 let aligned_offset = (current_offset + align - 1) & !(align - 1);
114
115 let new_offset = match aligned_offset.checked_add(size) {
116 Some(offset) if offset <= N => offset,
117 _ => {
118 return Err(EmbeddedError::BufferTooSmall {
119 required: size,
120 available: N.saturating_sub(current_offset),
121 });
122 }
123 };
124
125 self.offset.set(new_offset);
126
127 let ptr_addr = base_addr.wrapping_add(aligned_offset);
128 let ptr = unsafe { NonNull::new_unchecked(ptr_addr as *mut u8) };
130 Ok(ptr)
131 }
132
133 pub unsafe fn pop(&self, size: usize) -> Result<()> {
139 let current_offset = self.offset.get();
140 if size > current_offset {
141 return Err(EmbeddedError::InvalidParameter);
142 }
143
144 self.offset.set(current_offset - size);
145 Ok(())
146 }
147
148 pub fn used(&self) -> usize {
150 self.offset.get()
151 }
152
153 pub fn available(&self) -> usize {
155 N.saturating_sub(self.offset.get())
156 }
157
158 pub fn reset(&self) {
160 self.offset.set(0);
161 }
162}
163
164impl<const N: usize> Default for StackAllocator<N> {
165 fn default() -> Self {
166 Self::new()
167 }
168}
169
170pub struct Arena<const N: usize> {
174 buffer: core::cell::UnsafeCell<[u8; N]>,
175 offset: core::cell::Cell<usize>,
176}
177
178impl<const N: usize> Arena<N> {
179 pub const fn new() -> Self {
181 Self {
182 buffer: core::cell::UnsafeCell::new([0u8; N]),
183 offset: core::cell::Cell::new(0),
184 }
185 }
186
187 pub fn allocate(&self, size: usize, align: usize) -> Result<NonNull<u8>> {
193 if size == 0 {
194 return Err(EmbeddedError::InvalidParameter);
195 }
196
197 if !align.is_power_of_two() {
198 return Err(EmbeddedError::InvalidAlignment {
199 required: align,
200 actual: 0,
201 });
202 }
203
204 let current_offset = self.offset.get();
205 let aligned_offset = (current_offset + align - 1) & !(align - 1);
206
207 let new_offset = match aligned_offset.checked_add(size) {
208 Some(offset) if offset <= N => offset,
209 _ => {
210 return Err(EmbeddedError::BufferTooSmall {
211 required: size,
212 available: N.saturating_sub(current_offset),
213 });
214 }
215 };
216
217 self.offset.set(new_offset);
218
219 let base_ptr = self.buffer.get() as *mut u8;
221 let ptr = unsafe { base_ptr.add(aligned_offset) };
222 let nonnull = NonNull::new(ptr).ok_or(EmbeddedError::AllocationFailed)?;
223 Ok(nonnull)
224 }
225
226 pub fn allocate_typed<T>(&self) -> Result<NonNull<T>> {
235 let ptr = self.allocate(core::mem::size_of::<T>(), core::mem::align_of::<T>())?;
236 Ok(ptr.cast::<T>())
237 }
238
239 pub fn clear(&self) {
241 self.offset.set(0);
242 }
243
244 pub fn used(&self) -> usize {
246 self.offset.get()
247 }
248
249 pub fn available(&self) -> usize {
251 N.saturating_sub(self.offset.get())
252 }
253}
254
255impl<const N: usize> Default for Arena<N> {
256 fn default() -> Self {
257 Self::new()
258 }
259}
260
#[cfg(test)]
mod tests {
    use super::*;
    use crate::memory_pool::StaticPool;

    #[test]
    fn test_bump_allocator() {
        let pool = StaticPool::<1024>::new();
        let allocator = BumpAllocator::new(pool);

        let ptr1 = allocator.allocate(64, 8).expect("allocation failed");
        let ptr2 = allocator.allocate(128, 16).expect("allocation failed");

        assert_ne!(ptr1, ptr2);
        assert!(allocator.used() > 0);
    }

    #[test]
    fn test_stack_allocator() {
        let allocator = StackAllocator::<1024>::new();

        let _ptr1 = allocator.allocate(64, 8).expect("allocation failed");
        // Alignment padding may precede a block, so compare with >=
        // instead of assuming used() equals the sum of request sizes.
        let used_after_first = allocator.used();
        assert!(used_after_first >= 64);

        let _ptr2 = allocator.allocate(128, 16).expect("allocation failed");
        let used_after_second = allocator.used();
        assert!(used_after_second >= used_after_first + 128);

        unsafe {
            allocator.pop(128).expect("pop failed");
        }
        // pop releases exactly `size` bytes.
        assert_eq!(allocator.used(), used_after_second - 128);
    }

    #[test]
    fn test_stack_allocator_rejects_bad_input() {
        let allocator = StackAllocator::<64>::new();

        // Zero-size, non-power-of-two alignment, and oversize requests
        // must all be rejected.
        assert!(allocator.allocate(0, 8).is_err());
        assert!(allocator.allocate(8, 3).is_err());
        assert!(allocator.allocate(128, 8).is_err());
    }

    #[test]
    fn test_arena_allocator() {
        let arena = Arena::<1024>::new();

        let _ptr1 = arena.allocate(64, 8).expect("allocation failed");
        let _ptr2 = arena.allocate(128, 16).expect("allocation failed");

        assert!(arena.used() > 0);

        arena.clear();
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_arena_typed_allocation() {
        let arena = Arena::<1024>::new();

        let mut ptr: NonNull<u64> = arena.allocate_typed().expect("allocation failed");
        // SAFETY: the arena returned storage sized and aligned for a u64
        // that no other reference aliases.
        let value = unsafe { ptr.as_mut() };
        *value = 42;
        assert_eq!(*value, 42);
    }
}