// `core` provides everything needed here and is available both with and
// without `std`, so a single unconditional import replaces the cfg split.
use core::{
    alloc::{GlobalAlloc, Layout},
    cell::UnsafeCell,
    ptr::{self, NonNull},
    sync::atomic::{AtomicUsize, Ordering},
};

/// A fixed-size pool of uniformly sized blocks backed by an inline buffer.
///
/// Blocks are handed out in order via an atomic index; `dealloc` rewinds that
/// index, so frees are assumed to happen in LIFO order.
pub struct FixedPool<const SIZE: usize, const ALIGN: usize> {
    buffer: UnsafeCell<[u8; SIZE]>,
    free_list: AtomicUsize,
    block_size: usize,
    num_blocks: usize,
}

impl<const SIZE: usize, const ALIGN: usize> FixedPool<SIZE, ALIGN> {
    /// Creates a pool whose blocks are `block_size` bytes, rounded up to a
    /// multiple of `ALIGN`.
    #[allow(clippy::manual_div_ceil)]
    pub const fn new(block_size: usize) -> Self {
        let aligned_block_size = ((block_size + ALIGN - 1) / ALIGN) * ALIGN;
        let num_blocks = SIZE / aligned_block_size;

        Self {
            buffer: UnsafeCell::new([0u8; SIZE]),
            free_list: AtomicUsize::new(0),
            block_size: aligned_block_size,
            num_blocks,
        }
    }

    /// Allocates one block, or returns `None` if the pool is exhausted.
    pub fn alloc(&self) -> Option<NonNull<u8>> {
        loop {
            let free_idx = self.free_list.load(Ordering::Acquire);
            if free_idx >= self.num_blocks {
                return None;
            }

            if self
                .free_list
                .compare_exchange(free_idx, free_idx + 1, Ordering::AcqRel, Ordering::Acquire)
                .is_ok()
            {
                let offset = free_idx * self.block_size;
                let ptr = unsafe {
                    let buf = &*self.buffer.get();
                    buf.as_ptr().add(offset) as *mut u8
                };
                return NonNull::new(ptr);
            }
        }
    }

    /// Returns a block by rewinding the allocation index. The pointer is
    /// ignored: only a count is tracked, so frees must occur in LIFO order.
    pub unsafe fn dealloc(&self, _ptr: NonNull<u8>) {
        // One atomic read-modify-write avoids the race between a separate
        // load and `fetch_sub`; `checked_sub` stops the rewind at zero.
        let _ = self
            .free_list
            .fetch_update(Ordering::AcqRel, Ordering::Acquire, |n| n.checked_sub(1));
    }

    /// Resets the pool, making every block available again.
    ///
    /// # Safety
    ///
    /// No pointer previously returned by `alloc` may be used afterwards.
    pub unsafe fn reset(&self) {
        self.free_list.store(0, Ordering::Release);
    }

    /// Number of blocks still available.
    pub fn available(&self) -> usize {
        let used = self.free_list.load(Ordering::Acquire);
        self.num_blocks.saturating_sub(used)
    }

    /// Total number of blocks in the pool.
    pub const fn capacity(&self) -> usize {
        self.num_blocks
    }

    /// Size of each block in bytes, after rounding up to `ALIGN`.
    pub const fn block_size(&self) -> usize {
        self.block_size
    }
}

// SAFETY: the buffer is only reached through the atomic index, which hands
// each block to at most one caller at a time.
unsafe impl<const SIZE: usize, const ALIGN: usize> Send for FixedPool<SIZE, ALIGN> {}
unsafe impl<const SIZE: usize, const ALIGN: usize> Sync for FixedPool<SIZE, ALIGN> {}

/// A monotonic bump allocator over a fixed buffer: individual allocations
/// are never freed; the whole region is reclaimed at once via `reset`.
pub struct BumpAllocator<const SIZE: usize> {
    buffer: UnsafeCell<[u8; SIZE]>,
    offset: AtomicUsize,
}

impl<const SIZE: usize> BumpAllocator<SIZE> {
    pub const fn new() -> Self {
        Self {
            buffer: UnsafeCell::new([0u8; SIZE]),
            offset: AtomicUsize::new(0),
        }
    }

    /// Allocates `layout.size()` bytes aligned to `layout.align()` (relative
    /// to the buffer start), or returns `None` if there is not enough space.
    pub fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        let size = layout.size();
        let align = layout.align();

        loop {
            let current_offset = self.offset.load(Ordering::Acquire);

            // `Layout` guarantees a power-of-two alignment, so the round-up
            // mask is valid; checked arithmetic rejects oversized requests
            // instead of wrapping.
            let aligned_offset = current_offset.checked_add(align - 1)? & !(align - 1);
            let new_offset = aligned_offset.checked_add(size)?;

            if new_offset > SIZE {
                return None;
            }

            if self
                .offset
                .compare_exchange(
                    current_offset,
                    new_offset,
                    Ordering::AcqRel,
                    Ordering::Acquire,
                )
                .is_ok()
            {
                let ptr = unsafe {
                    let buf = &*self.buffer.get();
                    buf.as_ptr().add(aligned_offset) as *mut u8
                };
                return NonNull::new(ptr);
            }
        }
    }

    /// Resets the allocator, reclaiming the whole buffer.
    ///
    /// # Safety
    ///
    /// No pointer previously returned by `alloc` may be used afterwards.
    pub unsafe fn reset(&self) {
        self.offset.store(0, Ordering::Release);
    }

    /// Bytes currently allocated, including alignment padding.
    pub fn used(&self) -> usize {
        self.offset.load(Ordering::Acquire)
    }

    /// Bytes still available.
    pub fn available(&self) -> usize {
        SIZE.saturating_sub(self.used())
    }

    /// Total buffer size in bytes.
    pub const fn capacity(&self) -> usize {
        SIZE
    }
}

// SAFETY: the only shared state is the atomic offset; distinct callers
// receive disjoint byte ranges.
unsafe impl<const SIZE: usize> Send for BumpAllocator<SIZE> {}
unsafe impl<const SIZE: usize> Sync for BumpAllocator<SIZE> {}

impl<const SIZE: usize> Default for BumpAllocator<SIZE> {
    fn default() -> Self {
        Self::new()
    }
}

/// A LIFO stack allocator: `push` returns a pointer together with the offset
/// to restore when that allocation is popped.
pub struct StackAllocator<const SIZE: usize> {
    buffer: UnsafeCell<[u8; SIZE]>,
    offset: AtomicUsize,
}

impl<const SIZE: usize> StackAllocator<SIZE> {
    pub const fn new() -> Self {
        Self {
            buffer: UnsafeCell::new([0u8; SIZE]),
            offset: AtomicUsize::new(0),
        }
    }

    /// Pushes an allocation, returning the pointer and the offset to pass to
    /// `pop` when releasing it, or `None` if there is not enough space.
    pub fn push(&self, layout: Layout) -> Option<(NonNull<u8>, usize)> {
        let size = layout.size();
        let align = layout.align();

        loop {
            let current_offset = self.offset.load(Ordering::Acquire);

            // Same scheme as `BumpAllocator::alloc`: power-of-two round-up
            // with checked arithmetic to reject oversized requests.
            let aligned_offset = current_offset.checked_add(align - 1)? & !(align - 1);
            let new_offset = aligned_offset.checked_add(size)?;

            if new_offset > SIZE {
                return None;
            }

            if self
                .offset
                .compare_exchange(
                    current_offset,
                    new_offset,
                    Ordering::AcqRel,
                    Ordering::Acquire,
                )
                .is_ok()
            {
                let ptr = unsafe {
                    let buf = &*self.buffer.get();
                    buf.as_ptr().add(aligned_offset) as *mut u8
                };
                return NonNull::new(ptr).map(|p| (p, current_offset));
            }
        }
    }

    /// Pops back to `saved_offset`, releasing everything pushed after it.
    ///
    /// # Safety
    ///
    /// `saved_offset` must have been returned by `push` on this allocator,
    /// pops must occur in reverse push order, and no pointer released by
    /// this call may be used afterwards.
    pub unsafe fn pop(&self, saved_offset: usize) {
        self.offset.store(saved_offset, Ordering::Release);
    }

    /// Bytes currently pushed, including alignment padding.
    pub fn depth(&self) -> usize {
        self.offset.load(Ordering::Acquire)
    }

    /// Bytes still available.
    pub fn available(&self) -> usize {
        SIZE.saturating_sub(self.depth())
    }

    /// Total buffer size in bytes.
    pub const fn capacity(&self) -> usize {
        SIZE
    }
}

// SAFETY: as with `BumpAllocator`, the only shared state is the atomic offset.
unsafe impl<const SIZE: usize> Send for StackAllocator<SIZE> {}
unsafe impl<const SIZE: usize> Sync for StackAllocator<SIZE> {}

impl<const SIZE: usize> Default for StackAllocator<SIZE> {
    fn default() -> Self {
        Self::new()
    }
}

/// A composite allocator for embedded targets: requests no larger than the
/// pool's block size are served from a `FixedPool`, everything else from a
/// `BumpAllocator`.
pub struct EmbeddedAllocator<const SIZE: usize, const POOL_SIZE: usize> {
    bump: BumpAllocator<SIZE>,
    pool: FixedPool<POOL_SIZE, 8>,
}

impl<const SIZE: usize, const POOL_SIZE: usize> EmbeddedAllocator<SIZE, POOL_SIZE> {
    pub const fn new(block_size: usize) -> Self {
        Self {
            bump: BumpAllocator::new(),
            pool: FixedPool::new(block_size),
        }
    }

    /// Resets both regions.
    ///
    /// # Safety
    ///
    /// No previously allocated pointer may be used afterwards.
    pub unsafe fn reset(&self) {
        self.bump.reset();
        self.pool.reset();
    }

    /// Approximate bytes in use across both regions.
    pub fn used(&self) -> usize {
        self.bump.used() + (self.pool.capacity() - self.pool.available()) * self.pool.block_size()
    }

    /// Combined capacity of the bump region and the pool, in bytes.
    pub const fn capacity(&self) -> usize {
        SIZE + POOL_SIZE
    }
}

unsafe impl<const SIZE: usize, const POOL_SIZE: usize> GlobalAlloc
    for EmbeddedAllocator<SIZE, POOL_SIZE>
{
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Small requests go to the pool, but only when its 8-byte block
        // alignment satisfies the request; everything else falls through
        // to the bump region.
        if layout.size() <= self.pool.block_size() && layout.align() <= 8 {
            if let Some(ptr) = self.pool.alloc() {
                return ptr.as_ptr();
            }
        }

        self.bump
            .alloc(layout)
            .map(|p| p.as_ptr())
            .unwrap_or(ptr::null_mut())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
        if let Some(nonnull) = NonNull::new(ptr) {
            // Only pointers that actually came from the pool's buffer may
            // rewind its counter; bump allocations are reclaimed en masse
            // via `reset`.
            let start = self.pool.buffer.get() as usize;
            let addr = nonnull.as_ptr() as usize;
            if addr >= start && addr < start + POOL_SIZE {
                self.pool.dealloc(nonnull);
            }
        }
    }
}

unsafe impl<const SIZE: usize, const POOL_SIZE: usize> Send for EmbeddedAllocator<SIZE, POOL_SIZE> {}
unsafe impl<const SIZE: usize, const POOL_SIZE: usize> Sync for EmbeddedAllocator<SIZE, POOL_SIZE> {}

/// RAII guard that pops its `StackAllocator` allocation when dropped.
pub struct StackGuard<'a, const SIZE: usize> {
    allocator: &'a StackAllocator<SIZE>,
    saved_offset: usize,
}

impl<const SIZE: usize> Drop for StackGuard<'_, SIZE> {
    fn drop(&mut self) {
        // SAFETY: the guard holds the offset returned by the matching `push`,
        // and guards drop in reverse creation order within a scope.
        unsafe {
            self.allocator.pop(self.saved_offset);
        }
    }
}

impl<const SIZE: usize> StackAllocator<SIZE> {
    /// Like `push`, but returns a guard that pops automatically on drop.
    pub fn scoped_alloc(&self, layout: Layout) -> Option<(NonNull<u8>, StackGuard<'_, SIZE>)> {
        self.push(layout).map(|(ptr, saved_offset)| {
            let guard = StackGuard {
                allocator: self,
                saved_offset,
            };
            (ptr, guard)
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_fixed_pool_basic() {
        let pool: FixedPool<1024, 8> = FixedPool::new(64);
        assert_eq!(pool.capacity(), 16);
        assert_eq!(pool.available(), 16);

        let ptr1 = pool.alloc().expect("Failed to allocate");
        assert_eq!(pool.available(), 15);

        let ptr2 = pool.alloc().expect("Failed to allocate");
        assert_eq!(pool.available(), 14);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());

        unsafe {
            pool.dealloc(ptr1);
            pool.dealloc(ptr2);
        }
    }

    #[test]
    fn test_fixed_pool_exhaustion() {
        let pool: FixedPool<128, 8> = FixedPool::new(32);
        assert_eq!(pool.capacity(), 4);

        let mut ptrs = vec![];
        for _ in 0..4 {
            ptrs.push(pool.alloc().expect("Failed to allocate"));
        }

        assert!(pool.alloc().is_none());
        assert_eq!(pool.available(), 0);
    }
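
    // A minimal sketch of the pool's LIFO reuse: `dealloc` only rewinds the
    // allocation index, so the next `alloc` hands the just-freed slot out
    // again.
    #[test]
    fn test_fixed_pool_lifo_reuse() {
        let pool: FixedPool<1024, 8> = FixedPool::new(64);

        let _ptr1 = pool.alloc().expect("Failed to allocate");
        let ptr2 = pool.alloc().expect("Failed to allocate");

        unsafe {
            pool.dealloc(ptr2);
        }

        // The rewound index is reissued, so ptr3 aliases ptr2.
        let ptr3 = pool.alloc().expect("Failed to allocate");
        assert_eq!(ptr2.as_ptr(), ptr3.as_ptr());
    }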

    #[test]
    fn test_bump_allocator_basic() {
        let bump: BumpAllocator<1024> = BumpAllocator::new();
        assert_eq!(bump.capacity(), 1024);
        assert_eq!(bump.used(), 0);

        let layout = Layout::from_size_align(64, 8).unwrap();
        let ptr1 = bump.alloc(layout).expect("Failed to allocate");
        assert_eq!(bump.used(), 64);

        let ptr2 = bump.alloc(layout).expect("Failed to allocate");
        assert_eq!(bump.used(), 128);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());
    }
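
    // A minimal concurrency sketch for the CAS loop in `alloc`: racing
    // threads must each receive a distinct block. Assumes the test harness
    // links `std` (as the other tests here already do via `vec!`).
    #[test]
    fn test_bump_allocator_threaded() {
        let bump: BumpAllocator<4096> = BumpAllocator::new();
        let layout = Layout::from_size_align(64, 8).unwrap();

        let mut addrs: Vec<usize> = std::thread::scope(|s| {
            let handles: Vec<_> = (0..4)
                .map(|_| {
                    s.spawn(|| {
                        bump.alloc(layout).expect("Failed to allocate").as_ptr() as usize
                    })
                })
                .collect();
            handles.into_iter().map(|h| h.join().unwrap()).collect()
        });

        // Four threads, four distinct 64-byte blocks.
        addrs.sort_unstable();
        addrs.dedup();
        assert_eq!(addrs.len(), 4);
    }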

    #[test]
    fn test_bump_allocator_alignment() {
        let bump: BumpAllocator<1024> = BumpAllocator::new();

        let layout1 = Layout::from_size_align(1, 1).unwrap();
        let _ptr1 = bump.alloc(layout1).expect("Failed to allocate");

        let layout2 = Layout::from_size_align(8, 8).unwrap();
        let ptr2 = bump.alloc(layout2).expect("Failed to allocate");

        // The offset is 8-aligned relative to the buffer base; this holds as
        // an absolute address because the containing struct is 8-aligned in
        // practice via its `AtomicUsize` field.
        assert_eq!(ptr2.as_ptr() as usize % 8, 0);
    }

    #[test]
    fn test_bump_allocator_reset() {
        let bump: BumpAllocator<1024> = BumpAllocator::new();

        let layout = Layout::from_size_align(64, 8).unwrap();
        let _ptr = bump.alloc(layout).expect("Failed to allocate");
        assert_eq!(bump.used(), 64);

        unsafe {
            bump.reset();
        }
        assert_eq!(bump.used(), 0);
    }
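
    // A minimal exhaustion sketch: a request that does not fit fails with
    // `None` and leaves the offset untouched.
    #[test]
    fn test_bump_allocator_exhaustion() {
        let bump: BumpAllocator<128> = BumpAllocator::new();

        let layout = Layout::from_size_align(96, 8).unwrap();
        let _ptr = bump.alloc(layout).expect("Failed to allocate");

        // Only 32 bytes remain, so a second 96-byte request must fail.
        assert!(bump.alloc(layout).is_none());
        assert_eq!(bump.used(), 96);
    }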

    #[test]
    fn test_stack_allocator_basic() {
        let stack: StackAllocator<1024> = StackAllocator::new();
        assert_eq!(stack.capacity(), 1024);
        assert_eq!(stack.depth(), 0);

        let layout = Layout::from_size_align(64, 8).unwrap();
        let (ptr1, offset1) = stack.push(layout).expect("Failed to allocate");
        assert!(stack.depth() >= 64);

        let (ptr2, offset2) = stack.push(layout).expect("Failed to allocate");
        assert!(stack.depth() >= 128);

        assert_ne!(ptr1.as_ptr(), ptr2.as_ptr());

        unsafe {
            stack.pop(offset2);
            assert!(stack.depth() < 128);
            stack.pop(offset1);
            assert_eq!(stack.depth(), 0);
        }
    }

    #[test]
    fn test_stack_allocator_scoped() {
        let stack: StackAllocator<1024> = StackAllocator::new();

        let layout = Layout::from_size_align(64, 8).unwrap();
        {
            let (_ptr1, _guard1) = stack.scoped_alloc(layout).expect("Failed to allocate");
            assert!(stack.depth() >= 64);

            {
                let (_ptr2, _guard2) = stack.scoped_alloc(layout).expect("Failed to allocate");
                assert!(stack.depth() >= 128);
            }
            assert!(stack.depth() < 128);
        }
    }
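
    // A minimal exhaustion sketch for the stack: a push that does not fit
    // fails without moving the offset, so earlier allocations stay live.
    #[test]
    fn test_stack_allocator_exhaustion() {
        let stack: StackAllocator<128> = StackAllocator::new();

        let layout = Layout::from_size_align(96, 8).unwrap();
        let (_ptr, _saved) = stack.push(layout).expect("Failed to allocate");

        assert!(stack.push(layout).is_none());
        assert_eq!(stack.depth(), 96);
    }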

    #[test]
    fn test_embedded_allocator_basic() {
        let allocator: EmbeddedAllocator<1024, 512> = EmbeddedAllocator::new(32);
        assert_eq!(allocator.capacity(), 1536);

        unsafe {
            let layout = Layout::from_size_align(16, 8).unwrap();
            let ptr1 = allocator.alloc(layout);
            assert!(!ptr1.is_null());

            let ptr2 = allocator.alloc(layout);
            assert!(!ptr2.is_null());
            assert_ne!(ptr1, ptr2);

            allocator.dealloc(ptr1, layout);
            allocator.dealloc(ptr2, layout);
        }
    }

    #[test]
    fn test_embedded_allocator_small_large() {
        let allocator: EmbeddedAllocator<2048, 1024> = EmbeddedAllocator::new(64);

        unsafe {
            let small_layout = Layout::from_size_align(32, 8).unwrap();
            let small_ptr = allocator.alloc(small_layout);
            assert!(!small_ptr.is_null());

            let large_layout = Layout::from_size_align(128, 8).unwrap();
            let large_ptr = allocator.alloc(large_layout);
            assert!(!large_ptr.is_null());

            assert_ne!(small_ptr, large_ptr);

            allocator.dealloc(small_ptr, small_layout);
            allocator.dealloc(large_ptr, large_layout);
        }
    }
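
    // A sketch of the ownership check in `dealloc`: returning a pointer from
    // the bump region must not rewind the pool's counter. Relies on `tests`
    // being a child module, so the private `pool` field is reachable.
    #[test]
    fn test_embedded_allocator_dealloc_ignores_bump_ptrs() {
        let allocator: EmbeddedAllocator<1024, 512> = EmbeddedAllocator::new(32);

        unsafe {
            // 128 bytes exceeds the 32-byte block size, so this allocation
            // comes from the bump region.
            let large_layout = Layout::from_size_align(128, 8).unwrap();
            let large_ptr = allocator.alloc(large_layout);
            assert!(!large_ptr.is_null());

            let pool_available = allocator.pool.available();
            allocator.dealloc(large_ptr, large_layout);
            assert_eq!(allocator.pool.available(), pool_available);
        }
    }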

    #[test]
    fn test_fixed_pool_reset() {
        let pool: FixedPool<1024, 8> = FixedPool::new(64);

        let _ptr1 = pool.alloc().expect("Failed to allocate");
        let _ptr2 = pool.alloc().expect("Failed to allocate");
        assert_eq!(pool.available(), 14);

        unsafe {
            pool.reset();
        }
        assert_eq!(pool.available(), 16);
    }
}