#![no_std]

use anyhow::{anyhow, Result};
use core::alloc::Layout;
use parking_lot::Mutex;

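/// A fixed-capacity allocator that carves a caller-provided buffer of `N`
/// bytes into `M` equally sized chunks and serves allocations from them.
///
/// A minimal usage sketch (the backing buffer, its size, and its alignment
/// are the caller's responsibility):
///
/// ```ignore
/// let mut backing = [0u8; 1024];
/// let alloc: MemoryPoolAllocator<1024, 64> =
///     unsafe { MemoryPoolAllocator::new(backing.as_mut_ptr()) };
/// let layout = Layout::from_size_align(32, 8).unwrap();
/// let ptr = alloc.try_allocate(layout)?;
/// alloc.try_deallocate(ptr)?;
/// ```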
pub struct MemoryPoolAllocator<const N: usize, const M: usize> {
    inner: Mutex<PoolInner<N, M>>,
    #[cfg(feature = "statistics")]
    stats: Mutex<PoolStats>,
}

struct PoolInner<const N: usize, const M: usize> {
    pool: *mut u8,
    /// Per-chunk allocation map: `Some(len)` at the head chunk of a live
    /// allocation (`len` = number of chunks), `Some(0)` at its tail chunks,
    /// `None` for free chunks.
    meta: [Option<usize>; M],
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocError {
    OutOfMemory,
    InvalidLayout,
    InvalidPointer,
    NotAllocated,
}

impl core::fmt::Display for AllocError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            Self::OutOfMemory => write!(f, "Out of memory"),
            Self::InvalidLayout => write!(f, "Invalid layout parameters"),
            Self::InvalidPointer => write!(f, "Pointer not from this allocator"),
            Self::NotAllocated => write!(f, "Pointer not currently allocated"),
        }
    }
}

impl From<anyhow::Error> for AllocError {
    fn from(err: anyhow::Error) -> Self {
        // Recover the wrapped error if there is one; otherwise fall back to
        // `InvalidLayout` as a generic conversion failure.
        err.downcast_ref::<AllocError>()
            .copied()
            .unwrap_or(AllocError::InvalidLayout)
    }
}

#[cfg(feature = "statistics")]
#[derive(Debug, Clone)]
pub struct PoolStats {
    pub allocated_chunks: usize,
    pub allocation_errors: usize,
    pub deallocation_errors: usize,
}

#[cfg(feature = "statistics")]
impl PoolStats {
    const fn new() -> Self {
        Self {
            allocated_chunks: 0,
            allocation_errors: 0,
            deallocation_errors: 0,
        }
    }
}

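// SAFETY: the raw `pool` pointer is only ever read or written while holding
// `inner`'s mutex, so the allocator can be shared and sent across threads as
// long as the caller upholds the contract documented on `new`.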
unsafe impl<const N: usize, const M: usize> Sync for MemoryPoolAllocator<N, M> {}
unsafe impl<const N: usize, const M: usize> Send for MemoryPoolAllocator<N, M> {}

impl<const N: usize, const M: usize> MemoryPoolAllocator<N, M> {
    /// Size in bytes of one chunk; `N` is assumed to be a multiple of `M`.
    pub const CHUNK_SIZE: usize = N / M;

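    /// Creates an allocator over a caller-provided buffer.
    ///
    /// # Safety
    ///
    /// `pool` must point to at least `N` bytes that stay valid, and are not
    /// accessed through any other path, for the allocator's entire lifetime.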
    pub const unsafe fn new(pool: *mut u8) -> Self {
        Self {
            inner: Mutex::new(PoolInner {
                pool,
                meta: [None; M],
            }),
            #[cfg(feature = "statistics")]
            stats: Mutex::new(PoolStats::new()),
        }
    }

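    /// Attempts to serve `layout` from the pool.
    ///
    /// Zero-sized requests succeed with a dangling pointer that must not be
    /// passed to [`try_deallocate`](Self::try_deallocate).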
    pub fn try_allocate(&self, layout: Layout) -> Result<*mut u8> {
        if layout.size() == 0 {
            // Zero-sized allocations take no pool space; return a dangling,
            // non-null pointer that still satisfies the requested alignment.
            return Ok(layout.align() as *mut u8);
        }

        // `Layout` already guarantees a power-of-two alignment; additionally
        // reject alignments larger than the whole pool.
        if !layout.align().is_power_of_two() || layout.align() > N {
            #[cfg(feature = "statistics")]
            {
                self.stats.lock().allocation_errors += 1;
            }
            return Err(anyhow!(AllocError::InvalidLayout).context("Invalid alignment or size"));
        }

        let chunks_needed = layout.size().div_ceil(Self::CHUNK_SIZE);
        if chunks_needed > M {
            #[cfg(feature = "statistics")]
            {
                self.stats.lock().allocation_errors += 1;
            }
            return Err(anyhow!(AllocError::OutOfMemory).context("Request exceeds pool capacity"));
        }

        let mut inner = self.inner.lock();
        let pool_base = inner.pool as usize;

        if let Some((start_chunk, total_chunks)) =
            self.find_free_region(&inner, chunks_needed, layout.align())
        {
            self.mark_allocated(&mut inner.meta, start_chunk, total_chunks)?;

            let ptr_addr = pool_base + start_chunk * Self::CHUNK_SIZE;

            #[cfg(feature = "statistics")]
            {
                self.stats.lock().allocated_chunks += total_chunks;
            }
            return Ok(ptr_addr as *mut u8);
        }

        #[cfg(feature = "statistics")]
        {
            self.stats.lock().allocation_errors += 1;
        }
        Err(anyhow!(AllocError::OutOfMemory).context("Failed to find free region"))
    }

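    /// Returns the allocation starting at `ptr` to the pool.
    ///
    /// `ptr` must be a pointer previously returned by
    /// [`try_allocate`](Self::try_allocate) for a non-zero-sized layout;
    /// anything else is reported as an error rather than causing UB.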
    pub fn try_deallocate(&self, ptr: *mut u8) -> Result<()> {
        if ptr.is_null() {
            #[cfg(feature = "statistics")]
            {
                self.stats.lock().deallocation_errors += 1;
            }
            return Err(anyhow!(AllocError::InvalidPointer).context("Cannot deallocate null pointer"));
        }

        let mut inner = self.inner.lock();
        let pool_base = inner.pool as usize;
        let ptr_addr = ptr as usize;

        if ptr_addr < pool_base || ptr_addr >= pool_base + N {
            #[cfg(feature = "statistics")]
            {
                self.stats.lock().deallocation_errors += 1;
            }
            return Err(anyhow!(AllocError::InvalidPointer).context("Pointer not from this allocator"));
        }

        if (ptr_addr - pool_base) % Self::CHUNK_SIZE != 0 {
            #[cfg(feature = "statistics")]
            {
                self.stats.lock().deallocation_errors += 1;
            }
            return Err(anyhow!(AllocError::InvalidPointer).context("Pointer not aligned to chunk size"));
        }

        let start_chunk = (ptr_addr - pool_base) / Self::CHUNK_SIZE;
        // Only a head chunk (`Some(len)` with `len > 0`) may be deallocated;
        // tail chunks are recorded as `Some(0)` and free chunks as `None`.
        let total_chunks = match inner.meta.get(start_chunk) {
            Some(Some(len)) if *len > 0 => *len,
            _ => {
                #[cfg(feature = "statistics")]
                {
                    self.stats.lock().deallocation_errors += 1;
                }
                return Err(anyhow!(AllocError::NotAllocated)
                    .context("Pointer is not the start of a live allocation"));
            }
        };

        #[cfg(feature = "zero-on-free")]
        {
            // Scrub the freed region so stale data cannot leak into the next
            // allocation.
            unsafe {
                let start_ptr = (pool_base + start_chunk * Self::CHUNK_SIZE) as *mut u8;
                core::ptr::write_bytes(start_ptr, 0, total_chunks * Self::CHUNK_SIZE);
            }
        }

        self.mark_chunks_free(&mut inner.meta, start_chunk)?;

        #[cfg(feature = "statistics")]
        {
            let mut stats = self.stats.lock();
            stats.allocated_chunks = stats.allocated_chunks.saturating_sub(total_chunks);
        }

        Ok(())
    }

    #[inline]
    fn is_chunk_allocated(&self, meta: &[Option<usize>; M], chunk_idx: usize) -> bool {
        chunk_idx < M && meta[chunk_idx].is_some()
    }

    fn mark_allocated(&self, meta: &mut [Option<usize>; M], start_chunk: usize, chunk_count: usize) -> Result<()> {
        if start_chunk + chunk_count > M {
            return Err(anyhow!(AllocError::OutOfMemory).context("Not enough space to allocate chunks"));
        }
        // The head chunk records the allocation length; tail chunks are marked
        // `Some(0)` so the whole range reads as occupied. (Marking only the
        // head would let `find_free_region` hand out overlapping regions.)
        meta[start_chunk] = Some(chunk_count);
        for slot in &mut meta[start_chunk + 1..start_chunk + chunk_count] {
            *slot = Some(0);
        }
        Ok(())
    }

    fn mark_chunks_free(&self, meta: &mut [Option<usize>; M], start_chunk: usize) -> Result<()> {
        match meta[start_chunk] {
            Some(len) if len > 0 => {
                if start_chunk + len > M {
                    return Err(anyhow!(AllocError::OutOfMemory).context("Invalid chunk range to free"));
                }
                for slot in &mut meta[start_chunk..start_chunk + len] {
                    *slot = None;
                }
                Ok(())
            }
            _ => Err(anyhow!(AllocError::NotAllocated).context("Chunk already free")),
        }
    }

    /// Scans for `chunks_needed` consecutive free chunks whose chunk-aligned
    /// start address also satisfies `align`, returning the starting chunk
    /// index and the chunk count.
    fn find_free_region(&self, inner: &PoolInner<N, M>, chunks_needed: usize, align: usize) -> Option<(usize, usize)> {
        let pool_base = inner.pool as usize;

        let mut start = 0;
        while start + chunks_needed <= M {
            let block_addr = pool_base + start * Self::CHUNK_SIZE;
            // Round the candidate address up to `align` and convert the wasted
            // bytes into whole leading chunks.
            let aligned_addr = (block_addr + align - 1) & !(align - 1);
            let alignment_waste = aligned_addr - block_addr;
            let alignment_chunks = alignment_waste.div_ceil(Self::CHUNK_SIZE);
            let total_chunks_needed = alignment_chunks + chunks_needed;
            if start + total_chunks_needed > M {
                break;
            }
            // Allocations are handed out at chunk boundaries, and rounding the
            // waste up to whole chunks can overshoot `aligned_addr`, so verify
            // that the boundary itself is aligned before accepting it.
            if (block_addr + alignment_chunks * Self::CHUNK_SIZE) % align != 0 {
                start += 1;
                continue;
            }
            let mut found = true;
            for i in 0..total_chunks_needed {
                if self.is_chunk_allocated(&inner.meta, start + i) {
                    found = false;
                    start += i + 1;
                    break;
                }
            }
            if found {
                return Some((start + alignment_chunks, chunks_needed));
            }
        }
        None
    }
}

#[cfg(feature = "zero-on-drop")]
impl<const N: usize, const M: usize> Drop for MemoryPoolAllocator<N, M> {
    fn drop(&mut self) {
        // Scrub the entire backing buffer when the allocator is torn down.
        let inner = self.inner.lock();
        unsafe {
            core::ptr::write_bytes(inner.pool, 0, N);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_basic_allocation() {
        type Alloc = MemoryPoolAllocator<1024, 64>;
        // A u64 buffer keeps the pool base at least 8-aligned.
        let mut mem = [0u64; 128];
        let allocator = unsafe { Alloc::new(mem.as_mut_ptr() as *mut u8) };

        let layout = Layout::from_size_align(16, 8).unwrap();
        let ptr = allocator.try_allocate(layout).unwrap();

        assert_eq!(ptr as usize % 8, 0);

        assert!(allocator.try_deallocate(ptr).is_ok());

        #[cfg(feature = "statistics")]
        {
            let stats = allocator.stats.lock();
            assert_eq!(stats.allocated_chunks, 0);
        }
    }

    #[test]
    fn test_multiple_allocations() {
        type Alloc = MemoryPoolAllocator<1024, 64>;
        let mut mem = [0u64; 128];
        let allocator = unsafe { Alloc::new(mem.as_mut_ptr() as *mut u8) };

        let layout = Layout::from_size_align(16, 8).unwrap();
        let mut ptrs = [core::ptr::null_mut(); 10];
        let mut count = 0;

        for i in 0..10 {
            match allocator.try_allocate(layout) {
                Ok(ptr) => {
                    ptrs[i] = ptr;
                    count += 1;
                }
                Err(_) => break,
            }
        }

        assert!(count > 0);

        // Every pointer must be aligned and distinct from all the others.
        for i in 0..count {
            assert_eq!(ptrs[i] as usize % 8, 0);
            for j in (i + 1)..count {
                assert_ne!(ptrs[i], ptrs[j]);
            }
        }

        for i in 0..count {
            assert!(allocator.try_deallocate(ptrs[i]).is_ok());
        }

        #[cfg(feature = "statistics")]
        {
            let stats = allocator.stats.lock();
            assert_eq!(stats.allocated_chunks, 0);
        }
    }

    #[test]
    fn test_alignment_handling() {
        type Alloc = MemoryPoolAllocator<2048, 32>;
        // The backing buffer must provide all `N = 2048` bytes the allocator
        // was declared with.
        #[repr(align(32))]
        struct Aligned {
            mem: [u8; 2048],
        }
        let mut aligned = Aligned { mem: [0; 2048] };
        let allocator = unsafe { Alloc::new(aligned.mem.as_mut_ptr()) };

        let layout1 = Layout::from_size_align(32, 16).unwrap();
        let ptr1 = allocator.try_allocate(layout1).unwrap();
        assert_eq!(ptr1 as usize % 16, 0);

        let layout2 = Layout::from_size_align(64, 32).unwrap();
        let ptr2 = allocator.try_allocate(layout2).unwrap();
        assert_eq!(ptr2 as usize % 32, 0);

        allocator.try_deallocate(ptr1).unwrap();
        allocator.try_deallocate(ptr2).unwrap();
    }

    #[test]
    #[cfg(feature = "statistics")]
    fn test_error_handling() {
        type Alloc = MemoryPoolAllocator<512, 32>;
        let mut mem = [0u64; 64];
        let allocator = unsafe { Alloc::new(mem.as_mut_ptr() as *mut u8) };

        let layout = Layout::from_size_align(16, 8).unwrap();

        // Each 16-byte request takes one chunk, so 32 requests fill the pool.
        for _ in 0..32 {
            assert!(allocator.try_allocate(layout).is_ok());
        }

        assert!(allocator.try_allocate(layout).is_err());

        let stats = allocator.stats.lock();
        assert!(stats.allocation_errors > 0);
    }

    #[test]
    fn test_full_pool() {
        type Alloc = MemoryPoolAllocator<64, 4>;
        let mut mem = [0u64; 8];
        let allocator = unsafe { Alloc::new(mem.as_mut_ptr() as *mut u8) };

        let layout = Layout::from_size_align(16, 8).unwrap();

        let ptr1 = allocator.try_allocate(layout).unwrap();
        let ptr2 = allocator.try_allocate(layout).unwrap();
        let ptr3 = allocator.try_allocate(layout).unwrap();
        let ptr4 = allocator.try_allocate(layout).unwrap();

        assert!(allocator.try_allocate(layout).is_err());

        // Freeing one chunk makes exactly one more allocation possible.
        allocator.try_deallocate(ptr2).unwrap();
        let ptr5 = allocator.try_allocate(layout).unwrap();

        allocator.try_deallocate(ptr1).unwrap();
        allocator.try_deallocate(ptr3).unwrap();
        allocator.try_deallocate(ptr4).unwrap();
        allocator.try_deallocate(ptr5).unwrap();
    }

    #[test]
    #[cfg(feature = "zero-on-free")]
    fn test_memory_zeroing() {
        type Alloc = MemoryPoolAllocator<64, 4>;
        let mut mem = [0u64; 8];
        let allocator = unsafe { Alloc::new(mem.as_mut_ptr() as *mut u8) };

        let layout = Layout::from_size_align(16, 8).unwrap();
        let ptr = allocator.try_allocate(layout).unwrap();

        // Dirty the second half of the chunk.
        unsafe {
            let data_ptr = ptr.add(8);
            core::ptr::write_bytes(data_ptr, 0xAB, 8);
        }

        allocator.try_deallocate(ptr).unwrap();

        // The freed chunk is handed out again and must come back zeroed.
        let ptr2 = allocator.try_allocate(layout).unwrap();

        let mut buffer = [0u8; 8];
        unsafe {
            let data_ptr = ptr2.add(8);
            core::ptr::copy_nonoverlapping(data_ptr, buffer.as_mut_ptr(), 8);
        }
        assert!(buffer.iter().all(|&b| b == 0));

        allocator.try_deallocate(ptr2).unwrap();
    }
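
    // Added alongside the multi-chunk bookkeeping fix above: a request that
    // spans several chunks must keep later allocations out of its tail
    // chunks, and tail chunks must not be valid deallocation targets.
    #[test]
    fn test_multi_chunk_allocation() {
        type Alloc = MemoryPoolAllocator<256, 16>; // CHUNK_SIZE = 16
        let mut mem = [0u64; 32];
        let allocator = unsafe { Alloc::new(mem.as_mut_ptr() as *mut u8) };

        let big = Layout::from_size_align(48, 8).unwrap(); // three chunks
        let small = Layout::from_size_align(16, 8).unwrap(); // one chunk

        let big_ptr = allocator.try_allocate(big).unwrap();
        let small_ptr = allocator.try_allocate(small).unwrap();

        // The single-chunk allocation must land past the three-chunk region.
        assert!(small_ptr as usize >= big_ptr as usize + 48);

        // A pointer into the middle of the large allocation is rejected.
        assert!(allocator.try_deallocate(unsafe { big_ptr.add(16) }).is_err());

        allocator.try_deallocate(small_ptr).unwrap();
        allocator.try_deallocate(big_ptr).unwrap();
    }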
}