fixed_size_allocator/
lib.rs

#![allow(incomplete_features)]
#![feature(generic_const_exprs)]

use std::ptr::NonNull;
use std::pin::Pin;
use std::mem::{self, MaybeUninit};
use std::marker::PhantomPinned;

use const_assert::{Assert, IsTrue};

/// Enum representing the possible errors that may occur when freeing a pointer.
#[derive(Debug, Clone, Copy)]
pub enum FreeError {

    /// The freed pointer was a null pointer.
    NullPtrFree,

    /// The freed pointer was already marked as free.
    DoubleFree,

    /// The freed pointer is outside the allocator's heap bounds.
    FreeOutOfBounds,

    /// The freed pointer is not aligned with any block's start address.
    UnalignedFree,

}


pub struct FixedSizeAllocator<const S: usize, const N: usize> {

    /// The heap: `N` blocks of `S` bytes each.
    memory: [MaybeUninit<[u8; S]>; N],

    /// Keep track of which blocks are free and which are allocated. A free block is `true` and an allocated block is `false`.
    free_table: [bool; N],

    /// The total amount of free memory in bytes.
    total_free: usize,

    /// A marker to tell the compiler that this struct must not be moved because it's in an address-sensitive state.
    _pin: PhantomPinned

}

impl<const S: usize, const N: usize> FixedSizeAllocator<S, N>
where
    Assert<{ S > 0 }>: IsTrue
{

    /// Construct a new fixed-size allocator on the stack and return it.
    /// Optionally, the heap can be initialized with `0` bytes by setting the `zero_initialized` flag. Note that zeroing the backing memory does not mean the allocator itself is initialized.
    /// The returned allocator struct must immediately be pinned via `pin!()`. Using an unpinned allocator created on the stack is undefined behavior.
    /// This function is marked as unsafe because it's the programmer's responsibility to correctly instantiate and pin an allocator on the stack.
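    ///
    /// # Example
    ///
    /// A minimal sketch of correct stack usage, mirroring the tests below (a nightly
    /// toolchain is assumed, and the import path `fixed_size_allocator` is taken from
    /// this repository's directory name):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use std::pin::pin;
    /// use fixed_size_allocator::FixedSizeAllocator;
    ///
    /// // SAFETY: the allocator is pinned immediately and never moved afterwards.
    /// let mut alloc = pin!( unsafe {
    ///     FixedSizeAllocator::<8, 64>::new_unpinned(false)
    /// });
    ///
    /// let ptr = alloc.as_mut().alloc::<u64>().unwrap();
    /// assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
    /// ```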
    pub unsafe fn new_unpinned(zero_initialized: bool) -> Self {

        let memory = if zero_initialized {
            [MaybeUninit::zeroed(); N]
        } else {
            [MaybeUninit::uninit(); N]
        };

        Self {
            memory,
            free_table: [true; N],
            total_free: S * N,
            _pin: Default::default()
        }
    }


    /// Return the start address of the heap.
    /// This coincides with the address of the first memory block.
    fn heap_start_address(&self) -> NonNull<u8> {
        unsafe {
            NonNull::new_unchecked(
                self.memory.as_ptr() as *mut u8
            )
        }
    }


    /// Return the upper bound address of the heap.
    /// Note that this is an invalid pointer as it points to the first byte after the heap.
    fn upper_heap_bound(&self) -> NonNull<u8> {
        unsafe {
            self.heap_start_address().byte_add(S * N)
        }
    }


    /// Create a new fixed-size allocator on the heap using the standard system allocator.
    /// Optionally, the heap can be initialized with `0` bytes by setting the `zero_initialized` flag.
    /// The returned allocator is pinned and cannot be moved once created, which is why a `Pin<Box<Self>>` is returned rather than a plain `Self`.
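    ///
    /// # Example
    ///
    /// A minimal sketch (same nightly and crate-name assumptions as for `new_unpinned`):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use fixed_size_allocator::FixedSizeAllocator;
    ///
    /// let alloc = FixedSizeAllocator::<8, 64>::new(true);
    ///
    /// assert_eq!(alloc.heap_size(), 8 * 64);
    /// assert_eq!(alloc.total_free(), alloc.heap_size());
    /// ```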
    pub fn new(zero_initialized: bool) -> Pin<Box<Self>> {

        Box::pin( unsafe {
            Self::new_unpinned(zero_initialized)
        })
    }


    /// Return the total size of the heap in bytes.
    pub const fn heap_size(&self) -> usize {
        S * N
    }


    /// Return the total number of memory blocks.
    pub const fn block_count(&self) -> usize {
        N
    }


    /// Return the size of a memory block.
    pub const fn block_size(&self) -> usize {
        S
    }


    /// Return the total amount of free memory in bytes.
    pub const fn total_free(&self) -> usize {
        self.total_free
    }


    /// Return the total amount of memory allocated in bytes.
    pub const fn total_allocated(&self) -> usize {
        self.heap_size() - self.total_free()
    }


    /// Return how many blocks are free.
    pub fn free_blocks(&self) -> usize {
        self.free_table
            .iter()
            .filter(|&&is_free| is_free)
            .count()
    }


    /// Return how many blocks are allocated (not free).
    pub fn allocated_blocks(&self) -> usize {
        N - self.free_blocks()
    }


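    /// Try to allocate a single memory block, returning an untyped pointer to its first byte, or `None` if no block is free.
    /// This function is unsafe: the caller is responsible for correctly using the untyped, possibly uninitialized block.
    ///
    /// # Example
    ///
    /// A minimal sketch (same nightly and crate-name assumptions as above):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use fixed_size_allocator::FixedSizeAllocator;
    ///
    /// let mut alloc = FixedSizeAllocator::<8, 64>::new(false);
    ///
    /// let ptr = unsafe { alloc.as_mut().alloc_untyped() }.unwrap();
    ///
    /// // SAFETY: the block is 8 bytes, so a u64 fits exactly.
    /// unsafe { ptr.cast::<u64>().write(42) };
    /// assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
    /// ```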
    pub unsafe fn alloc_untyped(self: Pin<&mut Self>) -> Option<NonNull<u8>> {

        let self_data = unsafe { self.get_unchecked_mut() };

        if self_data.total_free() == 0 {
            // Avoid searching for a free block if we already know there is no free memory
            None
        } else {
            // Linear search for a free block from both ends of the free table at once (two-pointer scan)

            let mut left = 0;
            let mut right = self_data.free_table.len() - 1;

            while left <= right {

                if self_data.free_table[left] {
                    self_data.total_free -= self_data.block_size();
                    self_data.free_table[left] = false;
                    return Some(self_data.real_ptr_from_block_index(left));
                }

                if self_data.free_table[right] {
                    self_data.total_free -= self_data.block_size();
                    self_data.free_table[right] = false;
                    return Some(self_data.real_ptr_from_block_index(right));
                }

                left += 1;
                right -= 1;
            }

            None
        }
    }


    /// Try to allocate a memory block that fits a `T`, where `T` must be exactly `S` bytes in size.
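    ///
    /// # Example
    ///
    /// A minimal sketch (same nightly and crate-name assumptions as above):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use fixed_size_allocator::FixedSizeAllocator;
    ///
    /// let mut alloc = FixedSizeAllocator::<8, 64>::new(false);
    ///
    /// // `u64` is exactly 8 bytes, matching the block size `S = 8`.
    /// let ptr = alloc.as_mut().alloc::<u64>().unwrap();
    /// unsafe { ptr.write(42) };
    /// assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
    /// ```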
    pub fn alloc<T>(self: Pin<&mut Self>) -> Option<NonNull<T>>
    where
        Assert<{ mem::size_of::<T>() == S }>: IsTrue
    {
        unsafe {
            // Allocate an untyped block and cast the pointer to the requested type `T`.
            self.alloc_untyped().map(|ptr| ptr.cast())
        }
    }


    /// Convert from a block's metadata index in the free table to the block's real address in memory.
    fn real_ptr_from_block_index<T>(&self, block_index: usize) -> NonNull<T> {

        // block address = heap start + (block index * block size)
        unsafe {
            NonNull::new_unchecked(
                self.heap_start_address().as_ptr().byte_add(block_index * self.block_size()) as *mut T
            )
        }
    }


    /// Convert from a block's real address in memory to its metadata entry index in the free table.
    /// Note that the given pointer must be aligned with a block's start address.
    fn block_index_from_real_ptr<T>(&self, ptr: NonNull<T>) -> Result<usize, FreeError> {

        // The pointer is invalid if it points outside of the heap.
        if (ptr.as_ptr() as usize) < (self.heap_start_address().as_ptr() as usize) || (ptr.as_ptr() as usize) >= (self.upper_heap_bound().as_ptr() as usize) {
            return Err(FreeError::FreeOutOfBounds)
        }

        // Calculate the positive offset from the heap start address.
        let offset = (ptr.as_ptr() as usize) - (self.heap_start_address().as_ptr() as usize);

        // Check for alignment
        if offset % self.block_size() != 0 {
            Err(FreeError::UnalignedFree)
        } else {
            Ok(
                // Calculate the actual block index
                offset / self.block_size()
            )
        }
    }


    /// Free the given non-null pointer.
    /// Note that the pointer must be aligned to a block's start address.
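    ///
    /// # Example
    ///
    /// A short sketch of double-free detection (same nightly and crate-name assumptions as above):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use fixed_size_allocator::{FixedSizeAllocator, FreeError};
    ///
    /// let mut alloc = FixedSizeAllocator::<8, 64>::new(false);
    /// let ptr = alloc.as_mut().alloc::<u64>().unwrap();
    ///
    /// assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
    /// assert!(matches!(alloc.as_mut().free_nonnull(ptr), Err(FreeError::DoubleFree)));
    /// ```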
    pub fn free_nonnull<T>(self: Pin<&mut Self>, ptr: NonNull<T>) -> Result<(), FreeError> {

        let self_data = unsafe { self.get_unchecked_mut() };

        // Calculate the block's metadata index in the free table.
        let block_index = self_data.block_index_from_real_ptr(ptr)?;

        if self_data.free_table[block_index] {
            Err(FreeError::DoubleFree)
        } else {
            // Mark the block as free and keep track of the total free memory.
            self_data.free_table[block_index] = true;
            self_data.total_free += self_data.block_size();
            Ok(())
        }
    }


    /// Free the given pointer.
    /// Note that the pointer must be aligned to a block's start address.
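    ///
    /// # Example
    ///
    /// A short sketch of null-pointer rejection (same nightly and crate-name assumptions as above):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use std::ptr;
    /// use fixed_size_allocator::{FixedSizeAllocator, FreeError};
    ///
    /// let mut alloc = FixedSizeAllocator::<8, 64>::new(false);
    ///
    /// assert!(matches!(alloc.as_mut().free(ptr::null::<u64>()), Err(FreeError::NullPtrFree)));
    /// ```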
    pub fn free<T>(self: Pin<&mut Self>, ptr: *const T) -> Result<(), FreeError> {

        if let Some(ptr) = NonNull::new(ptr as *mut T) {
            self.free_nonnull(ptr)
        } else {
            Err(FreeError::NullPtrFree)
        }
    }


    /// Free all the memory blocks.
    /// This function is inherently unsafe because it invalidates all pointers to previously allocated blocks.
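    ///
    /// # Example
    ///
    /// A short sketch (same nightly and crate-name assumptions as above):
    ///
    /// ```
    /// # #![feature(generic_const_exprs)]
    /// # #![allow(incomplete_features)]
    /// use fixed_size_allocator::FixedSizeAllocator;
    ///
    /// let mut alloc = FixedSizeAllocator::<8, 64>::new(false);
    /// let _ptr = alloc.as_mut().alloc::<u64>().unwrap();
    ///
    /// // SAFETY: `_ptr` is not used after this point.
    /// unsafe { alloc.as_mut().free_all() };
    ///
    /// assert_eq!(alloc.total_free(), alloc.heap_size());
    /// ```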
    pub unsafe fn free_all(self: Pin<&mut Self>) {

        let self_data = unsafe { self.get_unchecked_mut() };

        // Simply mark all blocks as free
        self_data.free_table = [true; N];
        self_data.total_free = self_data.heap_size();
    }

}


#[cfg(test)]
mod tests {

    use std::{pin::pin, ptr};

    use super::*;


    #[test]
    fn check_new() {

        let alloc = FixedSizeAllocator::<8, 64>::new(false);

        assert_eq!(alloc.block_size(), 8);
        assert_eq!(alloc.heap_size(), 64*8);
        assert_eq!(alloc.block_count(), 64);
        assert_eq!(alloc.free_blocks(), 64);
        assert_eq!(alloc.allocated_blocks(), 0);
        assert_eq!(alloc.total_allocated(), 0);
        assert_eq!(alloc.total_free(), alloc.heap_size());
    }


    #[test]
    fn check_alloc() {

        let mut alloc = FixedSizeAllocator::<8, 64>::new(false);

        for _ in 0..64 {
            assert!(alloc.as_mut().alloc::<usize>().is_some());
        }

        assert_eq!(alloc.total_free(), 0);
        assert_eq!(alloc.free_blocks(), 0);
    }


    #[test]
    fn check_free() {

        let mut alloc = FixedSizeAllocator::<8, 64>::new(false);

        let mut ptrs = Vec::with_capacity(64);
        for _ in 0..64 {
            ptrs.push(
                alloc.as_mut().alloc::<usize>().unwrap()
            );
        }

        for ptr in ptrs {
            if let Err(e) = alloc.as_mut().free_nonnull(ptr) {
                panic!("Unexpected error {:?}", e);
            }
        }

        assert_eq!(alloc.total_free(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), alloc.block_count());
    }


    #[test]
    fn check_new_stack() {

        let alloc = pin!( unsafe {
            FixedSizeAllocator::<8, 64>::new_unpinned(false)
        });

        assert_eq!(alloc.block_size(), 8);
        assert_eq!(alloc.heap_size(), 64*8);
        assert_eq!(alloc.block_count(), 64);
        assert_eq!(alloc.free_blocks(), 64);
        assert_eq!(alloc.allocated_blocks(), 0);
        assert_eq!(alloc.total_allocated(), 0);
        assert_eq!(alloc.total_free(), alloc.heap_size());
    }


    #[test]
    fn check_alloc_stack() {

        let mut alloc = pin!( unsafe {
            FixedSizeAllocator::<8, 64>::new_unpinned(false)
        });

        for _ in 0..64 {
            assert!(alloc.as_mut().alloc::<usize>().is_some());
        }

        assert_eq!(alloc.total_free(), 0);
        assert_eq!(alloc.free_blocks(), 0);
    }


    #[test]
    fn check_free_stack() {

        let mut alloc = pin!( unsafe {
            FixedSizeAllocator::<8, 64>::new_unpinned(false)
        });

        let mut ptrs = Vec::with_capacity(64);
        for _ in 0..64 {
            ptrs.push(
                alloc.as_mut().alloc::<usize>().unwrap()
            );
        }

        for ptr in ptrs {
            if let Err(e) = alloc.as_mut().free_nonnull(ptr) {
                panic!("Unexpected error {:?}", e);
            }
        }

        assert_eq!(alloc.total_free(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), alloc.block_count());
    }


    #[test]
    fn check_invalid_usage() {

        let mut alloc = FixedSizeAllocator::<{mem::size_of::<usize>()}, 64>::new(false);

        assert!(matches!(alloc.as_mut().free(ptr::null::<usize>()), Err(FreeError::NullPtrFree)));
        assert!(matches!(alloc.as_mut().free(1 as *const usize), Err(FreeError::FreeOutOfBounds)));
        assert!(matches!(alloc.as_mut().free(usize::MAX as *const usize), Err(FreeError::FreeOutOfBounds)));

        let ptr: NonNull<usize> = alloc.as_mut().alloc().unwrap();

        assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
        assert!(matches!(alloc.as_mut().free_nonnull(ptr), Err(FreeError::DoubleFree)));
    }


    #[test]
    fn check_invalid_usage_stack() {

        let mut alloc = pin!( unsafe {
            FixedSizeAllocator::<{mem::size_of::<usize>()}, 64>::new_unpinned(false)
        });

        assert!(matches!(alloc.as_mut().free(ptr::null::<usize>()), Err(FreeError::NullPtrFree)));
        assert!(matches!(alloc.as_mut().free(1 as *const usize), Err(FreeError::FreeOutOfBounds)));
        assert!(matches!(alloc.as_mut().free(usize::MAX as *const usize), Err(FreeError::FreeOutOfBounds)));

        let ptr: NonNull<usize> = alloc.as_mut().alloc().unwrap();

        assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
        assert!(matches!(alloc.as_mut().free_nonnull(ptr), Err(FreeError::DoubleFree)));
    }


    #[test]
    fn check_alloc_all_free_all() {

        let mut alloc = FixedSizeAllocator::<{mem::size_of::<u32>()}, 64>::new(false);

        for i in 0..64 {
            let ptr = alloc.as_mut().alloc::<u32>().unwrap();
            unsafe {
                ptr.write(i);
            }
        }

        assert_eq!(alloc.total_free(), 0);
        assert_eq!(alloc.total_allocated(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), 0);
        assert_eq!(alloc.allocated_blocks(), alloc.block_count());

        unsafe {
            alloc.as_mut().free_all();
        }

        assert_eq!(alloc.total_free(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), alloc.block_count());
        assert_eq!(alloc.allocated_blocks(), 0);
        assert_eq!(alloc.total_allocated(), 0);
    }

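    // A hedged addition, not part of the original suite: `FreeError::UnalignedFree`
    // is never exercised by the tests above, so this sketch checks that freeing a
    // pointer offset one byte into a block is rejected as unaligned.
    #[test]
    fn check_unaligned_free() {

        let mut alloc = FixedSizeAllocator::<{mem::size_of::<usize>()}, 64>::new(false);

        let ptr: NonNull<usize> = alloc.as_mut().alloc().unwrap();

        // One byte past the block's start address: in bounds, but not block-aligned.
        let unaligned = unsafe { ptr.as_ptr().byte_add(1) } as *const usize;

        assert!(matches!(alloc.as_mut().free(unaligned), Err(FreeError::UnalignedFree)));

        // The original, aligned pointer can still be freed normally.
        assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
    }
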
}