fixed_size_allocator/lib.rs

#![allow(incomplete_features)]
#![feature(generic_const_exprs)]

use std::ptr::NonNull;
use std::pin::Pin;
use std::mem::{self, MaybeUninit};
use std::marker::PhantomPinned;

use const_assert::{Assert, IsTrue};

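/// Ways in which freeing a pointer can fail.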
#[derive(Debug, Clone, Copy)]
pub enum FreeError {

    /// The pointer passed to `free` was null.
    NullPtrFree,

    /// The block was already free.
    DoubleFree,

    /// The pointer lies outside the allocator's heap.
    FreeOutOfBounds,

    /// The pointer does not point to the start of a block.
    UnalignedFree,

}

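/// A fixed-size allocator with `N` blocks of `S` bytes each, stored inline.
/// Because allocations are pointers into the allocator's own memory, the type
/// is `!Unpin` and must stay pinned once in use.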
pub struct FixedSizeAllocator<const S: usize, const N: usize> {

    /// The backing storage: `N` blocks of `S` bytes each.
    memory: [MaybeUninit<[u8; S]>; N],

    /// One flag per block: `true` means the block is free.
    free_table: [bool; N],

    /// Total free memory in bytes.
    total_free: usize,

    /// Handing out pointers into `memory` makes the allocator
    /// address-sensitive, so it must not be moved once in use.
    _pin: PhantomPinned

}

impl<const S: usize, const N: usize> FixedSizeAllocator<S, N>
where
    Assert<{ S > 0 }>: IsTrue
{

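    /// Creates an allocator that is not pinned yet.
    ///
    /// # Safety
    ///
    /// The caller must pin the returned value (e.g. with the `pin!` macro, as
    /// the tests do) before allocating, and must not move it afterwards.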
    pub unsafe fn new_unpinned(zero_initialized: bool) -> Self {

        let memory = if zero_initialized {
            [MaybeUninit::zeroed(); N]
        } else {
            [MaybeUninit::uninit(); N]
        };

        Self {
            memory,
            free_table: [true; N],
            total_free: S * N,
            _pin: PhantomPinned
        }
    }


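    /// Address of the first byte of the backing storage.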
    fn heap_start_address(&self) -> NonNull<u8> {
        unsafe {
            NonNull::new_unchecked(
                self.memory.as_ptr() as *mut u8
            )
        }
    }


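    /// One past the last byte of the backing storage.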
    fn upper_heap_bound(&self) -> NonNull<u8> {
        unsafe {
            self.heap_start_address().byte_add(S * N)
        }
    }


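    /// Creates a new allocator, pinned on the heap via `Box::pin`.
    ///
    /// A minimal usage sketch (the crate name `fixed_size_allocator` is an
    /// assumption based on the directory name; nightly is required for
    /// `generic_const_exprs`):
    ///
    /// ```
    /// #![allow(incomplete_features)]
    /// #![feature(generic_const_exprs)]
    ///
    /// use fixed_size_allocator::FixedSizeAllocator;
    ///
    /// // 16 blocks of 8 bytes each, left uninitialized.
    /// let mut alloc = FixedSizeAllocator::<8, 16>::new(false);
    ///
    /// let ptr = alloc.as_mut().alloc::<u64>().unwrap();
    /// unsafe { ptr.write(42u64); }
    /// assert_eq!(alloc.allocated_blocks(), 1);
    ///
    /// assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
    /// assert_eq!(alloc.allocated_blocks(), 0);
    /// ```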
    pub fn new(zero_initialized: bool) -> Pin<Box<Self>> {

        Box::pin( unsafe {
            Self::new_unpinned(zero_initialized)
        })
    }


    /// Total size of the heap in bytes.
    pub const fn heap_size(&self) -> usize {
        S * N
    }


    /// Number of blocks in the heap.
    pub const fn block_count(&self) -> usize {
        N
    }


    /// Size of each block in bytes.
    pub const fn block_size(&self) -> usize {
        S
    }


    /// Total free memory in bytes.
    pub const fn total_free(&self) -> usize {
        self.total_free
    }


    /// Total allocated memory in bytes.
    pub const fn total_allocated(&self) -> usize {
        self.heap_size() - self.total_free()
    }


    /// Number of currently free blocks.
    pub fn free_blocks(&self) -> usize {
        self.free_table
            .iter()
            .filter(|&&is_free| is_free)
            .count()
    }


    /// Number of currently allocated blocks.
    pub fn allocated_blocks(&self) -> usize {
        N - self.free_blocks()
    }


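    /// Finds a free block, marks it as allocated, and returns a pointer to
    /// it, or `None` if every block is taken. The search walks the free
    /// table from both ends toward the middle.
    ///
    /// # Safety
    ///
    /// The returned memory may be uninitialized; the caller must initialize
    /// it before reading from it.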
    pub unsafe fn alloc_untyped(self: Pin<&mut Self>) -> Option<NonNull<u8>> {

        let self_data = unsafe { self.get_unchecked_mut() };

        if self_data.total_free() == 0 {
            None
        } else {
            // Scan the free table from both ends at once.
            let mut left = 0;
            let mut right = self_data.free_table.len() - 1;

            while left <= right {

                if self_data.free_table[left] {
                    self_data.total_free -= self_data.block_size();
                    self_data.free_table[left] = false;
                    return Some(self_data.real_ptr_from_block_index(left));
                }

                if self_data.free_table[right] {
                    self_data.total_free -= self_data.block_size();
                    self_data.free_table[right] = false;
                    return Some(self_data.real_ptr_from_block_index(right));
                }

                left += 1;
                right -= 1;
            }

            // Unreachable as long as `total_free` stays in sync with the
            // free table: a nonzero `total_free` guarantees the scan above
            // finds a free block before the cursors cross.
            None
        }
    }


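    /// Typed wrapper around `alloc_untyped`: the const assertion requires
    /// `T` to be exactly one block in size. Note that the backing storage is
    /// a plain byte array, so nothing here guarantees that a block boundary
    /// satisfies `T`'s alignment; the caller must check that before reading
    /// or writing through the returned pointer.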
    pub fn alloc<T>(self: Pin<&mut Self>) -> Option<NonNull<T>>
    where
        Assert<{ mem::size_of::<T>() == S }>: IsTrue
    {
        unsafe { self.alloc_untyped() }.map(NonNull::cast)
    }


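    /// Converts a block index into a pointer into the backing storage.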
    fn real_ptr_from_block_index<T>(&self, block_index: usize) -> NonNull<T> {

        unsafe {
            NonNull::new_unchecked(
                self.heap_start_address().as_ptr().byte_add(block_index * self.block_size()) as *mut T
            )
        }
    }


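    /// Maps a pointer back to its block index, validating that it falls
    /// inside the heap and on a block boundary.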
    fn block_index_from_real_ptr<T>(&self, ptr: NonNull<T>) -> Result<usize, FreeError> {

        let addr = ptr.as_ptr() as usize;
        let start = self.heap_start_address().as_ptr() as usize;
        let end = self.upper_heap_bound().as_ptr() as usize;

        if addr < start || addr >= end {
            return Err(FreeError::FreeOutOfBounds)
        }

        let offset = addr - start;

        if offset % self.block_size() != 0 {
            Err(FreeError::UnalignedFree)
        } else {
            Ok(offset / self.block_size())
        }
    }


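    /// Frees the block `ptr` points to, reporting `DoubleFree` if the block
    /// is already free, plus the pointer-validation errors of
    /// `block_index_from_real_ptr`.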
    pub fn free_nonnull<T>(self: Pin<&mut Self>, ptr: NonNull<T>) -> Result<(), FreeError> {

        let self_data = unsafe { self.get_unchecked_mut() };

        let block_index = self_data.block_index_from_real_ptr(ptr)?;

        if self_data.free_table[block_index] {
            Err(FreeError::DoubleFree)
        } else {
            self_data.free_table[block_index] = true;
            self_data.total_free += self_data.block_size();
            Ok(())
        }
    }


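    /// Like `free_nonnull`, but accepts a possibly-null raw pointer and
    /// reports `NullPtrFree` for null.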
    pub fn free<T>(self: Pin<&mut Self>, ptr: *const T) -> Result<(), FreeError> {

        if let Some(ptr) = NonNull::new(ptr as *mut T) {
            self.free_nonnull(ptr)
        } else {
            Err(FreeError::NullPtrFree)
        }
    }


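    /// Marks every block as free at once.
    ///
    /// # Safety
    ///
    /// Every pointer previously returned by the allocator becomes dangling;
    /// the caller must not use such pointers afterwards.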
    pub unsafe fn free_all(self: Pin<&mut Self>) {

        let self_data = unsafe { self.get_unchecked_mut() };

        self_data.free_table = [true; N];
        self_data.total_free = self_data.heap_size();
    }

}


#[cfg(test)]
mod tests {

    use std::{pin::pin, ptr};

    use super::*;


    #[test]
    fn check_new() {

        let alloc = FixedSizeAllocator::<8, 64>::new(false);

        assert_eq!(alloc.block_size(), 8);
        assert_eq!(alloc.heap_size(), 64*8);
        assert_eq!(alloc.block_count(), 64);
        assert_eq!(alloc.free_blocks(), 64);
        assert_eq!(alloc.allocated_blocks(), 0);
        assert_eq!(alloc.total_allocated(), 0);
        assert_eq!(alloc.total_free(), alloc.heap_size());
    }


    #[test]
    fn check_alloc() {

        let mut alloc = FixedSizeAllocator::<8, 64>::new(false);

        for _ in 0..64 {
            assert!(alloc.as_mut().alloc::<usize>().is_some());
        }

        assert_eq!(alloc.total_free(), 0);
        assert_eq!(alloc.free_blocks(), 0);
    }


    #[test]
    fn check_free() {

        let mut alloc = FixedSizeAllocator::<8, 64>::new(false);

        let mut ptrs = Vec::with_capacity(64);
        for _ in 0..64 {
            ptrs.push(
                alloc.as_mut().alloc::<usize>().unwrap()
            );
        }

        for ptr in ptrs {
            if let Err(e) = alloc.as_mut().free_nonnull(ptr) {
                panic!("Unexpected error {:?}", e);
            }
        }

        assert_eq!(alloc.total_free(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), alloc.block_count());
    }


    #[test]
    fn check_new_stack() {

        let alloc = pin!( unsafe {
            FixedSizeAllocator::<8, 64>::new_unpinned(false)
        });

        assert_eq!(alloc.block_size(), 8);
        assert_eq!(alloc.heap_size(), 64*8);
        assert_eq!(alloc.block_count(), 64);
        assert_eq!(alloc.free_blocks(), 64);
        assert_eq!(alloc.allocated_blocks(), 0);
        assert_eq!(alloc.total_allocated(), 0);
        assert_eq!(alloc.total_free(), alloc.heap_size());
    }


    #[test]
    fn check_alloc_stack() {

        let mut alloc = pin!( unsafe {
            FixedSizeAllocator::<8, 64>::new_unpinned(false)
        });

        for _ in 0..64 {
            assert!(alloc.as_mut().alloc::<usize>().is_some());
        }

        assert_eq!(alloc.total_free(), 0);
        assert_eq!(alloc.free_blocks(), 0);
    }


    #[test]
    fn check_free_stack() {

        let mut alloc = pin!( unsafe {
            FixedSizeAllocator::<8, 64>::new_unpinned(false)
        });

        let mut ptrs = Vec::with_capacity(64);
        for _ in 0..64 {
            ptrs.push(
                alloc.as_mut().alloc::<usize>().unwrap()
            );
        }

        for ptr in ptrs {
            if let Err(e) = alloc.as_mut().free_nonnull(ptr) {
                panic!("Unexpected error {:?}", e);
            }
        }

        assert_eq!(alloc.total_free(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), alloc.block_count());
    }


    #[test]
    fn check_invalid_usage() {

        let mut alloc = FixedSizeAllocator::<{mem::size_of::<usize>()}, 64>::new(false);

        assert!(matches!(alloc.as_mut().free(ptr::null::<usize>()), Err(FreeError::NullPtrFree)));
        assert!(matches!(alloc.as_mut().free(1 as *const usize), Err(FreeError::FreeOutOfBounds)));
        assert!(matches!(alloc.as_mut().free(usize::MAX as *const usize), Err(FreeError::FreeOutOfBounds)));

        let ptr: NonNull<usize> = alloc.as_mut().alloc().unwrap();

        assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
        assert!(matches!(alloc.as_mut().free_nonnull(ptr), Err(FreeError::DoubleFree)));
    }


    #[test]
    fn check_invalid_usage_stack() {

        let mut alloc = pin!( unsafe {
            FixedSizeAllocator::<{mem::size_of::<usize>()}, 64>::new_unpinned(false)
        });

        assert!(matches!(alloc.as_mut().free(ptr::null::<usize>()), Err(FreeError::NullPtrFree)));
        assert!(matches!(alloc.as_mut().free(1 as *const usize), Err(FreeError::FreeOutOfBounds)));
        assert!(matches!(alloc.as_mut().free(usize::MAX as *const usize), Err(FreeError::FreeOutOfBounds)));

        let ptr: NonNull<usize> = alloc.as_mut().alloc().unwrap();

        assert!(alloc.as_mut().free_nonnull(ptr).is_ok());
        assert!(matches!(alloc.as_mut().free_nonnull(ptr), Err(FreeError::DoubleFree)));
    }


    #[test]
    fn check_alloc_all_free_all() {

        let mut alloc = FixedSizeAllocator::<{mem::size_of::<u32>()}, 64>::new(false);

        for i in 0..64 {
            let ptr = alloc.as_mut().alloc::<u32>().unwrap();
            unsafe {
                ptr.write(i);
            }
        }

        assert_eq!(alloc.total_free(), 0);
        assert_eq!(alloc.total_allocated(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), 0);
        assert_eq!(alloc.allocated_blocks(), alloc.block_count());

        unsafe {
            alloc.as_mut().free_all();
        }

        assert_eq!(alloc.total_free(), alloc.heap_size());
        assert_eq!(alloc.free_blocks(), alloc.block_count());
        assert_eq!(alloc.allocated_blocks(), 0);
        assert_eq!(alloc.total_allocated(), 0);
    }


}