lite_alloc/single_threaded/
bump_freelist.rs1use crate::{PAGE_SIZE, grow_memory};
2use core::{
3 alloc::{GlobalAlloc, Layout},
4 ptr::{self, null_mut},
5};
6
// SAFETY: this crate targets single-threaded environments (see module path:
// `single_threaded`), so the `static mut` allocator state is never touched by
// more than one thread at a time.
// NOTE(review): this impl is unsound if the allocator is ever used from
// multiple threads — confirm the single-threaded assumption for all targets.
unsafe impl Sync for BumpFreeListAllocator {}
19
/// Zero-sized handle implementing `GlobalAlloc`: a bump allocator with a
/// simple intrusive free list. All state lives in module-level statics, so
/// every instance shares the same heap.
pub struct BumpFreeListAllocator;
34
35impl BumpFreeListAllocator {
36 pub const fn new() -> Self {
37 Self
38 }
39}
40
/// Free-list node stored in-place at the start of a freed block.
/// Allocation sizes are rounded up to at least 16 bytes, which is enough to
/// hold this header on both 32-bit and 64-bit targets.
struct Node {
    /// Next free block, or null at the end of the list.
    next: *mut Node,
    /// Size of this free block in bytes (already 16-byte rounded by `dealloc`).
    size: usize,
}
47
/// Head of the LIFO singly-linked list of freed blocks. Null when empty.
static mut FREE_LIST: *mut Node = null_mut();

/// Current bump pointer: address of the next byte to hand out. 0 until the
/// first allocation grows memory.
static mut HEAP_TOP: usize = 0;
/// One past the last usable heap byte; 0 until the first page is grown.
static mut HEAP_END: usize = 0;
61
62unsafe impl GlobalAlloc for BumpFreeListAllocator {
63 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
64 let align_req = layout.align().max(16);
69 let size = layout.size().max(16);
70
71 let size = (size + 15) & !15;
74
75 unsafe {
82 let mut prev = ptr::addr_of_mut!(FREE_LIST);
83 let mut curr = *prev;
84
85 while !curr.is_null() {
86 if (*curr).size >= size {
87 *prev = (*curr).next;
90 return curr as *mut u8;
91 }
92 prev = ptr::addr_of_mut!((*curr).next);
95 curr = *prev;
96 }
97 }
98
99 unsafe { self.bump_alloc(size, align_req) }
103 }
104
105 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
106 let size = layout.size().max(16);
109 let size = (size + 15) & !15;
110
111 unsafe {
116 let node = ptr as *mut Node;
117 (*node).size = size;
118 (*node).next = FREE_LIST;
119 FREE_LIST = node;
120 }
121 }
122
123 #[cfg(feature = "realloc")]
124 unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
125 let old_size = (layout.size().max(16) + 15) & !15;
128 let req_new_size = (new_size.max(16) + 15) & !15;
129
130 let heap_top = unsafe { HEAP_TOP };
132
133 if ptr as usize + old_size == heap_top {
134 let diff = req_new_size.saturating_sub(old_size);
135 if diff == 0 {
136 return ptr;
137 }
138
139 unsafe {
142 if HEAP_TOP + diff <= HEAP_END {
143 HEAP_TOP += diff;
144 return ptr;
145 }
146
147 let pages_needed =
150 ((HEAP_TOP + diff - HEAP_END + PAGE_SIZE - 1) / PAGE_SIZE).max(1);
151 if grow_memory(pages_needed) != usize::MAX {
152 HEAP_END += pages_needed * PAGE_SIZE;
153 HEAP_TOP += diff;
154 return ptr;
155 }
156 }
157 }
158
159 unsafe {
162 let new_ptr = self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()));
163 if !new_ptr.is_null() {
164 ptr::copy_nonoverlapping(ptr, new_ptr, layout.size());
165 self.dealloc(ptr, layout);
166 }
167 new_ptr
168 }
169 }
170}
171
impl BumpFreeListAllocator {
    /// Bump-allocates `size` bytes at alignment `align`, growing the backing
    /// memory via `grow_memory` when the current region is exhausted.
    /// Returns null if `grow_memory` fails.
    ///
    /// NOTE(review): assumes `grow_memory(pages)` returns the previous size in
    /// pages (or `usize::MAX` on failure) and that newly grown pages are
    /// contiguous with `HEAP_END` — consistent with wasm `memory.grow`
    /// semantics, but confirm against the crate's `grow_memory` definition.
    unsafe fn bump_alloc(&self, size: usize, align: usize) -> *mut u8 {
        unsafe {
            let mut ptr = HEAP_TOP;
            // Round up to the requested alignment (align is a power of two).
            ptr = (ptr + align - 1) & !(align - 1);

            // `ptr < HEAP_TOP` catches wraparound from the round-up above.
            if ptr + size > HEAP_END || ptr < HEAP_TOP {
                let bytes_needed = (ptr + size).saturating_sub(HEAP_END);
                let pages_needed = ((bytes_needed + PAGE_SIZE - 1) / PAGE_SIZE).max(1);

                let prev_page = grow_memory(pages_needed);
                if prev_page == usize::MAX {
                    return null_mut();
                }

                if HEAP_END == 0 {
                    // First grow: the heap starts at the old memory boundary,
                    // not at address 0.
                    let memory_start = prev_page * PAGE_SIZE;
                    ptr = memory_start;
                    // Re-align from the real start. `memory_start` is
                    // page-aligned, so this is a no-op for align <= PAGE_SIZE;
                    // NOTE(review): for align > PAGE_SIZE the allocation could
                    // still overrun HEAP_END — confirm such alignments are
                    // never requested.
                    ptr = (ptr + align - 1) & !(align - 1);
                    HEAP_END = memory_start + pages_needed * PAGE_SIZE;
                } else {
                    HEAP_END += pages_needed * PAGE_SIZE;
                }
            }

            HEAP_TOP = ptr + size;
            ptr as *mut u8
        }
    }

    /// Resets all allocator state. Existing allocations become dangling;
    /// intended for tests only.
    pub unsafe fn reset() {
        unsafe {
            FREE_LIST = null_mut();
            HEAP_TOP = 0;
            HEAP_END = 0;
        }
    }
}
217
#[cfg(test)]
mod tests {
    use super::*;
    use crate::reset_heap;
    use core::alloc::Layout;
    use std::sync::{Mutex, MutexGuard};

    /// Serializes the tests: the allocator's state lives in module statics.
    static TEST_MUTEX: Mutex<()> = Mutex::new(());

    /// Test harness that holds the mutex for its lifetime and resets the
    /// heap both on construction and on drop, giving each test a fresh heap.
    struct SafeAllocator {
        inner: BumpFreeListAllocator,
        _guard: MutexGuard<'static, ()>,
    }

    impl SafeAllocator {
        fn new() -> Self {
            let guard = TEST_MUTEX.lock().unwrap();
            unsafe {
                BumpFreeListAllocator::reset();
                reset_heap();
            }
            Self {
                inner: BumpFreeListAllocator::new(),
                _guard: guard,
            }
        }

        fn alloc(&self, layout: Layout) -> *mut u8 {
            unsafe { self.inner.alloc(layout) }
        }

        fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            unsafe { self.inner.dealloc(ptr, layout) }
        }
    }

    impl Drop for SafeAllocator {
        fn drop(&mut self) {
            unsafe {
                BumpFreeListAllocator::reset();
                reset_heap();
            }
        }
    }

    #[test]
    fn test_basic_allocation() {
        let alloc = SafeAllocator::new();
        let layout = Layout::new::<u64>();

        let p = alloc.alloc(layout);
        assert!(!p.is_null());

        // Round-trip a value through the allocation.
        unsafe {
            *p.cast::<u64>() = 42;
            assert_eq!(*p.cast::<u64>(), 42);
        }

        alloc.dealloc(p, layout);
    }

    #[test]
    fn test_multiple_allocations() {
        let alloc = SafeAllocator::new();
        let layout = Layout::new::<u32>();

        let first = alloc.alloc(layout);
        let second = alloc.alloc(layout);

        assert!(!first.is_null());
        assert!(!second.is_null());
        assert_ne!(first, second);

        // Allocations are at least 16 bytes apart (minimum block size).
        let gap = (second as usize).wrapping_sub(first as usize);
        assert!(gap >= 16);
    }

    #[test]
    fn test_memory_grow() {
        let alloc = SafeAllocator::new();
        // Large enough that two allocations force the heap to grow.
        let layout = Layout::from_size_align(40 * 1024, 16).unwrap();

        let first = alloc.alloc(layout);
        let second = alloc.alloc(layout);

        assert!(!first.is_null());
        assert!(!second.is_null());
        assert_ne!(first, second);

        // Touch every byte of both blocks to verify the memory is usable.
        unsafe {
            first.write_bytes(1, layout.size());
            second.write_bytes(2, layout.size());
        }
    }

    #[test]
    fn test_freelist_reuse() {
        let alloc = SafeAllocator::new();
        let layout = Layout::from_size_align(128, 16).unwrap();

        // A freed block should be recycled for the next same-sized request.
        let first = alloc.alloc(layout);
        alloc.dealloc(first, layout);

        let second = alloc.alloc(layout);
        assert_eq!(first, second);
    }

    #[test]
    fn test_realloc_in_place() {
        let alloc = SafeAllocator::new();
        let layout = Layout::from_size_align(32, 16).unwrap();

        let p = alloc.alloc(layout);
        assert!(!p.is_null());
        unsafe {
            p.write_bytes(0xAA, layout.size());
        }

        let new_size = 64;
        let new_layout = Layout::from_size_align(new_size, 16).unwrap();
        let grown = unsafe { alloc.inner.realloc(p, layout, new_size) };

        // With the custom realloc the top block grows in place; the default
        // trait impl always moves.
        #[cfg(feature = "realloc")]
        assert_eq!(p, grown);
        #[cfg(not(feature = "realloc"))]
        assert_ne!(p, grown);

        unsafe {
            // Old contents survived the resize...
            assert_eq!(*grown.cast::<u8>(), 0xAA);
            assert_eq!(*grown.add(31).cast::<u8>(), 0xAA);
            // ...and the new tail is writable.
            grown.add(32).write_bytes(0xBB, new_size - layout.size());
            assert_eq!(*grown.add(32).cast::<u8>(), 0xBB);
        }

        alloc.dealloc(grown, new_layout);
    }

    #[test]
    fn test_realloc_not_in_place() {
        let alloc = SafeAllocator::new();
        let layout1 = Layout::from_size_align(32, 16).unwrap();
        let p1 = alloc.alloc(layout1);
        assert!(!p1.is_null());

        // A second allocation pins p1 below the heap top, forcing a move.
        let layout2 = Layout::from_size_align(32, 16).unwrap();
        let p2 = alloc.alloc(layout2);
        assert!(!p2.is_null());
        assert_ne!(p1, p2);

        unsafe {
            p1.write_bytes(0xAA, layout1.size());
        }

        let new_size = 64;
        let new_layout = Layout::from_size_align(new_size, 16).unwrap();
        let moved = unsafe { alloc.inner.realloc(p1, layout1, new_size) };

        assert!(!moved.is_null());
        assert_ne!(p1, moved);
        unsafe {
            // Contents must have been copied to the new block.
            assert_eq!(*moved.cast::<u8>(), 0xAA);
            assert_eq!(*moved.add(31).cast::<u8>(), 0xAA);
        }

        alloc.dealloc(moved, new_layout);
        alloc.dealloc(p2, layout2);
    }

    #[test]
    fn test_realloc_shrink() {
        let alloc = SafeAllocator::new();
        let layout = Layout::from_size_align(64, 16).unwrap();

        let p = alloc.alloc(layout);
        assert!(!p.is_null());
        unsafe {
            p.write_bytes(0xCC, layout.size());
        }

        let new_size = 32;
        let new_layout = Layout::from_size_align(new_size, 16).unwrap();
        let shrunk = unsafe { alloc.inner.realloc(p, layout, new_size) };

        // Custom realloc keeps a top-of-heap block in place on shrink; the
        // default trait impl moves it.
        #[cfg(feature = "realloc")]
        assert_eq!(p, shrunk);
        #[cfg(not(feature = "realloc"))]
        assert_ne!(p, shrunk);

        unsafe {
            // The retained prefix keeps its contents.
            assert_eq!(*shrunk.cast::<u8>(), 0xCC);
            assert_eq!(*shrunk.add(31).cast::<u8>(), 0xCC);
        }
        alloc.dealloc(shrunk, new_layout);
    }
}