use std::{alloc::Layout, ptr::NonNull};

use crate::{AllocError, Allocator, chunk::Chunk};

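/// A bump (arena) allocator that hands out memory from a single
/// fixed-capacity [`Chunk`], front to back.
///
/// `store` owns the backing memory; `ptr` is the bump pointer separating
/// bytes already handed out (`store.base()..ptr`) from free bytes
/// (`ptr..store.limit()`).
///
/// A minimal usage sketch, `ignore`d because the public paths of this type
/// and of the [`Allocator`] trait are assumptions here:
///
/// ```ignore
/// use std::alloc::Layout;
/// // assumes `BufferArena` and the `Allocator` trait are in scope
///
/// let mut arena = BufferArena::with_capacity(4096);
/// let layout = Layout::from_size_align(8, 8).unwrap();
/// let ptr = unsafe { arena.allocate(layout) }.unwrap();
/// assert_eq!(arena.used(), 8);
/// // SAFETY: `ptr` is the most recent allocation from this arena.
/// unsafe { arena.deallocate(ptr.cast(), layout) };
/// assert_eq!(arena.used(), 0);
/// ```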
#[derive(Debug)]
pub struct BufferArena {
    pub(crate) store: Chunk,
    pub(crate) ptr: NonNull<u8>,
}

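/// An arena over `Chunk::default()`, with the bump pointer at the chunk's
/// base and nothing allocated yet.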
impl Default for BufferArena {
    #[inline(always)]
    fn default() -> Self {
        let store = Chunk::default();
        let ptr = store.base();
        Self { store, ptr }
    }
}

impl BufferArena {
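    /// Creates an arena backed by `Chunk::with_capacity(capacity)`, with
    /// the bump pointer at the chunk's base.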
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let store = Chunk::with_capacity(capacity);
        let ptr = store.base();
        Self { store, ptr }
    }

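    /// Bytes between the chunk base and the bump pointer: everything
    /// allocated so far, including alignment padding.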
    #[inline]
    pub fn used(&self) -> usize {
        unsafe { self.ptr.byte_offset_from_unsigned(self.store.base()) }
    }

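    /// Bytes left between the bump pointer and the chunk's limit.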
    #[inline]
    pub fn remaining(&self) -> usize {
        unsafe { self.store.limit().byte_offset_from_unsigned(self.ptr) }
    }

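    /// Whether `ptr` points into this arena's backing chunk.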
    #[inline]
    pub fn contains(&self, ptr: NonNull<u8>) -> bool {
        self.store.contains(ptr)
    }

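    /// Whether an allocation with `layout` would fit: the padding needed
    /// to align the bump pointer plus `layout.size()` must not exceed
    /// `remaining()`.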
    #[inline]
    pub fn sufficient_for(&self, layout: Layout) -> bool {
        self.ptr.align_offset(layout.align()) + layout.size() <= self.remaining()
    }
}

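/// Wraps an existing chunk as an arena, with the bump pointer reset to the
/// chunk's base.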
impl From<Chunk> for BufferArena {
    #[inline]
    fn from(value: Chunk) -> Self {
        let ptr = value.base();
        Self { store: value, ptr }
    }
}

impl Allocator for BufferArena {
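    /// Bump-allocates `layout.size()` bytes at the next address aligned to
    /// `layout.align()`, advancing the bump pointer past them.
    ///
    /// Fails with [`AllocError::CapacityExceeded`] if the padded request
    /// does not fit; alignments above 4096 are rejected only by a debug
    /// assertion.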
    #[inline]
    unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align() <= 4096);
        if self.sufficient_for(layout) {
            let ptr = unsafe { self.ptr.add(self.ptr.align_offset(layout.align())) };
            self.ptr = unsafe { ptr.add(layout.size()) };
            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
        } else {
            Err(AllocError::CapacityExceeded {
                requested: layout.size(),
                remaining: self.remaining(),
            })
        }
    }

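    /// Rewinds the bump pointer to `ptr`, releasing that allocation and
    /// every allocation made after it (LIFO order). The preconditions are
    /// checked only by debug assertions: in release builds, passing a
    /// pointer that did not come from this arena leaves it in an
    /// inconsistent state.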
    #[inline]
    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.store.contains(ptr));
        debug_assert!(unsafe { ptr.add(layout.size()) } <= self.store.limit());
        self.ptr = ptr;
    }

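    /// Extends the most recent allocation in place: `ptr` plus
    /// `old_layout.size()` must equal the current bump pointer
    /// (debug-asserted as "last allocation only"), and the alignment must
    /// be unchanged. Shrinking through `grow` is a logic error and hits
    /// `unreachable!`.
    ///
    /// A sketch of the intended call pattern, under the same path
    /// assumptions as the type-level example:
    ///
    /// ```ignore
    /// let mut arena = BufferArena::with_capacity(4096);
    /// let layout = Layout::from_size_align(8, 1).unwrap();
    /// let ptr = unsafe { arena.allocate(layout) }.unwrap();
    /// let bigger = Layout::from_size_align(16, 1).unwrap();
    /// // SAFETY: `ptr` is the latest allocation and the alignment is unchanged.
    /// let grown = unsafe { arena.grow(ptr.cast(), layout, bigger) }.unwrap();
    /// assert_eq!(arena.used(), 16);
    /// ```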
    #[inline]
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "last allocation only"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Less => {
                if unsafe { ptr.add(new_layout.size()) } <= self.store.limit() {
                    self.ptr = unsafe { ptr.add(new_layout.size()) };
                    Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
                } else {
                    Err(AllocError::CapacityExceeded {
                        requested: new_layout.size() - old_layout.size(),
                        remaining: self.remaining(),
                    })
                }
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Greater => unreachable!("use shrink instead"),
        }
    }

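    /// Trims the most recent allocation in place by pulling the bump
    /// pointer back; the same "last allocation only" and same-alignment
    /// preconditions as `grow` apply, and growing through `shrink` hits
    /// `unreachable!`.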
    #[inline]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, crate::AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "last allocation only"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Greater => {
                self.ptr = unsafe { ptr.add(new_layout.size()) };
                Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Less => unreachable!("use grow instead"),
        }
    }
}

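/// Recovers the backing chunk, discarding the arena's bump position.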
impl From<BufferArena> for Chunk {
    #[inline]
    fn from(value: BufferArena) -> Self {
        value.store
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_buffer_arena_new_and_capacity() {
        let cap = 4096;
        let arena = BufferArena::with_capacity(cap);
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), cap);
    }

    #[test]
    fn test_chunk_conversion() {
        let chunk = Chunk::with_capacity(4096);
        let arena: BufferArena = chunk.into();
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 4096);

        let to_chunk: Chunk = arena.into();
        assert_eq!(to_chunk.capacity(), 4096);
    }

    #[test]
    fn test_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 8);
        assert_eq!(arena.used(), 8);

        unsafe { arena.deallocate(ptr.cast(), layout) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_alignment() {
        let mut arena = BufferArena::with_capacity(4096);
        unsafe { arena.ptr.write_bytes(0, arena.remaining()) };
        let mut prev_end = arena.ptr;

        // Sizes 1..=7 paired with alignments 4096, 32, 16, 8, 4, 2, 1.
        for (i, align) in [1, 2, 4, 8, 16, 32, 4096].into_iter().rev().enumerate() {
            let size = i + 1;
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { arena.allocate_zeroed(layout).unwrap() };
            let addr = ptr.cast::<u8>().as_ptr() as usize;
            assert_eq!(addr % align, 0, "addr {ptr:?} not aligned to {align}");
            // Fill each allocation with its own size so the final memory
            // dump shows exactly where each one landed.
            let fill = size as u8;
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            let data = unsafe { ptr.as_ref() };
            assert_eq!(data, vec![fill; size].as_slice());

            assert!(ptr.cast() >= prev_end, "Allocation overlapped previous");
            prev_end = unsafe { ptr.cast().add(layout.size()) };
        }
        // 28 payload bytes plus 51 bytes of alignment padding.
        assert_eq!(arena.used(), 79);
        let written =
            unsafe { std::slice::from_raw_parts(arena.store.base().as_ptr(), arena.used()) };
        assert_eq!(
            written,
            [
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 0, 0, 0, 0, 0,
                4, 4, 4, 4, 5, 5, 5, 5, 5, 0, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7
            ]
            .as_ref()
        );
        // Rewind everything by "deallocating" back to the chunk base.
        unsafe { arena.deallocate(arena.store.base(), Layout::from_size_align_unchecked(8, 1)) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_allocate_full_arena() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(4096, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 4096);
        assert_eq!(arena.used(), 4096);

        let layout2 = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { arena.allocate(layout2) }.is_err());
    }

    #[test]
    fn test_grow_allocation() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(16, 1).unwrap();
        let grown = unsafe { arena.grow(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(grown.len(), 16);
        assert_eq!(arena.used(), 16);
    }

    #[test]
    fn test_shrink_allocation() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(8, 1).unwrap();
        let shrunk = unsafe { arena.shrink(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(shrunk.len(), 8);
        assert_eq!(arena.used(), 8);
    }

    #[test]
    fn test_multiple_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(4096);
        let layout = Layout::from_size_align(8, 1).unwrap();

        // Repeated allocate/deallocate pairs recycle the same bytes.
        for _ in 0..5 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            assert_eq!(ptr.len(), 8);
            assert_eq!(arena.used(), 8);
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());

            unsafe { arena.deallocate(ptr.cast(), layout) };
            assert_eq!(arena.used(), 0);
        }

        // Several live allocations can be released in LIFO order.
        let mut ptrs = Vec::new();
        for _ in 0..4 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());
            ptrs.push(ptr);
        }
        assert_eq!(arena.used(), 32);

        for ptr in ptrs.into_iter().rev() {
            unsafe { arena.deallocate(ptr.cast(), layout) };
        }
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_multi_alloc() {
        let mut arena = BufferArena::with_capacity(4096);

        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { arena.allocate(layout) }.unwrap();
        unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };

        let data1 = unsafe { ptr.as_ref() };
        assert_eq!(data1, [0xAA; 8].as_slice());

        let remaining = arena.remaining();
        assert_eq!(remaining, 4088);

        // Grow the allocation by 4 bytes and fill only the new tail.
        let new_layout = Layout::from_size_align(layout.size() + 4, layout.align()).unwrap();
        let grown_ptr = unsafe { arena.grow(ptr.cast(), layout, new_layout) }.unwrap();

        unsafe {
            grown_ptr
                .cast::<u8>()
                .add(layout.size())
                .write_bytes(0xBB, 4)
        };

        assert_eq!(arena.remaining(), 4084);

        let grown_data = unsafe {
            std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, new_layout.size())
        };
        let mut expected = vec![0xAA; layout.size()];
        expected.extend_from_slice(&[0xBB; 4]);
        assert_eq!(grown_data, expected.as_slice());

        // A following 4-byte allocation starts right after the grown block.
        let layout = Layout::from_size_align(4, 4).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), layout.size());
        assert_eq!(arena.remaining(), 4080);
    }
}