use std::{alloc::Layout, ptr::NonNull};

use crate::{AllocError, Allocator, chunk::Chunk};

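/// A bump (arena) allocator that serves allocations from a single backing
/// [`Chunk`].
///
/// `ptr` always points at the first unused byte of `store`. Allocation
/// rounds `ptr` up to the requested alignment and bumps it past the new
/// block, so only the most recent allocation can be deallocated, grown,
/// or shrunk in place.
///
/// A minimal usage sketch; marked `ignore` because the crate name and
/// re-export path are not shown in this module:
///
/// ```ignore
/// use std::alloc::Layout;
///
/// let mut arena = BufferArena::with_capacity(64);
/// let layout = Layout::from_size_align(8, 8).unwrap();
/// // SAFETY: `layout` is valid and the arena outlives the allocation.
/// let block = unsafe { arena.allocate(layout).unwrap() };
/// assert_eq!(block.len(), 8);
/// assert_eq!(arena.used(), 8);
/// ```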
#[derive(Debug)]
pub struct BufferArena {
    /// Backing memory region.
    pub(crate) store: Chunk,
    /// Bump pointer: the start of the unused part of `store`.
    pub(crate) ptr: NonNull<u8>,
}

impl Default for BufferArena {
    #[inline(always)]
    fn default() -> Self {
        let store = Chunk::default();
        let ptr = store.base;
        Self { store, ptr }
    }
}

impl BufferArena {
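    /// Creates an arena backed by a freshly allocated [`Chunk`] of at least
    /// `capacity` bytes (`Chunk::new` may round the capacity up).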
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let store = Chunk::new(capacity);
        let ptr = store.base;
        Self { store, ptr }
    }

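    /// Returns the number of bytes allocated so far, including any padding
    /// inserted for alignment.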
    #[inline]
    pub fn used(&self) -> usize {
        unsafe { self.ptr.byte_offset_from_unsigned(self.store.base) }
    }

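    /// Returns the number of bytes left between the bump pointer and the end
    /// of the backing chunk.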
    #[inline]
    pub fn remaining(&self) -> usize {
        unsafe { self.store.limit.byte_offset_from_unsigned(self.ptr) }
    }

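    /// Returns `true` if `ptr` points into this arena's backing chunk.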
    #[inline]
    pub fn contains(&self, ptr: NonNull<u8>) -> bool {
        self.store.contains(ptr)
    }

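    /// Returns `true` if the arena can satisfy an allocation of `layout`,
    /// accounting for the padding needed to align the bump pointer.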
    #[inline]
    pub fn sufficient_for(&self, layout: Layout) -> bool {
        self.ptr.align_offset(layout.align()) + layout.size() <= self.remaining()
    }
}

impl From<Chunk> for BufferArena {
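    /// Wraps an existing chunk in an empty arena; the bump pointer starts at
    /// the chunk's base.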
    #[inline]
    fn from(value: Chunk) -> Self {
        let ptr = value.base;
        Self { store: value, ptr }
    }
}

impl Allocator for BufferArena {
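    /// Bumps the pointer by the aligned size of `layout`, or returns
    /// [`AllocError::CapacityExceeded`] if the request does not fit.
    ///
    /// # Safety
    ///
    /// `layout.align()` must be at most 4096 (checked in debug builds), which
    /// is assumed to match the base alignment of the backing chunk.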
    #[inline]
    unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align() <= 4096);
        if self.sufficient_for(layout) {
            // Pad up to the requested alignment, then bump past the new block.
            let ptr = unsafe { self.ptr.add(self.ptr.align_offset(layout.align())) };
            self.ptr = unsafe { ptr.add(layout.size()) };
            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
        } else {
            Err(AllocError::CapacityExceeded {
                requested: layout.size(),
                remaining: self.remaining(),
            })
        }
    }

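    /// Releases an allocation by resetting the bump pointer to `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must point into this arena; everything allocated at or after
    /// `ptr` is freed, so this behaves like popping a stack down to `ptr`.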
    #[inline]
    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.store.contains(ptr));
        debug_assert!(unsafe { ptr.add(layout.size()) } <= self.store.limit);
        self.ptr = ptr;
    }

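    /// Grows the most recent allocation in place by bumping the pointer
    /// further; no data is copied because the block cannot move.
    ///
    /// # Safety
    ///
    /// `ptr` must be the most recent allocation, and `new_layout` must have
    /// the same alignment and a size no smaller than `old_layout` (both are
    /// checked in debug builds).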
    #[inline]
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert_eq!(
            unsafe { ptr.add(old_layout.size()) },
            self.ptr,
            "grow is only supported for the last allocation: {ptr:?} old: {old_layout:?}, new: {new_layout:?}"
        );
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Less => {
                if unsafe { ptr.add(new_layout.size()) } <= self.store.limit {
                    // The block is last, so growing is just bumping the pointer.
                    self.ptr = unsafe { ptr.add(new_layout.size()) };
                    Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
                } else {
                    Err(AllocError::CapacityExceeded {
                        requested: new_layout.size() - old_layout.size(),
                        remaining: self.remaining(),
                    })
                }
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Greater => unreachable!("use shrink instead"),
        }
    }

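    /// Shrinks the most recent allocation in place by pulling the bump
    /// pointer back to the new end of the block.
    ///
    /// # Safety
    ///
    /// `ptr` must be the most recent allocation, and `new_layout` must have
    /// the same alignment and a size no larger than `old_layout` (both are
    /// checked in debug builds).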
    #[inline]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert_eq!(unsafe { ptr.add(old_layout.size()) }, self.ptr);
        debug_assert_eq!(old_layout.align(), new_layout.align());
        match old_layout.size().cmp(&new_layout.size()) {
            std::cmp::Ordering::Greater => {
                // The block is last, so shrinking is just pulling the pointer back.
                self.ptr = unsafe { ptr.add(new_layout.size()) };
                Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
            }
            std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
            std::cmp::Ordering::Less => unreachable!("use grow instead"),
        }
    }
}

impl From<BufferArena> for Chunk {
    /// Recovers the backing chunk, discarding the bump pointer.
    #[inline]
    fn from(value: BufferArena) -> Self {
        value.store
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_buffer_arena_new_and_capacity() {
        let cap = 63;
        let arena = BufferArena::with_capacity(cap);
        assert_eq!(arena.used(), 0);
        // Chunk::new rounds the requested capacity up, here from 63 to 64.
        assert_eq!(arena.remaining(), 64);
    }

    #[test]
    fn test_chunk_conversion() {
        let chunk = Chunk::new(128);
        let arena: BufferArena = chunk.into();
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 128);

        let to_chunk: Chunk = arena.into();
        assert_eq!(to_chunk.capacity(), 128);
    }

    #[test]
    fn test_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 8);
        assert_eq!(arena.used(), 8);

        unsafe { arena.deallocate(ptr.cast(), layout) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_alignment() {
        let mut arena = BufferArena::with_capacity(128);
        unsafe { arena.ptr.write_bytes(0, arena.remaining()) };
        let mut prev_end = arena.ptr;

        for (i, align) in [1, 2, 4, 8, 16, 32, 4096].into_iter().rev().enumerate() {
            let size = i + 1;
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { arena.allocate_zeroed(layout).unwrap() };
            let addr = ptr.cast::<u8>().as_ptr() as usize;
            assert_eq!(addr % align, 0, "addr {ptr:?} not aligned to {align}");
            let fill = size as u8;
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            let data = unsafe { ptr.as_ref() };
            assert_eq!(data, vec![fill; size].as_slice());

            assert!(ptr.cast() >= prev_end, "Allocation overlapped previous");
            prev_end = unsafe { ptr.cast().add(layout.size()) };
        }
        // Offsets: 1@0, 2@32, 3@48, 4@56, 5@60, 6@66, 7@72 => 72 + 7 = 79 bytes.
        assert_eq!(arena.used(), 79);
        let written =
            unsafe { std::slice::from_raw_parts(arena.store.base.as_ptr(), arena.used()) };
        assert_eq!(
            written,
            [
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 0, 0, 0, 0, 0,
                4, 4, 4, 4, 5, 5, 5, 5, 5, 0, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7
            ]
            .as_ref()
        );
        // Deallocating the base pointer pops every allocation at once.
        unsafe { arena.deallocate(arena.store.base, Layout::from_size_align_unchecked(8, 1)) };
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_allocate_full_arena() {
        let mut arena = BufferArena::with_capacity(16);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 16);
        assert_eq!(arena.used(), 16);

        let layout2 = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { arena.allocate(layout2) }.is_err());
    }

    #[test]
    fn test_grow_allocation() {
        let mut arena = BufferArena::with_capacity(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(16, 1).unwrap();
        let grown = unsafe { arena.grow(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(grown.len(), 16);
        assert_eq!(arena.used(), 16);
    }

    #[test]
    fn test_shrink_allocation() {
        let mut arena = BufferArena::with_capacity(32);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(8, 1).unwrap();
        let shrunk = unsafe { arena.shrink(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(shrunk.len(), 8);
        assert_eq!(arena.used(), 8);
    }

    #[test]
    fn test_multiple_allocate_and_deallocate() {
        let mut arena = BufferArena::with_capacity(64);
        let layout = Layout::from_size_align(8, 1).unwrap();

        for _ in 0..5 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            assert_eq!(ptr.len(), 8);
            assert_eq!(arena.used(), 8);
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());

            unsafe { arena.deallocate(ptr.cast(), layout) };
            assert_eq!(arena.used(), 0);
        }

        let mut ptrs = Vec::new();
        for _ in 0..4 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());
            ptrs.push(ptr);
        }
        assert_eq!(arena.used(), 32);

        // Deallocate in reverse order so each pointer is the last allocation.
        for ptr in ptrs.into_iter().rev() {
            unsafe { arena.deallocate(ptr.cast(), layout) };
        }
        assert_eq!(arena.used(), 0);
    }

    #[test]
    fn test_multi_alloc() {
        let mut arena = BufferArena::with_capacity(16);

        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { arena.allocate(layout) }.unwrap();
        unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };

        let data1 = unsafe { ptr.as_ref() };
        assert_eq!(data1, [0xAA; 8].as_slice());

        let remaining = arena.remaining();
        assert_eq!(remaining, 8);

        let new_layout = Layout::from_size_align(layout.size() + 4, layout.align()).unwrap();
        let grown_ptr = unsafe { arena.grow(ptr.cast(), layout, new_layout) }.unwrap();

        // Fill only the newly grown tail of the block.
        unsafe {
            grown_ptr
                .cast::<u8>()
                .add(layout.size())
                .write_bytes(0xBB, 4)
        };

        assert_eq!(arena.remaining(), 4);

        let grown_data = unsafe {
            std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, new_layout.size())
        };
        let mut expected = vec![0xAA; layout.size()];
        expected.extend_from_slice(&[0xBB; 4]);
        assert_eq!(grown_data, expected.as_slice());

        let layout = Layout::from_size_align(4, 4).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), layout.size());
        assert_eq!(arena.remaining(), 0);
    }
}