1use std::{alloc::Layout, ptr::NonNull};
2
3use crate::{Allocator, chunk::Chunk};
4
/// A linear (bump) allocator over a single fixed-size memory `Chunk`.
///
/// Allocation advances `ptr` toward `store.limit`; `deallocate` rewinds
/// `ptr` to the given address, so only LIFO-order frees (or a full reset
/// to `store.base`) are meaningful — see the `Allocator` impl below.
#[derive(Debug)]
pub struct BufferArena {
    /// Backing memory region providing the `base`/`limit` bounds.
    pub(crate) store: Chunk,
    /// Bump pointer: the next free byte. Starts at `store.base` and is
    /// kept within `store.base..=store.limit` by `allocate`/`deallocate`.
    pub(crate) ptr: NonNull<u8>,
}
36
37impl Default for BufferArena {
38 fn default() -> Self {
39 let store = Chunk::new(1 << 12);
40 let ptr = store.base;
41 Self { store, ptr }
42 }
43}
44
45impl BufferArena {
46 #[inline(always)]
63 pub fn new(capacity: usize) -> Self {
64 let store = Chunk::new(capacity);
65 let ptr = store.base;
66 Self { store, ptr }
67 }
68
69 #[inline(always)]
80 pub fn used(&self) -> usize {
81 unsafe { self.ptr.byte_offset_from_unsigned(self.store.base) }
82 }
83
84 #[inline(always)]
95 pub fn remaining(&self) -> usize {
96 unsafe { self.store.limit.byte_offset_from_unsigned(self.ptr) }
97 }
98
99 #[inline(always)]
109 pub fn contains(&self, ptr: NonNull<u8>) -> bool {
110 self.store.contains(ptr)
111 }
112
113 #[inline(always)]
126 pub fn sufficient_for(&self, layout: Layout) -> bool {
127 self.ptr.align_offset(layout.align()) + layout.size() <= self.remaining()
128 }
129}
130
131impl From<Chunk> for BufferArena {
135 fn from(value: Chunk) -> Self {
136 let ptr = value.base;
137 Self { store: value, ptr }
138 }
139}
140
141impl Allocator for BufferArena {
147 #[inline(always)]
158 unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, crate::AllocError> {
159 debug_assert!(layout.align() > 0);
160 debug_assert!(layout.align() <= 4096);
161 if self.sufficient_for(layout) {
162 let ptr = unsafe { self.ptr.add(self.ptr.align_offset(layout.align())) };
163 self.ptr = unsafe { ptr.add(layout.size()) };
164 Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
165 } else {
166 Err(crate::AllocError {})
167 }
168 }
169
170 #[inline(always)]
181 unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
182 debug_assert!(self.store.contains(ptr.cast()));
183 debug_assert!(unsafe { ptr.add(layout.size()) } <= self.store.limit);
184 if self.store.contains(ptr) {
185 self.ptr = ptr;
186 }
187 }
188
189 #[inline(always)]
201 unsafe fn grow(
202 &mut self,
203 ptr: NonNull<u8>,
204 old_layout: Layout,
205 new_layout: Layout,
206 ) -> Result<NonNull<[u8]>, crate::AllocError> {
207 debug_assert_eq!(
208 unsafe { ptr.add(old_layout.size()) },
209 self.ptr,
210 "grow is only supported for the last allocation: {ptr:?} old_:{old_layout:?}, new_{new_layout:?}"
211 );
212 debug_assert_eq!(old_layout.align(), new_layout.align());
213 match old_layout.size().cmp(&new_layout.size()) {
214 std::cmp::Ordering::Less => {
215 debug_assert!(unsafe { ptr.add(new_layout.size()) } <= self.store.limit);
216 self.ptr = unsafe { ptr.add(new_layout.size()) };
217 return Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()));
218 }
219 std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
220 std::cmp::Ordering::Greater => unreachable!("use shrink instead"),
221 }
222 }
223
224 #[inline(always)]
235 unsafe fn shrink(
236 &mut self,
237 ptr: NonNull<u8>,
238 old_layout: Layout,
239 new_layout: Layout,
240 ) -> Result<NonNull<[u8]>, crate::AllocError> {
241 debug_assert_eq!(unsafe { ptr.add(old_layout.size()) }, self.ptr);
242 debug_assert_eq!(old_layout.align(), new_layout.align());
243 match old_layout.size().cmp(&new_layout.size()) {
244 std::cmp::Ordering::Greater => {
245 self.ptr = unsafe { ptr.add(new_layout.size()) };
246 return Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()));
247 }
248 std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
249 std::cmp::Ordering::Less => Err(crate::AllocError {}),
250 }
251 }
252}
253
254impl Into<Chunk> for BufferArena {
258 #[inline(always)]
260 fn into(self) -> Chunk {
261 self.store
262 }
263}
264
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): `Chunk::new(63)` yields 64 bytes of capacity, so `Chunk`
    // apparently rounds the requested size up — TODO confirm against Chunk::new.
    #[test]
    fn test_buffer_arena_new_and_capacity() {
        let cap = 63;
        let arena = BufferArena::new(cap);
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 64);
        assert_eq!(arena.used(), 0);
    }

    // Round-trip Chunk -> BufferArena -> Chunk: capacity is preserved and a
    // freshly wrapped arena starts fully unused.
    #[test]
    fn test_chunk_conversion() {
        let chunk = Chunk::new(128);
        let arena: BufferArena = chunk.into();
        assert_eq!(arena.used(), 0);
        assert_eq!(arena.remaining(), 128);

        let to_chunk: Chunk = arena.into();
        assert_eq!(to_chunk.capacity(), 128);
    }

    // Deallocating the last (here: only) allocation rewinds the bump pointer,
    // returning the arena to the empty state.
    #[test]
    fn test_allocate_and_deallocate() {
        let mut arena = BufferArena::new(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 8);
        assert_eq!(arena.used(), 8);

        unsafe { arena.deallocate(ptr.cast(), layout) };
        assert_eq!(arena.used(), 0);
    }

    // Allocations with decreasing alignments (4096 down to 1) and sizes 1..=7.
    // The chunk is zeroed up front so alignment padding shows up as 0 bytes in
    // the final memory dump.
    #[test]
    fn test_alignment() {
        let mut arena = BufferArena::new(128);
        unsafe { arena.ptr.write_bytes(0, arena.remaining()) };
        let mut prev_end = arena.ptr;

        for (i, align) in [1, 2, 4, 8, 16, 32, 4096].into_iter().rev().enumerate() {
            let size = i + 1;
            let layout = Layout::from_size_align(size, align).unwrap();
            let ptr = unsafe { arena.allocate_zeroed(layout).unwrap() };
            let addr = ptr.cast::<u8>().as_ptr() as usize;
            assert_eq!(addr % align, 0, "addr {ptr:?} not aligned to {align}");
            // Fill allocation i with the byte value `size` so each region is
            // distinguishable in the dump below.
            let fill = size as u8;
            unsafe { ptr.cast::<u8>().write_bytes(fill, layout.size()) };
            let data = unsafe { ptr.as_ref() };
            assert_eq!(data, vec![fill; size].as_slice());

            // Each allocation must start at or after the previous one's end.
            assert!(ptr.cast() >= prev_end, "Allocation overlapped previous");
            prev_end = unsafe { ptr.cast().add(layout.size()) };
        }
        // Payloads plus alignment padding total 79 bytes.
        assert_eq!(arena.used(), 79);
        let written =
            unsafe { std::slice::from_raw_parts(arena.store.base.as_ptr(), arena.used()) };
        // Expected layout: size-1 run at offset 0 (align 4096 — assumes the
        // chunk base itself is 4096-aligned, TODO confirm), size-2 at 32,
        // size-3 at 48, size-4 at 56, size-5 at 60, size-6 at 66, size-7 at
        // 72; the zeros are padding skipped by `allocate`.
        assert_eq!(
            written,
            [
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 0, 0, 0, 0, 0,
                4, 4, 4, 4, 5, 5, 5, 5, 5, 0, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7
            ]
            .as_ref()
        );
        // Rewinding to the chunk base frees everything in one call.
        unsafe { arena.deallocate(arena.store.base, Layout::from_size_align_unchecked(8, 1)) };
        assert_eq!(arena.used(), 0);
    }

    // A single allocation may consume the entire arena; the next request,
    // however small, must then fail.
    #[test]
    fn test_allocate_full_arena() {
        let mut arena = BufferArena::new(16);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), 16);
        assert_eq!(arena.used(), 16);

        let layout2 = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { arena.allocate(layout2) }.is_err());
    }

    // Growing the most recent allocation extends it in place; the arena's
    // usage reflects the new size.
    #[test]
    fn test_grow_allocation() {
        let mut arena = BufferArena::new(32);
        let layout = Layout::from_size_align(8, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(16, 1).unwrap();
        let grown = unsafe { arena.grow(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(grown.len(), 16);
        assert_eq!(arena.used(), 16);
    }

    // Shrinking the most recent allocation returns the tail bytes to the arena.
    #[test]
    fn test_shrink_allocation() {
        let mut arena = BufferArena::new(32);
        let layout = Layout::from_size_align(16, 1).unwrap();
        let ptr = unsafe { arena.allocate(layout).unwrap() };

        let new_layout = Layout::from_size_align(8, 1).unwrap();
        let shrunk = unsafe { arena.shrink(ptr.cast(), layout, new_layout).unwrap() };
        assert_eq!(shrunk.len(), 8);
        assert_eq!(arena.used(), 8);
    }

    // Repeated allocate/deallocate cycles fully recycle the arena, and a batch
    // of allocations can be unwound in reverse (LIFO) order.
    #[test]
    fn test_multiple_allocate_and_deallocate() {
        use std::alloc::Layout;

        let mut arena = BufferArena::new(64);
        let layout = Layout::from_size_align(8, 1).unwrap();

        for _ in 0..5 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            assert_eq!(ptr.len(), 8);
            assert_eq!(arena.used(), 8);
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());

            unsafe { arena.deallocate(ptr.cast(), layout) };
            assert_eq!(arena.used(), 0);
        }

        let mut ptrs = Vec::new();
        for _ in 0..4 {
            let ptr = unsafe { arena.allocate(layout).unwrap() };
            unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };
            assert_eq!(unsafe { ptr.as_ref() }, [0xAA; 8].as_ref());
            ptrs.push(ptr);
        }
        assert_eq!(arena.used(), 32);

        // LIFO order is required: `deallocate` rewinds the bump pointer to the
        // given address, implicitly freeing everything allocated after it.
        for ptr in ptrs.into_iter().rev() {
            unsafe { arena.deallocate(ptr.cast(), layout) };
        }
        assert_eq!(arena.used(), 0);
    }

    // Mixed scenario: allocate, grow the allocation in place, then fill the
    // arena's leftover space exactly.
    #[test]
    fn test_multi_alloc() {
        let mut arena = BufferArena::new(16);

        let layout = Layout::from_size_align(8, 8).unwrap();
        let ptr = unsafe { arena.allocate(layout) }.unwrap();
        unsafe { ptr.cast::<u8>().write_bytes(0xAA, layout.size()) };

        let data1 = unsafe { ptr.as_ref() };
        assert_eq!(data1, [0xAA; 8].as_slice());

        let remaining = arena.remaining();
        assert_eq!(remaining, 8);

        // Grow the last allocation from 8 to 12 bytes: the extra 4 bytes come
        // from the arena tail and the data pointer stays put.
        let new_layout = Layout::from_size_align(layout.size() + 4, layout.align()).unwrap();
        let grown_ptr = unsafe { arena.grow(ptr.cast(), layout, new_layout) }.unwrap();

        unsafe {
            grown_ptr
                .cast::<u8>()
                .add(layout.size())
                .write_bytes(0xBB, 4)
        };

        assert_eq!(arena.remaining(), 4);

        let grown_data = unsafe {
            std::slice::from_raw_parts(grown_ptr.as_ptr() as *const u8, new_layout.size())
        };
        let mut expected = vec![0xAA; layout.size()];
        expected.extend_from_slice(&[0xBB; 4]);
        assert_eq!(grown_data, expected.as_slice());

        // The final 4 bytes are still allocatable (offset 12 is 4-aligned),
        // leaving the arena exactly full.
        let layout = unsafe { Layout::from_size_align_unchecked(4, 4) };
        let ptr = unsafe { arena.allocate(layout).unwrap() };
        assert_eq!(ptr.len(), layout.size());
        assert_eq!(arena.remaining(), 0);
    }
}