use std::alloc::Layout;
use std::cell::Cell;
use std::ptr::{self, NonNull};
/// Size in bytes of the header that prefixes every heap-allocated slab.
const HEADER_SIZE: usize = std::mem::size_of::<SlabHeader>();
/// Minimum size of the first heap slab; later slabs double (see `Arena::grow`).
const INITIAL_SLAB_SIZE: usize = 4096;
/// Every pointer handed out by `Arena::alloc` is aligned to this many bytes.
const ALLOC_ALIGN: usize = 8;
/// Alignment used when allocating slabs: the stricter of the header's own
/// alignment and `ALLOC_ALIGN`, so the header is properly placed and the
/// payload that follows it can start on an `ALLOC_ALIGN` boundary.
const SLAB_ALIGN: usize = if std::mem::align_of::<SlabHeader>() >= ALLOC_ALIGN {
    std::mem::align_of::<SlabHeader>()
} else {
    ALLOC_ALIGN
};
// Compile-time invariants: a slab base aligned to SLAB_ALIGN, advanced by
// HEADER_SIZE, yields an ALLOC_ALIGN-aligned first allocation address.
// `alloc_slow` relies on this (its `padding` is 0 on a fresh slab).
const _: () = assert!(SLAB_ALIGN >= ALLOC_ALIGN);
const _: () = assert!(HEADER_SIZE.is_multiple_of(ALLOC_ALIGN));
/// Header stored at the start of every heap slab, forming an intrusive
/// singly linked list from the newest slab back to the sentinel.
#[repr(C)]
struct SlabHeader {
    // Previously active slab; `None` only in the static sentinel.
    prev: Option<NonNull<SlabHeader>>,
    // Total slab size in bytes, including this header. `0` marks the
    // sentinel, which terminates walks and must never be deallocated.
    size: usize,
}
// SAFETY: needed only so `EMPTY_SLAB` can be a `static` (`NonNull` is not
// `Sync`). The sole shared instance is `EMPTY_SLAB`, which is immutable
// (`prev: None`, `size: 0`) and is never written through.
unsafe impl Sync for SlabHeader {}
/// Sentinel "slab" shared by all fresh arenas. Its `size == 0` makes
/// `grow` start from 0 capacity and stops the dealloc walk in `Drop`.
static EMPTY_SLAB: SlabHeader = SlabHeader {
    prev: None,
    size: 0,
};
/// Bump allocator over a chain of geometrically growing heap slabs.
///
/// Allocation is a pointer bump between `ptr` and `end`; when the current
/// slab is exhausted, a new slab is allocated and linked onto `slab`.
/// All memory is reclaimed at once when the arena is dropped.
pub struct Arena {
    // Next free byte in the current slab (dangling until the first slab
    // is allocated, with `ptr == end` so the first alloc takes the slow path).
    ptr: Cell<NonNull<u8>>,
    // One past the last usable byte of the current slab.
    end: Cell<NonNull<u8>>,
    // Most recently allocated slab header; starts at the `EMPTY_SLAB` sentinel.
    slab: Cell<NonNull<SlabHeader>>,
}
// Guard against accidental size growth: exactly three pointer-sized fields
// (the `Option<NonNull<_>>` inside `SlabHeader` is niche-packed elsewhere;
// here each `Cell<NonNull<_>>` is one word).
#[cfg(target_pointer_width = "64")]
const _: () = assert!(std::mem::size_of::<Arena>() == 24);
// SAFETY: `Arena` is not `Sync` (it contains `Cell`s), so after a move to
// another thread all access is still exclusive through that thread. The
// slab pointers refer to heap allocations owned by this arena (valid on any
// thread) or to the immutable `EMPTY_SLAB` static.
unsafe impl Send for Arena {}
impl Default for Arena {
fn default() -> Self {
Self::new()
}
}
impl Arena {
    /// Creates an empty arena. No heap allocation happens until the first
    /// `alloc`: `ptr == end` (both dangling) gives 0 remaining bytes, which
    /// forces the first request down the slow path.
    pub fn new() -> Self {
        // SAFETY: a reference to a `static` is never null. The pointer is
        // only ever read through (`grow` reads `size`, `Drop` stops at
        // `size == 0`); the `*mut` cast never leads to a write.
        let sentinel =
            unsafe { NonNull::new_unchecked(&EMPTY_SLAB as *const SlabHeader as *mut SlabHeader) };
        let dangling = NonNull::dangling();
        Arena {
            ptr: Cell::new(dangling),
            end: Cell::new(dangling),
            slab: Cell::new(sentinel),
        }
    }
    /// Bump-allocates `size` bytes aligned to `ALLOC_ALIGN`. The returned
    /// memory is uninitialized and lives until the arena is dropped.
    #[inline]
    pub(crate) fn alloc(&self, size: usize) -> NonNull<u8> {
        let ptr = self.ptr.get();
        let addr = ptr.as_ptr() as usize;
        // Round the bump cursor up to the next ALLOC_ALIGN boundary.
        let aligned_addr = (addr + ALLOC_ALIGN - 1) & !(ALLOC_ALIGN - 1);
        let padding = aligned_addr - addr;
        // Saturating so an absurdly large `size` fails the capacity check
        // below instead of wrapping around.
        let needed = padding.saturating_add(size);
        let remaining = self.end.get().as_ptr() as usize - addr;
        let result = if needed <= remaining {
            // SAFETY: `needed <= remaining` keeps both bumped pointers
            // within the current slab, hence in-bounds and non-null.
            unsafe {
                self.ptr
                    .set(NonNull::new_unchecked(ptr.as_ptr().add(needed)));
                NonNull::new_unchecked(ptr.as_ptr().add(padding))
            }
        } else {
            self.alloc_slow(size)
        };
        // Test builds only: re-derive the pointer through a slice of exactly
        // `size` bytes — presumably to narrow its provenance to the
        // allocation just handed out so Miri/tests catch out-of-bounds use.
        #[cfg(test)]
        if true {
            use std::mem::MaybeUninit;
            unsafe {
                return NonNull::new_unchecked(
                    std::slice::from_raw_parts_mut(result.cast::<MaybeUninit<u8>>().as_ptr(), size)
                        .as_mut_ptr()
                        .cast(),
                );
            }
        }
        result
    }
    /// Grows an allocation from `old_size` to `new_size` bytes. Extends in
    /// place when `ptr` is the most recent allocation and the slab has room;
    /// otherwise allocates fresh memory and copies the old contents over.
    ///
    /// # Safety
    /// `ptr` must point at a live allocation of `old_size` bytes from this
    /// arena, and `new_size >= old_size` must hold (only checked in debug
    /// builds; `new_size - old_size` below would wrap otherwise).
    pub(crate) unsafe fn realloc(
        &self,
        ptr: NonNull<u8>,
        old_size: usize,
        new_size: usize,
    ) -> NonNull<u8> {
        debug_assert!(new_size >= old_size);
        let old_end = ptr.as_ptr() as usize + old_size;
        let head = self.ptr.get().as_ptr() as usize;
        // `ptr` is the last allocation iff its end equals the bump cursor.
        if old_end == head {
            let remaining = self.end.get().as_ptr() as usize - ptr.as_ptr() as usize;
            if new_size <= remaining {
                let extra = new_size - old_size;
                // SAFETY: `new_size <= remaining` keeps the advanced cursor
                // inside the current slab, and `head_ptr - old_size` is
                // exactly `ptr` (see the `old_end == head` check above).
                unsafe {
                    let head_ptr = self.ptr.get().as_ptr();
                    self.ptr.set(NonNull::new_unchecked(head_ptr.add(extra)));
                    let result = NonNull::new_unchecked(head_ptr.sub(old_size));
                    // Test builds: re-derive with provenance limited to the
                    // grown allocation, mirroring `alloc`.
                    #[cfg(test)]
                    if true {
                        use std::mem::MaybeUninit;
                        return NonNull::new_unchecked(
                            std::slice::from_raw_parts_mut(
                                result.cast::<MaybeUninit<u8>>().as_ptr(),
                                new_size,
                            )
                            .as_mut_ptr()
                            .cast(),
                        );
                    }
                    return result;
                }
            }
        }
        // Relocation path. The old bytes stay allocated (arena memory is
        // only reclaimed on drop), so the copy source remains valid.
        let new_ptr = self.alloc(new_size);
        // SAFETY: caller guarantees `old_size` readable bytes at `ptr`; a
        // freshly bumped allocation cannot overlap an earlier one.
        unsafe { ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size) };
        new_ptr
    }
    /// Slow path of [`Arena::alloc`]: allocate a slab with room for `size`
    /// bytes, then bump from it.
    #[cold]
    #[inline(never)]
    fn alloc_slow(&self, size: usize) -> NonNull<u8> {
        self.grow(size);
        let ptr = self.ptr.get();
        let addr = ptr.as_ptr() as usize;
        // A fresh slab's cursor is `base + HEADER_SIZE`, which the
        // compile-time asserts make ALLOC_ALIGN-aligned — so `padding` is 0
        // here; the rounding is kept for symmetry with `alloc`.
        let aligned_addr = (addr + ALLOC_ALIGN - 1) & !(ALLOC_ALIGN - 1);
        let padding = aligned_addr - addr;
        let needed = padding + size;
        debug_assert!(needed <= self.end.get().as_ptr() as usize - addr);
        // SAFETY: `grow` guarantees the new slab holds at least
        // `HEADER_SIZE + size` bytes, so both bumps stay in-bounds.
        unsafe {
            self.ptr
                .set(NonNull::new_unchecked(ptr.as_ptr().add(needed)));
            NonNull::new_unchecked(ptr.as_ptr().add(padding))
        }
    }
    /// Opens an append-only byte builder over the arena's remaining
    /// capacity.
    ///
    /// # Safety
    /// The arena must not be used for any other allocation while the
    /// returned [`Scratch`] is alive: `Scratch::commit` assumes `start` is
    /// still the head of the arena when it bumps `ptr` past the written
    /// bytes, so an interleaved `alloc` would hand out overlapping memory.
    pub(crate) unsafe fn scratch(&self) -> Scratch<'_> {
        let start = self.ptr.get();
        let cap = self.end.get().as_ptr() as usize - start.as_ptr() as usize;
        Scratch {
            arena: self,
            start,
            len: 0,
            cap,
        }
    }
    /// Allocates a new slab able to hold at least `size` payload bytes,
    /// links it onto the slab chain, and resets the bump window to it.
    /// Any unused tail of the previous slab is abandoned until drop.
    fn grow(&self, size: usize) {
        // SAFETY: `self.slab` is either the static sentinel or a live slab
        // this arena allocated; both are valid for reads.
        let current_size = unsafe { self.slab.get().as_ref().size };
        let min_slab = HEADER_SIZE.checked_add(size).expect("layout overflow");
        // Double the previous slab (the sentinel contributes 0), but never
        // go below the request or the initial 4 KiB.
        let new_size = current_size
            .saturating_mul(2)
            .max(min_slab)
            .max(INITIAL_SLAB_SIZE);
        let slab_layout =
            Layout::from_size_align(new_size, SLAB_ALIGN).expect("slab layout overflow");
        // SAFETY: `new_size >= HEADER_SIZE > 0`, so the layout is non-zero.
        let raw = unsafe { std::alloc::alloc(slab_layout) };
        let Some(base) = NonNull::new(raw) else {
            std::alloc::handle_alloc_error(slab_layout);
        };
        // SAFETY: `base` is a fresh `new_size`-byte allocation aligned to
        // SLAB_ALIGN, so the header write and both offsets are in-bounds.
        unsafe {
            let header_ptr = base.as_ptr().cast::<SlabHeader>();
            header_ptr.write(SlabHeader {
                prev: Some(self.slab.get()),
                size: new_size,
            });
            self.slab.set(NonNull::new_unchecked(header_ptr));
            self.ptr
                .set(NonNull::new_unchecked(base.as_ptr().add(HEADER_SIZE)));
            self.end
                .set(NonNull::new_unchecked(base.as_ptr().add(new_size)));
        }
    }
    /// Copies `s` into the arena and returns it with the arena's lifetime.
    pub fn alloc_str(&self, s: &str) -> &str {
        if s.is_empty() {
            // The empty string needs no backing storage.
            return "";
        }
        let dst = self.alloc(s.len());
        // SAFETY: `dst` is a fresh `s.len()`-byte allocation, so the copy
        // is in-bounds and non-overlapping, and the resulting bytes are
        // valid UTF-8 because they come verbatim from a `&str`.
        unsafe {
            std::ptr::copy_nonoverlapping(s.as_ptr(), dst.as_ptr(), s.len());
            std::str::from_utf8_unchecked(std::slice::from_raw_parts(dst.as_ptr(), s.len()))
        }
    }
}
impl Drop for Arena {
    /// Frees every heap slab by walking the intrusive `prev` chain. The
    /// static sentinel (`size == 0`) terminates the walk and is never freed.
    fn drop(&mut self) {
        let mut current = self.slab.get();
        loop {
            // SAFETY: `current` is either a slab allocated by `grow` (not
            // yet freed by this loop) or the sentinel; both are valid reads.
            let header = unsafe { current.as_ref() };
            if header.size == 0 {
                break;
            }
            // Copy the link out *before* deallocating the memory it lives in.
            let prev = header.prev;
            // SAFETY: this exact size/alignment pair passed
            // `Layout::from_size_align` when the slab was created in `grow`.
            let slab_layout = unsafe { Layout::from_size_align_unchecked(header.size, SLAB_ALIGN) };
            // SAFETY: `current` came from `std::alloc::alloc` with this
            // layout and each slab is freed at most once.
            unsafe {
                std::alloc::dealloc(current.as_ptr().cast(), slab_layout);
            }
            match prev {
                Some(p) => current = p,
                // Defensive: real slabs always link back to the sentinel,
                // but stop cleanly if the chain ends early.
                None => break,
            }
        }
    }
}
/// Append-only byte builder that writes directly into its arena's current
/// slab. Created via `Arena::scratch`, whose safety contract forbids other
/// arena allocations while a `Scratch` is alive.
pub(crate) struct Scratch<'a> {
    arena: &'a Arena,
    // First byte of the scratch region; moves when `grow_slow` relocates
    // the data into a fresh slab.
    start: NonNull<u8>,
    // Bytes written so far (`len <= cap`).
    len: usize,
    // Usable bytes at `start` before the current slab ends.
    cap: usize,
}
impl<'a> Scratch<'a> {
    /// Appends a single byte, relocating to a bigger slab when full.
    #[inline]
    pub fn push(&mut self, byte: u8) {
        let len = self.len;
        if len == self.cap {
            self.grow_slow(1);
        }
        // SAFETY: after the (possible) grow, `len < self.cap`, and
        // `self.start` is read *after* `grow_slow` so a relocation is seen.
        unsafe {
            self.start.as_ptr().add(len).write(byte);
        }
        self.len = len + 1;
    }
    /// Appends all of `bytes`, relocating at most once.
    #[inline]
    pub fn extend(&mut self, bytes: &[u8]) {
        if bytes.len() > self.cap - self.len {
            self.grow_slow(bytes.len());
        }
        // SAFETY: capacity was just ensured (`len + bytes.len() <= cap`),
        // so the destination lies inside the scratch region. The source is
        // a live `&[u8]`, which cannot overlap the not-yet-committed bytes
        // past the arena's bump cursor.
        unsafe {
            ptr::copy_nonoverlapping(
                bytes.as_ptr(),
                self.start.as_ptr().add(self.len),
                bytes.len(),
            );
        }
        self.len += bytes.len();
    }
    /// Appends `bytes` with digit-separator underscores removed, numeric-
    /// literal style: every `_` must be both preceded and followed by an
    /// ASCII digit. Returns `false` on a leading, trailing, doubled, or
    /// non-digit-adjacent underscore — note that bytes pushed before the
    /// failure are left in the scratch; the caller is expected to discard.
    #[inline]
    pub(crate) fn push_strip_underscores(&mut self, bytes: &[u8]) -> bool {
        // `prev` starts as 0 (neither digit nor '_'), so a leading '_' fails.
        let mut prev = 0u8;
        for &b in bytes {
            if b == b'_' {
                // An underscore is only legal directly after a digit.
                if !prev.is_ascii_digit() {
                    return false;
                }
            } else {
                // ...and must also be followed by a digit.
                if prev == b'_' && !b.is_ascii_digit() {
                    return false;
                }
                self.push(b);
            }
            prev = b;
        }
        // A trailing underscore is invalid as well.
        prev != b'_'
    }
    /// Returns the bytes written so far, borrowed from the backing slab.
    #[inline]
    pub fn as_bytes(&self) -> &[u8] {
        if self.len == 0 {
            // Nothing written yet (and `start` may still be dangling on a
            // fresh arena).
            return &[];
        }
        // SAFETY: the first `len` bytes at `start` were initialized by
        // `push`/`extend`.
        unsafe { std::slice::from_raw_parts(self.start.as_ptr(), self.len) }
    }
    /// Finalizes the scratch: advances the arena's bump cursor past the
    /// written bytes and returns them with the arena's lifetime.
    pub fn commit(self) -> &'a [u8] {
        if self.len == 0 {
            return &[];
        }
        // SAFETY: the first `len` bytes at `start` are initialized.
        let slice = unsafe { std::slice::from_raw_parts(self.start.as_ptr(), self.len) };
        // SAFETY: `len <= cap` keeps `start + len` within the current slab,
        // and the `Arena::scratch` contract guarantees no other allocation
        // moved the cursor while this scratch was alive.
        unsafe {
            self.arena
                .ptr
                .set(NonNull::new_unchecked(self.start.as_ptr().add(self.len)));
        }
        slice
    }
    /// Relocates the scratch into a fresh slab with room for at least
    /// `len + additional` bytes, copying what was written so far. The old
    /// slab stays allocated (freed on arena drop), so the source of the
    /// copy remains valid.
    #[cold]
    #[inline(never)]
    fn grow_slow(&mut self, additional: usize) {
        let required = self.len.checked_add(additional).expect("scratch overflow");
        // Grow geometrically so repeated pushes stay amortized O(1).
        let new_cap = self.cap.saturating_mul(2).max(required);
        self.arena.grow(new_cap);
        // `grow` reset the arena cursor to the new slab's payload start.
        let new_start = self.arena.ptr.get();
        if self.len > 0 {
            // SAFETY: source is `len` initialized bytes in the old slab;
            // destination is a fresh slab of capacity >= `new_cap >= len`;
            // distinct heap allocations cannot overlap.
            unsafe {
                ptr::copy_nonoverlapping(self.start.as_ptr(), new_start.as_ptr(), self.len);
            }
        }
        self.start = new_start;
        self.cap = self.arena.end.get().as_ptr() as usize - new_start.as_ptr() as usize;
    }
}
#[cfg(test)]
#[path = "./arena_tests.rs"]
mod tests;