use core::slice;
use std::mem::MaybeUninit;
use std::num::NonZero;
use std::ptr::NonNull;
use crate::Span;
use crate::mem::{BlockRef, BlockSize};
/// Incrementally fills a region of block memory and carves [`Span`]s off the
/// filled prefix.
///
/// The builder tracks one contiguous region inside a block: the bytes
/// `[start, start + filled_bytes)` have been written, and the
/// `available_bytes` bytes immediately after them are still-unfilled
/// capacity.
#[derive(Debug)]
pub(crate) struct SpanBuilder {
    // Handle to the backing block; cloned into every produced `Span`.
    // NOTE(review): presumably keeps the block's memory alive while spans
    // exist — confirm against `BlockRef`'s semantics.
    block_ref: BlockRef,
    // Start of the filled region; advanced forward by `consume`.
    start: NonNull<MaybeUninit<u8>>,
    // Number of initialized (written, unconsumed) bytes at `start`.
    filled_bytes: BlockSize,
    // Number of unfilled bytes immediately after the filled region.
    available_bytes: BlockSize,
}
impl SpanBuilder {
    /// Creates a builder over `len` bytes of unfilled capacity starting at
    /// `start`.
    ///
    /// # Safety
    ///
    /// `start..start + len` must be a valid region of the block referenced
    /// by `block_ref`, exclusively owned by this builder, and must remain
    /// valid for as long as this builder or any [`Span`] produced from it
    /// exists.
    pub(crate) const unsafe fn new(start: NonNull<MaybeUninit<u8>>, len: NonZero<BlockSize>, block_ref: BlockRef) -> Self {
        Self {
            block_ref,
            start,
            // A fresh builder has no data written yet.
            filled_bytes: 0,
            available_bytes: len.get(),
        }
    }

    /// Number of bytes that have been filled but not yet consumed.
    pub(crate) const fn len(&self) -> BlockSize {
        self.filled_bytes
    }

    /// Whether no filled bytes are currently pending consumption.
    pub(crate) const fn is_empty(&self) -> bool {
        self.filled_bytes == 0
    }

    /// Number of unfilled bytes still available for writing.
    #[cfg_attr(test, mutants::skip)]
    pub(crate) fn remaining_capacity(&self) -> usize {
        self.available_bytes as usize
    }

    /// Carves the first `len` filled bytes off into a [`Span`] and advances
    /// the builder past them; the unfilled capacity is unaffected.
    ///
    /// # Panics
    ///
    /// Panics if `len` exceeds [`Self::len`].
    pub(crate) fn consume(&mut self, len: NonZero<BlockSize>) -> Span {
        assert!(len.get() <= self.len());
        // SAFETY: the assert above guarantees the first `len` bytes at
        // `start` lie within the filled region, so the span covers only
        // bytes this builder has marked initialized.
        let span = unsafe { Span::new(self.start.cast(), len.get(), self.block_ref.clone()) };
        // Cannot underflow: `len <= filled_bytes` was just asserted.
        self.filled_bytes = self.filled_bytes.wrapping_sub(len.get());
        // SAFETY: advancing by at most `filled_bytes` keeps the pointer
        // within (or one past the end of) the builder's region.
        self.start = unsafe { self.start.add(len.get() as usize) };
        span
    }

    /// Returns a [`Span`] over all currently filled bytes without consuming
    /// them from the builder.
    pub(crate) fn peek(&self) -> Span {
        // SAFETY: `filled_bytes` bytes starting at `start` are exactly the
        // filled region of this builder, backed by `block_ref`.
        unsafe { Span::new(self.start.cast(), self.filled_bytes, self.block_ref.clone()) }
    }

    /// The block this builder's memory belongs to.
    pub(crate) const fn block(&self) -> &BlockRef {
        &self.block_ref
    }

    /// Mutable view of the unfilled capacity, for callers to write into
    /// before committing bytes with [`Self::advance`].
    #[cfg_attr(test, mutants::skip)]
    pub(crate) fn unfilled_slice_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: the unfilled region begins immediately after the
        // `filled_bytes` filled bytes, still inside the builder's region.
        let available_start = unsafe { self.start.add(self.filled_bytes as usize) };
        // SAFETY: `available_bytes` bytes from `available_start` belong to
        // this builder and are exclusively borrowed via `&mut self`.
        unsafe { slice::from_raw_parts_mut(available_start.as_ptr(), self.available_bytes as usize) }
    }

    /// Marks the first `len` bytes of the unfilled region as filled.
    ///
    /// # Safety
    ///
    /// The caller must have initialized the first `len` bytes of
    /// [`Self::unfilled_slice_mut`], and `len` must not exceed
    /// [`Self::remaining_capacity`] (otherwise the counters below wrap and
    /// the builder's invariants are broken).
    pub(crate) unsafe fn advance(&mut self, len: usize) {
        #[expect(clippy::cast_possible_truncation, reason = "guaranteed by safety requirements")]
        let count = len as BlockSize;
        // Move `count` bytes from the "available" tally to the "filled" one;
        // wrapping ops skip the bounds check the caller already owes us.
        self.available_bytes = self.available_bytes.wrapping_sub(count);
        self.filled_bytes = self.filled_bytes.wrapping_add(count);
    }

    /// Splits `count` bytes off the END of this builder's unfilled capacity
    /// into a new, empty builder sharing the same block.
    ///
    /// # Panics
    ///
    /// Panics if `count` exceeds [`Self::remaining_capacity`].
    #[cfg_attr(test, mutants::skip)]
    pub(crate) fn split_off_available(&mut self, count: NonZero<BlockSize>) -> Self {
        assert!(count.get() as usize <= self.remaining_capacity());
        // Capacity this builder keeps; cannot underflow after the assert.
        let keep = self.available_bytes.wrapping_sub(count.get());
        // SAFETY: `filled_bytes + keep` offsets to the tail of the original
        // region, so `[new_start, new_start + count)` stays inside it.
        let new_start = unsafe { self.start.add((self.filled_bytes.wrapping_add(keep)) as usize) };
        self.available_bytes = keep;
        Self {
            block_ref: self.block_ref.clone(),
            start: new_start,
            filled_bytes: 0,
            available_bytes: count.get(),
        }
    }

    /// Test helper: copies `src` into the unfilled region and marks those
    /// bytes filled.
    ///
    /// # Panics
    ///
    /// Panics if `src` does not fit in the remaining capacity.
    #[cfg(test)]
    pub(crate) fn put_slice(&mut self, src: &[u8]) {
        use std::ptr;
        let len = src.len();
        assert!(self.remaining_capacity() >= len);
        let dest_slice = self.unfilled_slice_mut();
        // SAFETY: `dest_slice` holds at least `len` writable bytes (asserted
        // above), and `src` cannot overlap the uniquely borrowed destination.
        unsafe {
            ptr::copy_nonoverlapping(src.as_ptr(), dest_slice.as_mut_ptr().cast(), len);
        }
        // SAFETY: exactly `len` bytes were just initialized by the copy.
        unsafe { self.advance(len) };
    }
}
// SAFETY: NOTE(review): the raw `NonNull` field suppresses the auto traits;
// these impls assert that the tracked region is exclusively owned by this
// builder. Soundness also requires `BlockRef` itself to be safe to send and
// share across threads — confirm that before relying on these.
unsafe impl Send for SpanBuilder {}
unsafe impl Sync for SpanBuilder {}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
    use new_zealand::nz;
    use static_assertions::assert_impl_all;

    use super::*;
    use crate::mem::testing::std_alloc_block;

    // Compile-time verification matching the manual `Send`/`Sync` impls.
    assert_impl_all!(SpanBuilder: Send, Sync);

    /// Fill -> consume cycles: the counters track filled vs. available bytes.
    #[test]
    fn smoke_test() {
        let mut builder = std_alloc_block::allocate(nz!(10)).into_span_builder();
        // Fresh builder: nothing filled, full capacity available.
        assert_eq!(builder.len(), 0);
        assert!(builder.is_empty());
        assert_eq!(builder.remaining_capacity(), 10);
        builder.put_slice(&1234_u64.to_ne_bytes());
        assert_eq!(builder.len(), 8);
        assert!(!builder.is_empty());
        assert_eq!(builder.remaining_capacity(), 2);
        // Consuming drains the filled bytes but leaves capacity untouched.
        _ = builder.consume(nz!(8));
        assert_eq!(builder.len(), 0);
        assert!(builder.is_empty());
        assert_eq!(builder.remaining_capacity(), 2);
        builder.put_slice(&1234_u16.to_ne_bytes());
        assert_eq!(builder.len(), 2);
        assert!(!builder.is_empty());
        assert_eq!(builder.remaining_capacity(), 0);
        _ = builder.consume(nz!(2));
        assert_eq!(builder.len(), 0);
        assert!(builder.is_empty());
        assert_eq!(builder.remaining_capacity(), 0);
    }

    /// `peek` exposes every filled byte without consuming it from the builder.
    #[test]
    fn peek() {
        let mut builder = std_alloc_block::allocate(nz!(10)).into_span_builder();
        builder.put_slice(&1234_u32.to_ne_bytes());
        builder.put_slice(&5678_u32.to_ne_bytes());
        builder.put_slice(&90_u16.to_ne_bytes());
        let mut peeked = builder.peek();
        assert_eq!(peeked.len(), 10);
        assert_eq!(peeked.as_ref().len(), 10);
        // Reading back advances the peeked span, not the builder itself.
        assert_eq!(u32::from_ne_bytes(peeked.get_array()), 1234);
        assert_eq!(u32::from_ne_bytes(peeked.get_array()), 5678);
        assert_eq!(u16::from_ne_bytes(peeked.get_array()), 90);
        assert_eq!(peeked.len(), 0);
        assert_eq!(peeked.as_ref().len(), 0);
        _ = builder.consume(nz!(10));
    }

    /// Splitting off the entire capacity leaves the original builder with none.
    #[test]
    fn split_off_available_full() {
        let mut builder = std_alloc_block::allocate(nz!(20)).into_span_builder();
        assert_eq!(builder.remaining_capacity(), 20);
        let split = builder.split_off_available(nz!(20));
        assert_eq!(builder.remaining_capacity(), 0);
        assert_eq!(builder.len(), 0);
        assert_eq!(split.remaining_capacity(), 20);
        assert_eq!(split.len(), 0);
    }

    /// Partial split on a partially filled builder: existing data stays with
    /// the original, and the split-off builder is independently writable.
    #[test]
    fn split_off_available_partial_with_data() {
        let mut builder = std_alloc_block::allocate(nz!(20)).into_span_builder();
        builder.put_slice(&1234_u64.to_ne_bytes());
        assert_eq!(builder.len(), 8);
        assert_eq!(builder.remaining_capacity(), 12);
        let mut split = builder.split_off_available(nz!(5));
        assert_eq!(builder.len(), 8);
        assert_eq!(builder.remaining_capacity(), 7);
        assert_eq!(split.len(), 0);
        assert_eq!(split.remaining_capacity(), 5);
        split.put_slice(&5678_u32.to_ne_bytes());
        assert_eq!(split.len(), 4);
        assert_eq!(split.remaining_capacity(), 1);
    }

    /// Requesting more than the remaining capacity must panic.
    #[test]
    #[should_panic]
    fn split_off_available_panics_on_overflow() {
        let mut builder = std_alloc_block::allocate(nz!(10)).into_span_builder();
        // 8 of 10 bytes filled leaves capacity 2; splitting off 3 overflows.
        builder.put_slice(&1234_u64.to_ne_bytes());
        builder.split_off_available(nz!(3));
    }

    /// Guards against accidental growth of the struct's memory footprint.
    #[test]
    fn size_change_detector() {
        assert_eq!(size_of::<SpanBuilder>(), 32);
    }
}