use std::any::type_name;
use std::mem::{self, MaybeUninit};
use std::num::NonZero;
use smallvec::SmallVec;
use crate::mem::{Block, BlockMeta, BlockSize, Memory};
use crate::{BytesBufWriter, BytesView, MAX_INLINE_SPANS, MemoryGuard, Span, SpanBuilder};
#[doc = include_str!("../doc/snippets/sequence_memory_layout.md")]
#[derive(Default)]
pub struct BytesBuf {
    /// Spans holding bytes that have been written and frozen (sealed for
    /// reading), stored in logical order: index 0 is the first readable span.
    frozen_spans: SmallVec<[Span; MAX_INLINE_SPANS]>,
    /// Writable span builders stored in REVERSE logical order: `last()` is the
    /// builder currently being written to; freshly reserved capacity is
    /// inserted at index 0 (the logical tail).
    span_builders_reversed: SmallVec<[SpanBuilder; MAX_INLINE_SPANS]>,
    /// Cached total of readable bytes (frozen plus written-but-unfrozen);
    /// cross-checked against the spans in debug builds.
    len: usize,
    /// Cached count of bytes currently held in `frozen_spans`.
    frozen: usize,
    /// Cached remaining writable capacity across all span builders.
    available: usize,
}
impl BytesBuf {
    /// Creates an empty buffer with no capacity; call [`Self::reserve`] before
    /// writing.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Builds a buffer whose writable capacity comes from the given memory
    /// blocks, each converted into a span builder.
    #[must_use]
    pub fn from_blocks<I>(blocks: I) -> Self
    where
        I: IntoIterator<Item = Block>,
    {
        Self::from_span_builders(blocks.into_iter().map(Block::into_span_builder))
    }

    /// Builds a buffer from existing span builders.
    ///
    /// `available` is initialized to the sum of the builders' remaining
    /// capacities.
    pub(crate) fn from_span_builders<I>(span_builders: I) -> Self
    where
        I: IntoIterator<Item = SpanBuilder>,
    {
        let span_builders: SmallVec<[SpanBuilder; MAX_INLINE_SPANS]> = span_builders.into_iter().collect();
        let available = span_builders.iter().map(SpanBuilder::remaining_capacity).sum();
        Self {
            frozen_spans: SmallVec::new_const(),
            // NOTE(review): the input is stored directly as the reversed list —
            // presumably callers supply builders already in storage (reversed)
            // order; confirm against call sites.
            span_builders_reversed: span_builders,
            len: 0,
            frozen: 0,
            available,
        }
    }

    /// Ensures at least `additional_bytes` of writable capacity, acquiring
    /// memory from `memory_provider` only for the shortfall. No-op when the
    /// current remaining capacity already suffices.
    pub fn reserve<M: Memory + ?Sized>(&mut self, additional_bytes: usize, memory_provider: &M) {
        let bytes_needed = additional_bytes.saturating_sub(self.remaining_capacity());
        let Some(bytes_needed) = NonZero::new(bytes_needed) else {
            return;
        };
        self.extend_capacity_by_at_least(bytes_needed, memory_provider);
    }

    /// Acquires at least `bytes` of fresh capacity from the provider and
    /// attaches it to the logical end of the buffer.
    fn extend_capacity_by_at_least<M: Memory + ?Sized>(&mut self, bytes: NonZero<usize>, memory_provider: &M) {
        let additional_memory = memory_provider.reserve(bytes.get());
        debug_assert!(additional_memory.capacity() >= bytes.get());
        debug_assert!(additional_memory.is_empty());
        self.available = self
            .available
            .checked_add(additional_memory.capacity())
            .expect("buffer capacity cannot exceed usize::MAX");
        // New builders go to the front of the reversed list, i.e. the logical
        // tail of the buffer.
        self.span_builders_reversed.insert_many(0, additional_memory.span_builders_reversed);
    }

    /// Appends already-materialized bytes by adopting the view's spans into
    /// `frozen_spans` (no copying). Empty views are ignored.
    pub(crate) fn append(&mut self, bytes: BytesView) {
        if bytes.is_empty() {
            return;
        }
        let bytes_len = bytes.len();
        // Freeze any written-but-unfrozen bytes first so the appended spans
        // land after them and read order is preserved.
        let total_unfrozen_bytes = NonZero::new(self.span_builders_reversed.last().map_or(0, SpanBuilder::len));
        if let Some(total_unfrozen_bytes) = total_unfrozen_bytes {
            self.freeze_from_first(total_unfrozen_bytes);
            debug_assert!(self.span_builders_reversed.last().map_or(0, SpanBuilder::len) == 0);
        }
        self.len = self.len.checked_add(bytes_len).expect("buffer capacity cannot exceed usize::MAX");
        // Cannot actually wrap: `frozen` never exceeds `len`, and the `len`
        // addition above is checked.
        self.frozen = self.frozen.wrapping_add(bytes_len);
        // The view hands its spans out in reversed order; undo that so
        // `frozen_spans` stays in logical order.
        self.frozen_spans.extend(bytes.into_spans_reversed().into_iter().rev());
    }

    /// Returns a read-only view of all readable bytes without consuming them.
    #[must_use]
    pub fn peek(&self) -> BytesView {
        let mut result_spans_reversed: SmallVec<[Span; MAX_INLINE_SPANS]> = SmallVec::new();
        // Build in reversed view order: the unfrozen tail span first, then the
        // frozen spans from last to first.
        if let Some(first_builder) = self.span_builders_reversed.last() {
            let span = first_builder.peek();
            if !span.is_empty() {
                result_spans_reversed.push(span);
            }
        }
        result_spans_reversed.extend(self.frozen_spans.iter().rev().cloned());
        BytesView::from_spans_reversed(result_spans_reversed)
    }

    /// Number of readable bytes currently in the buffer.
    #[must_use]
    #[cfg_attr(debug_assertions, expect(clippy::missing_panics_doc, reason = "only unreachable panics"))]
    pub fn len(&self) -> usize {
        // Debug builds cross-check the cached counter against the spans.
        #[cfg(debug_assertions)]
        assert_eq!(self.len, self.calculate_len());
        self.len
    }

    /// Debug-only recomputation of `len` directly from the spans and the
    /// active span builder.
    #[cfg(debug_assertions)]
    fn calculate_len(&self) -> usize {
        let frozen_len = self.frozen_spans.iter().map(|x| x.len() as usize).sum::<usize>();
        let unfrozen_len = self.span_builders_reversed.last().map_or(0, SpanBuilder::len) as usize;
        frozen_len.wrapping_add(unfrozen_len)
    }

    /// `true` when the buffer holds no readable bytes.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Total capacity: readable bytes plus remaining writable capacity.
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.len().wrapping_add(self.remaining_capacity())
    }

    /// Remaining writable capacity across all span builders (cached; verified
    /// against the builders in debug builds).
    #[cfg_attr(test, mutants::skip)] pub fn remaining_capacity(&self) -> usize {
        debug_assert_eq!(
            self.available,
            self.span_builders_reversed
                .iter()
                .map(SpanBuilder::remaining_capacity)
                .sum::<usize>()
        );
        self.available
    }

    /// Detaches the first `len` readable bytes as an immutable view.
    ///
    /// # Panics
    /// Panics when `len` exceeds [`Self::len`]; use [`Self::consume_checked`]
    /// for a non-panicking variant.
    pub fn consume(&mut self, len: usize) -> BytesView {
        self.consume_checked(len)
            .expect("attempted to consume more bytes than available in buffer")
    }

    /// Detaches the first `len` readable bytes, or returns `None` when the
    /// buffer holds fewer than `len` bytes.
    #[expect(clippy::missing_panics_doc, reason = "only unreachable panics")]
    #[cfg_attr(test, mutants::skip)] pub fn consume_checked(&mut self, len: usize) -> Option<BytesView> {
        if len > self.len() {
            return None;
        }
        // Make sure the first `len` bytes all live in `frozen_spans`.
        self.ensure_frozen(len);
        let manifest = self.prepare_consume(len);
        let mut result_spans_reversed: SmallVec<[Span; MAX_INLINE_SPANS]> = SmallVec::with_capacity(manifest.required_spans_capacity());
        if manifest.consume_partial_span_bytes != 0 {
            // The last consumed span is only partially taken: slice off its
            // head for the result, then advance the original past it.
            let partially_consumed_frozen_span = self
                .frozen_spans
                .get_mut(manifest.detach_complete_frozen_spans)
                .expect("guarded by ensure_frozen()");
            let take = partially_consumed_frozen_span.slice(0..manifest.consume_partial_span_bytes);
            result_spans_reversed.push(take);
            unsafe { partially_consumed_frozen_span.advance(manifest.consume_partial_span_bytes as usize) };
        }
        // Reversed result order: the partial (logically last) span was pushed
        // first, followed by the complete spans from last to first.
        result_spans_reversed.extend(self.frozen_spans.drain(..manifest.detach_complete_frozen_spans).rev());
        // Cannot wrap: `len <= self.len` was checked above, and ensure_frozen()
        // guarantees `len <= self.frozen`.
        self.len = self.len.wrapping_sub(len);
        self.frozen = self.frozen.wrapping_sub(len);
        Some(BytesView::from_spans_reversed(result_spans_reversed))
    }

    /// Plans a consume of `len` frozen bytes: how many whole frozen spans to
    /// detach, plus how many bytes to take from the following partial span.
    fn prepare_consume(&self, mut len: usize) -> ConsumeManifest {
        debug_assert!(len <= self.frozen);
        let mut detach_complete_frozen_spans: usize = 0;
        for span in &self.frozen_spans {
            let span_len = span.len();
            if span_len as usize <= len {
                // This span is consumed in its entirety.
                detach_complete_frozen_spans = detach_complete_frozen_spans.wrapping_add(1);
                len = len
                    .checked_sub(span_len as usize)
                    .expect("somehow ended up with negative bytes remaining - algorithm defect");
                if len != 0 {
                    continue;
                }
            }
            // Either the consume ended exactly here or the next span is only
            // partially covered; `len` now holds the partial byte count.
            break;
        }
        ConsumeManifest {
            detach_complete_frozen_spans,
            consume_partial_span_bytes: len.try_into().expect("we are supposed to have less than one memory block worth of data remaining but its length does not fit into a single memory block - algorithm defect"),
        }
    }

    /// Detaches all readable bytes from the buffer.
    pub fn consume_all(&mut self) -> BytesView {
        // SAFETY: consuming exactly `self.len()` bytes can never exceed the
        // buffer length, so the checked variant always returns `Some`.
        unsafe { self.consume_checked(self.len()).unwrap_unchecked() }
    }

    /// Splits off `count` bytes of unwritten capacity into a new, empty buffer.
    ///
    /// # Panics
    /// Panics when `count` exceeds [`Self::remaining_capacity`]; use
    /// [`Self::split_off_remaining_checked`] for a non-panicking variant.
    #[must_use]
    pub fn split_off_remaining(&mut self, count: usize) -> Self {
        self.split_off_remaining_checked(count)
            .expect("attempted to split off more remaining capacity than available in buffer")
    }

    /// Splits off `count` bytes of unwritten capacity from the logical end of
    /// the buffer, returning `None` when `count` exceeds the remaining
    /// capacity.
    #[must_use]
    #[expect(clippy::missing_panics_doc, reason = "only unreachable panics")]
    #[cfg_attr(test, mutants::skip)] pub fn split_off_remaining_checked(&mut self, count: usize) -> Option<Self> {
        if count > self.remaining_capacity() {
            return None;
        }
        if count == 0 {
            return Some(Self::new());
        }
        let mut result_builders: SmallVec<[SpanBuilder; MAX_INLINE_SPANS]> = SmallVec::new();
        let mut remaining = count;
        let mut whole_span_builders_to_take: usize = 0;
        // Storage order is reversed, so iterating from the front walks the
        // logical tail: count how many entirely-empty builders move wholesale.
        for span_builder in &self.span_builders_reversed {
            if !span_builder.is_empty() {
                // A builder that already holds written bytes stays with us.
                break;
            }
            let capacity_in_span_builder = span_builder.remaining_capacity();
            if capacity_in_span_builder > remaining {
                break;
            }
            remaining = remaining.wrapping_sub(capacity_in_span_builder);
            whole_span_builders_to_take = whole_span_builders_to_take.wrapping_add(1);
        }
        result_builders.extend(self.span_builders_reversed.drain(..whole_span_builders_to_take));
        if remaining > 0 {
            // Part of the next builder's capacity is also needed: split it off.
            let span_builder = self.span_builders_reversed.first_mut().expect(
                "remaining_capacity() check at the top ensures a builder is available because we have not received enough capacity yet",
            );
            let remaining: u32 = remaining.try_into()
                .expect("the span builder drain loop ensures that remaining capacity comes from one memory block yet the value is too big to fit into a memory block - impossible");
            let split_count = NonZero::new(remaining).expect("guarded by if-statement above");
            result_builders.push(span_builder.split_off_available(split_count));
        }
        self.available = self.available.checked_sub(count).expect("guarded by bounds check above");
        Some(Self::from_span_builders(result_builders))
    }

    /// Freezes `len` written bytes from the active (logically first) span
    /// builder into `frozen_spans`, retiring the builder once it has no
    /// capacity left.
    fn freeze_from_first(&mut self, len: NonZero<BlockSize>) {
        let span_builder = self
            .span_builders_reversed
            .last_mut()
            .expect("there must be at least one span builder for it to be possible to freeze bytes");
        debug_assert!(len.get() <= span_builder.len());
        let span = span_builder.consume(len);
        self.frozen_spans.push(span);
        if span_builder.remaining_capacity() == 0 {
            // Fully written and now fully frozen (see advance(): the queue
            // never keeps a builder at zero remaining capacity): retire it.
            self.span_builders_reversed.pop();
        }
        self.frozen = self
            .frozen
            .checked_add(len.get() as usize)
            .expect("usize overflow should be impossible here because the sequence builder capacity would exceed virtual memory size");
    }

    /// Freezes just enough written bytes so that at least `len` bytes are
    /// frozen; no-op when they already are.
    fn ensure_frozen(&mut self, len: usize) {
        // The shortfall can come only from the single active builder, so it
        // must fit into BlockSize.
        let must_freeze_bytes: BlockSize = len
            .saturating_sub(self.frozen)
            .try_into()
            .expect("requested to freeze more bytes from the first block than can actually fit into one block");
        let Some(must_freeze_bytes) = NonZero::new(must_freeze_bytes) else {
            return;
        };
        self.freeze_from_first(must_freeze_bytes);
    }

    #[doc = include_str!("../doc/snippets/sequence_memory_layout.md")]
    pub fn first_unfilled_slice(&mut self) -> &mut [MaybeUninit<u8>] {
        // Empty slice when the buffer has no capacity at all.
        if let Some(last) = self.span_builders_reversed.last_mut() {
            last.unfilled_slice_mut()
        } else {
            &mut []
        }
    }

    /// Metadata of the block backing [`Self::first_unfilled_slice`], if any.
    #[must_use]
    pub fn first_unfilled_slice_meta(&self) -> Option<&dyn BlockMeta> {
        self.span_builders_reversed.last().and_then(|sb| sb.block().meta())
    }

    /// Marks `count` bytes of [`Self::first_unfilled_slice`] as written.
    ///
    /// # Safety
    /// The caller must have initialized the first `count` bytes of the current
    /// unfilled slice, and `count` must not exceed the active builder's
    /// remaining capacity.
    #[expect(clippy::missing_panics_doc, reason = "only unreachable panics")]
    pub unsafe fn advance(&mut self, count: usize) {
        if count == 0 {
            return;
        }
        debug_assert!(count <= self.span_builders_reversed.last().map_or(0, SpanBuilder::remaining_capacity));
        let span_builder = self
            .span_builders_reversed
            .last_mut()
            .expect("there must be at least one span builder if we wrote nonzero bytes");
        unsafe { span_builder.advance(count) };
        if span_builder.remaining_capacity() == 0 {
            // The builder is full: freeze all of its written bytes and retire
            // it so the queue never holds a zero-capacity builder.
            let len = NonZero::new(span_builder.len())
                .expect("there is no capacity left in the span builder so there must be at least one byte to consume unless we somehow left an empty span builder in the queue");
            self.freeze_from_first(len);
            debug_assert!(
                self.span_builders_reversed
                    .last()
                    .map_or(usize::MAX, SpanBuilder::remaining_capacity)
                    > 0
            );
        }
        self.len = self
            .len
            .checked_add(count)
            .expect("usize overflow should be impossible here because the sequence builder capacity would exceed virtual memory size");
        self.available = self
            .available
            .checked_sub(count)
            .expect("guarded by assertion above - we must have at least this much capacity still available");
    }

    /// Starts a vectored write over the remaining capacity, optionally capped
    /// at `max_len` bytes.
    ///
    /// # Panics
    /// Panics when `max_len` exceeds [`Self::remaining_capacity`]; use
    /// [`Self::begin_vectored_write_checked`] for a non-panicking variant.
    pub fn begin_vectored_write(&mut self, max_len: Option<usize>) -> BytesBufVectoredWrite<'_> {
        self.begin_vectored_write_checked(max_len)
            .expect("attempted to begin a vectored write with a max_len that was greater than the remaining capacity")
    }

    /// Starts a vectored write, returning `None` when `max_len` exceeds the
    /// remaining capacity.
    pub fn begin_vectored_write_checked(&mut self, max_len: Option<usize>) -> Option<BytesBufVectoredWrite<'_>> {
        if let Some(max_len) = max_len
            && max_len > self.remaining_capacity()
        {
            return None;
        }
        Some(BytesBufVectoredWrite { buf: self, max_len })
    }

    /// Iterates the writable slices in logical order, optionally capped to a
    /// total of `max_len` bytes.
    fn iter_available_capacity(&mut self, max_len: Option<usize>) -> BytesBufRemaining<'_> {
        // Logical index 0 is the active builder; None means nothing to yield.
        let next_span_builder_index = if self.span_builders_reversed.is_empty() { None } else { Some(0) };
        BytesBufRemaining {
            buf: self,
            next_span_builder_index,
            max_len,
        }
    }

    /// Returns a guard holding a reference to every memory block used by this
    /// buffer (both frozen spans and span builders), keeping them alive for
    /// the guard's lifetime.
    pub fn extend_lifetime(&self) -> MemoryGuard {
        MemoryGuard::new(
            self.span_builders_reversed
                .iter()
                .map(SpanBuilder::block)
                .map(Clone::clone)
                .chain(self.frozen_spans.iter().map(Span::block_ref).map(Clone::clone)),
        )
    }

    /// Wraps this buffer into a writer that draws further capacity from
    /// `memory`.
    #[inline]
    pub fn into_writer<M: Memory>(self, memory: M) -> BytesBufWriter<M> {
        BytesBufWriter::new(self, memory)
    }
}
impl std::fmt::Debug for BytesBuf {
    // Diagnostic-only rendering: the cached counters plus a compact per-span
    // summary; excluded from mutation testing and coverage measurement.
    #[cfg_attr(test, mutants::skip)] #[cfg_attr(coverage_nightly, coverage(off))] fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // One length per frozen span, in logical order.
        let frozen_spans = self.frozen_spans.iter().map(|x| x.len().to_string()).collect::<Vec<_>>().join(", ");
        // Builders printed in logical order (hence .rev()); builders holding
        // written bytes show "filled + remaining", empty ones just capacity.
        let span_builders = self
            .span_builders_reversed
            .iter()
            .rev()
            .map(|x| {
                if x.is_empty() {
                    x.remaining_capacity().to_string()
                } else {
                    format!("{} + {}", x.len(), x.remaining_capacity())
                }
            })
            .collect::<Vec<_>>()
            .join(", ");
        f.debug_struct(type_name::<Self>())
            .field("len", &self.len)
            .field("frozen", &self.frozen)
            .field("available", &self.available)
            .field("frozen_spans", &frozen_spans)
            .field("span_builders", &span_builders)
            .finish()
    }
}
/// Plan produced by `BytesBuf::prepare_consume`: how a consume operation maps
/// onto the frozen spans.
#[derive(Debug, Clone, Copy)]
struct ConsumeManifest {
    /// Number of leading frozen spans that are consumed in their entirety.
    detach_complete_frozen_spans: usize,
    /// Bytes taken from the start of the next (partially consumed) span; zero
    /// when the consume ends exactly on a span boundary.
    consume_partial_span_bytes: BlockSize,
}
impl ConsumeManifest {
    /// Exact number of spans the planned consume operation will produce: one
    /// per fully detached frozen span, plus one more when a partial span is
    /// also being taken.
    const fn required_spans_capacity(&self) -> usize {
        let partial_span = (self.consume_partial_span_bytes != 0) as usize;
        self.detach_complete_frozen_spans.wrapping_add(partial_span)
    }
}
/// In-progress vectored write over a [`BytesBuf`]'s remaining capacity,
/// created by [`BytesBuf::begin_vectored_write`]. Dropping it without calling
/// `commit` leaves the buffer's length unchanged (the write is abandoned).
#[derive(Debug)]
pub struct BytesBufVectoredWrite<'a> {
    buf: &'a mut BytesBuf,
    /// Optional cap on the total number of bytes this write may produce.
    max_len: Option<usize>,
}
impl BytesBufVectoredWrite<'_> {
    /// Iterates the writable slices covered by this write, in logical order.
    pub fn slices_mut(&mut self) -> BytesBufRemaining<'_> {
        self.buf.iter_available_capacity(self.max_len)
    }

    /// Returns a guard keeping all of the buffer's memory blocks alive.
    pub fn extend_lifetime(&self) -> MemoryGuard {
        self.buf.extend_lifetime()
    }

    /// Commits `bytes_written` bytes, distributing the count over the span
    /// builders in order.
    ///
    /// # Safety
    /// The first `bytes_written` bytes of the slices handed out by
    /// [`Self::slices_mut`] (in iteration order) must have been initialized.
    #[expect(clippy::missing_panics_doc, reason = "only unreachable panics")]
    pub unsafe fn commit(self, bytes_written: usize) {
        debug_assert!(bytes_written <= self.buf.remaining_capacity());
        if let Some(max_len) = self.max_len {
            debug_assert!(bytes_written <= max_len);
        }
        let mut bytes_remaining = bytes_written;
        while bytes_remaining > 0 {
            // Re-fetch the active builder each iteration: advance() may retire
            // a builder that becomes full, moving to the next one.
            let span_builder = self
                .buf
                .span_builders_reversed
                .last_mut()
                .expect("there must be at least one span builder because we still have filled capacity remaining to freeze");
            let bytes_available = span_builder.remaining_capacity();
            let bytes_to_commit = bytes_available.min(bytes_remaining);
            unsafe { self.buf.advance(bytes_to_commit) };
            bytes_remaining = bytes_remaining
                .checked_sub(bytes_to_commit)
                .expect("we somehow advanced the write head more than the count of written bytes");
        }
    }
}
/// Iterator over the writable (unfilled) slices of a [`BytesBuf`], yielded in
/// logical order together with the backing block's metadata.
#[derive(Debug)]
pub struct BytesBufRemaining<'a> {
    buf: &'a mut BytesBuf,
    /// Logical index of the next span builder to yield; `None` once exhausted.
    next_span_builder_index: Option<usize>,
    /// Remaining byte budget when the iteration is capped.
    max_len: Option<usize>,
}
impl<'a> Iterator for BytesBufRemaining<'a> {
    type Item = (&'a mut [MaybeUninit<u8>], Option<&'a dyn BlockMeta>);
    #[cfg_attr(test, mutants::skip)] fn next(&mut self) -> Option<Self::Item> {
        // Cursor counts builders in LOGICAL order; `None` terminates iteration.
        let next_span_builder_index = self.next_span_builder_index?;
        self.next_span_builder_index = Some(
            next_span_builder_index.wrapping_add(1),
        );
        if self.next_span_builder_index == Some(self.buf.span_builders_reversed.len()) {
            self.next_span_builder_index = None;
        }
        // Map the logical index onto the reversed storage order.
        let next_span_builder_index_storage_order = self
            .buf
            .span_builders_reversed
            .len()
            .wrapping_sub(next_span_builder_index + 1);
        let span_builder = self
            .buf
            .span_builders_reversed
            .get_mut(next_span_builder_index_storage_order)
            .expect("iterator cursor referenced a span builder that does not exist");
        let meta_with_a = {
            let meta = span_builder.block().meta();
            // SAFETY: extends the borrow to 'a. The cursor strictly increases,
            // so each builder is visited at most once, and the iterator holds
            // the buffer exclusively borrowed for 'a.
            unsafe { mem::transmute::<Option<&dyn BlockMeta>, Option<&'a dyn BlockMeta>>(meta) }
        };
        let uninit_slice_mut = span_builder.unfilled_slice_mut();
        // SAFETY: same argument as above — disjoint slices, one per builder,
        // never yielded twice.
        let uninit_slice_mut = unsafe { mem::transmute::<&mut [MaybeUninit<u8>], &'a mut [MaybeUninit<u8>]>(&mut *uninit_slice_mut) };
        let uninit_slice_mut = if let Some(max_len) = self.max_len {
            // Trim the slice to the remaining byte budget and stop iterating
            // once the budget is exhausted.
            let constrained_len = uninit_slice_mut.len().min(max_len);
            let adjusted_slice = uninit_slice_mut.get_mut(..constrained_len).expect("guarded by min() above");
            self.max_len = Some(max_len.wrapping_sub(constrained_len));
            if self.max_len == Some(0) {
                self.next_span_builder_index = None;
            }
            adjusted_slice
        } else {
            uninit_slice_mut
        };
        Some((uninit_slice_mut, meta_with_a))
    }
}
impl From<BytesView> for BytesBuf {
    /// Converts a read-only view into a buffer by adopting the view's spans
    /// (no bytes are copied); the resulting buffer has no writable capacity.
    fn from(value: BytesView) -> Self {
        let mut converted = Self::default();
        converted.append(value);
        converted
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
#![allow(clippy::indexing_slicing, reason = "Fine in test code, we prefer panic on error")]
use std::pin::pin;
use new_zealand::nz;
use static_assertions::assert_impl_all;
use testing_aids::assert_panic;
use super::*;
use crate::mem::GlobalPool;
use crate::mem::testing::{FixedBlockMemory, TestMemoryBlock};
// Byte sizes used throughout the tests to make consume() lengths readable.
const U64_SIZE: usize = size_of::<u64>();
const TWO_U64_SIZE: usize = size_of::<u64>() + size_of::<u64>();
const THREE_U64_SIZE: usize = size_of::<u64>() + size_of::<u64>() + size_of::<u64>();

assert_impl_all!(BytesBuf: Send, Sync);

// End-to-end exercise: reserve, write, consume in chunks, then reuse the
// buffer's remaining capacity for another write/consume cycle.
#[test]
fn smoke_test() {
    let memory = FixedBlockMemory::new(nz!(1234));
    let min_length = 1000;
    let mut buf = memory.reserve(min_length);
    assert!(buf.capacity() >= min_length);
    assert!(buf.remaining_capacity() >= min_length);
    assert_eq!(buf.capacity(), buf.remaining_capacity());
    assert_eq!(buf.len(), 0);
    assert!(buf.is_empty());
    buf.put_num_ne(1234_u64);
    buf.put_num_ne(5678_u64);
    buf.put_num_ne(1234_u64);
    buf.put_num_ne(5678_u64);
    assert_eq!(buf.len(), 32);
    assert!(!buf.is_empty());
    // advance(0) must be a no-op.
    unsafe {
        buf.advance(0);
    }
    let mut first_two = buf.consume(TWO_U64_SIZE);
    let mut second_two = buf.consume(TWO_U64_SIZE);
    assert_eq!(first_two.len(), 16);
    assert_eq!(second_two.len(), 16);
    assert_eq!(buf.len(), 0);
    assert_eq!(first_two.get_num_ne::<u64>(), 1234);
    assert_eq!(first_two.get_num_ne::<u64>(), 5678);
    assert_eq!(second_two.get_num_ne::<u64>(), 1234);
    assert_eq!(second_two.get_num_ne::<u64>(), 5678);
    buf.put_num_ne(1111_u64);
    assert_eq!(buf.len(), 8);
    let mut last = buf.consume(U64_SIZE);
    assert_eq!(last.len(), 8);
    assert_eq!(buf.len(), 0);
    assert_eq!(last.get_num_ne::<u64>(), 1111);
    // Consuming from an empty buffer fails gracefully.
    assert!(buf.consume_checked(1).is_none());
    assert!(buf.consume_all().is_empty());
}
// reserve() only acquires the shortfall: capacity grows in whole blocks and
// already-available capacity is counted against the request.
#[test]
fn extend_capacity() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(100));
    buf.reserve(10, &memory);
    assert_eq!(buf.capacity(), 100);
    assert_eq!(buf.remaining_capacity(), 100);
    buf.put_num_ne(1234_u64);
    buf.put_num_ne(5678_u16);
    assert_eq!(buf.len(), 10);
    assert_eq!(buf.remaining_capacity(), 90);
    assert_eq!(buf.capacity(), 100);
    buf.reserve(140, &memory);
    assert_eq!(buf.len(), 10);
    assert_eq!(buf.remaining_capacity(), 190);
    assert_eq!(buf.capacity(), 200);
    buf.reserve(200, &memory);
    assert_eq!(buf.len(), 10);
    assert_eq!(buf.remaining_capacity(), 290);
    assert_eq!(buf.capacity(), 300);
}

// append() (via put_bytes) adopts spans from another buffer's consumed views
// and interleaves correctly with direct writes and empty views.
#[test]
fn append_existing_view() {
    let memory = FixedBlockMemory::new(nz!(1234));
    let min_length = 1000;
    let mut payload_buffer = memory.reserve(min_length);
    let mut target_buffer = memory.reserve(min_length);
    payload_buffer.put_num_ne(1111_u64);
    payload_buffer.put_num_ne(2222_u64);
    payload_buffer.put_num_ne(3333_u64);
    payload_buffer.put_num_ne(4444_u64);
    let payload1 = payload_buffer.consume(TWO_U64_SIZE);
    let payload2 = payload_buffer.consume(TWO_U64_SIZE);
    target_buffer.put_num_ne(5555_u64);
    target_buffer.put_num_ne(6666_u64);
    let _ = target_buffer.consume(U64_SIZE);
    target_buffer.put_bytes(payload1);
    target_buffer.put_bytes(payload2);
    target_buffer.put_bytes(BytesView::default());
    target_buffer.put_num_ne(7777_u64);
    assert_eq!(target_buffer.len(), 48);
    let mut result = target_buffer.consume(48);
    assert_eq!(result.get_num_ne::<u64>(), 6666);
    assert_eq!(result.get_num_ne::<u64>(), 1111);
    assert_eq!(result.get_num_ne::<u64>(), 2222);
    assert_eq!(result.get_num_ne::<u64>(), 3333);
    assert_eq!(result.get_num_ne::<u64>(), 4444);
    assert_eq!(result.get_num_ne::<u64>(), 7777);
}

// consume_all() over a mix of partially consumed, appended, and freshly
// written blocks preserves byte order.
#[test]
fn consume_all_mixed() {
    let mut buf = BytesBuf::new();
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(16, &memory);
    buf.put_num_ne(1111_u64);
    buf.put_num_ne(2222_u64);
    let _ = buf.consume(8);
    let mut append_buf = BytesBuf::new();
    append_buf.reserve(8, &memory);
    append_buf.put_num_ne(3333_u64);
    let reused_bytes_to_append = append_buf.consume_all();
    buf.append(reused_bytes_to_append);
    buf.reserve(8, &memory);
    buf.put_num_ne(4444_u64);
    let mut result = buf.consume_all();
    assert_eq!(result.len(), 24);
    assert_eq!(result.get_num_ne::<u64>(), 2222);
    assert_eq!(result.get_num_ne::<u64>(), 3333);
    assert_eq!(result.get_num_ne::<u64>(), 4444);
}
// peek() returns a non-destructive view spanning block boundaries; the buffer
// length is unchanged by reading the peeked view.
#[test]
#[expect(clippy::cognitive_complexity, reason = "test code")]
fn peek_basic() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(10));
    let peeked = buf.peek();
    assert_eq!(peeked.len(), 0);
    buf.reserve(100, &memory);
    assert_eq!(buf.capacity(), 100);
    buf.put_num_ne(1111_u64);
    let mut peeked = buf.peek();
    assert_eq!(peeked.first_slice().len(), 8);
    assert_eq!(peeked.get_num_ne::<u64>(), 1111);
    assert_eq!(peeked.len(), 0);
    buf.put_num_ne(2222_u64);
    buf.put_num_ne(3333_u64);
    buf.put_num_ne(4444_u64);
    buf.put_num_ne(5555_u64);
    buf.put_num_ne(6666_u64);
    buf.put_num_ne(7777_u64);
    buf.put_num_ne(8888_u64);
    buf.put_byte_repeated(9, 8);
    assert_eq!(buf.len(), 72);
    assert_eq!(buf.capacity(), 100);
    assert_eq!(buf.remaining_capacity(), 28);
    let mut peeked = buf.peek();
    assert_eq!(peeked.len(), 72);
    // Slices follow the 10-byte block boundaries, not the written value sizes.
    assert_eq!(peeked.first_slice().len(), 10);
    assert_eq!(peeked.get_num_ne::<u64>(), 1111);
    assert_eq!(peeked.get_num_ne::<u64>(), 2222);
    assert_eq!(buf.len(), 72);
    assert_eq!(peeked.first_slice().len(), 4);
    assert_eq!(peeked.get_num_ne::<u64>(), 3333);
    assert_eq!(peeked.get_num_ne::<u64>(), 4444);
    assert_eq!(peeked.get_num_ne::<u64>(), 5555);
    assert_eq!(peeked.get_num_ne::<u64>(), 6666);
    assert_eq!(peeked.get_num_ne::<u64>(), 7777);
    assert_eq!(peeked.get_num_ne::<u64>(), 8888);
    for _ in 0..8 {
        assert_eq!(peeked.get_byte(), 9);
    }
    assert_eq!(peeked.len(), 0);
    assert_eq!(peeked.first_slice().len(), 0);
    buf.put_byte_repeated(88, 28);
    let mut peeked = buf.peek();
    peeked.advance(72);
    assert_eq!(peeked.len(), 28);
    for _ in 0..28 {
        assert_eq!(peeked.get_byte(), 88);
    }
}

// A consume that ends mid-span leaves the rest of that frozen span readable.
#[test]
fn consume_part_of_frozen_span() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(10));
    buf.reserve(100, &memory);
    assert_eq!(buf.capacity(), 100);
    buf.put_num_ne(1111_u64);
    buf.put_num_ne(2222_u64);
    let mut first8 = buf.consume(U64_SIZE);
    assert_eq!(first8.get_num_ne::<u64>(), 1111);
    assert!(first8.is_empty());
    buf.put_num_ne(3333_u64);
    let mut second16 = buf.consume(16);
    assert_eq!(second16.get_num_ne::<u64>(), 2222);
    assert_eq!(second16.get_num_ne::<u64>(), 3333);
    assert!(second16.is_empty());
}
// All read paths behave sanely on a buffer with zero capacity.
#[test]
fn empty_buffer() {
    let mut buf = BytesBuf::new();
    assert!(buf.is_empty());
    assert!(buf.peek().is_empty());
    assert_eq!(0, buf.first_unfilled_slice().len());
    let consumed = buf.consume(0);
    assert!(consumed.is_empty());
    let consumed = buf.consume_all();
    assert!(consumed.is_empty());
}

// The capacity iterator yields one full slice per unwritten block.
#[test]
fn iter_available_empty_with_capacity() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(100));
    buf.reserve(1000, &memory);
    assert_eq!(buf.capacity(), 1000);
    assert_eq!(buf.remaining_capacity(), 1000);
    let iter = buf.iter_available_capacity(None);
    let slices: Vec<_> = iter.map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 10);
    for slice in slices {
        assert_eq!(slice.len(), 100);
    }
    buf.reserve(100, &memory);
}

// Partially written blocks yield only their unfilled tail; full blocks yield
// nothing.
#[test]
fn iter_available_nonempty() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(TWO_U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 16);
    assert_eq!(buf.remaining_capacity(), 16);
    buf.put_num_ne(1234_u64);
    assert_eq!(buf.len(), 8);
    assert_eq!(buf.remaining_capacity(), 8);
    let available_slices: Vec<_> = buf.iter_available_capacity(None).map(|(s, _)| s).collect();
    assert_eq!(available_slices.len(), 1);
    assert_eq!(available_slices[0].len(), 8);
    buf.put_num_ne(5678_u32);
    assert_eq!(buf.len(), 12);
    assert_eq!(buf.remaining_capacity(), 4);
    let available_slices: Vec<_> = buf.iter_available_capacity(None).map(|(s, _)| s).collect();
    assert_eq!(available_slices.len(), 1);
    assert_eq!(available_slices[0].len(), 4);
    buf.put_num_ne(9012_u32);
    assert_eq!(buf.len(), 16);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(buf.iter_available_capacity(None).count(), 0);
}

// No capacity at all means an immediately exhausted iterator.
#[test]
fn iter_available_empty_no_capacity() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(buf.iter_available_capacity(None).count(), 0);
}

// Committing zero bytes from a vectored write changes nothing.
#[test]
fn vectored_write_zero() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(TWO_U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 16);
    assert_eq!(buf.remaining_capacity(), 16);
    let vectored_write = buf.begin_vectored_write(None);
    unsafe {
        vectored_write.commit(0);
    }
    assert_eq!(buf.capacity(), 16);
    assert_eq!(buf.remaining_capacity(), 16);
}
// Single-block vectored write: fill the one slice and commit it all.
#[test]
fn vectored_write_one_slice() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 8);
    assert_eq!(buf.remaining_capacity(), 8);
    let mut vectored_write = buf.begin_vectored_write(None);
    let mut slices: Vec<_> = vectored_write.slices_mut().map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 1);
    assert_eq!(slices[0].len(), 8);
    write_copy_of_slice(slices[0], &0x3333_3333_3333_3333_u64.to_ne_bytes());
    unsafe {
        vectored_write.commit(8);
    }
    assert_eq!(buf.len(), 8);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(buf.capacity(), 8);
    let mut result = buf.consume(U64_SIZE);
    assert_eq!(result.get_num_ne::<u64>(), 0x3333_3333_3333_3333);
}

// Partial commit across multiple slices: the second write resumes mid-block
// and the committed bytes read back in order.
#[test]
fn vectored_write_multiple_slices() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(THREE_U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 24);
    assert_eq!(buf.remaining_capacity(), 24);
    let mut vectored_write = buf.begin_vectored_write(None);
    let mut slices: Vec<_> = vectored_write.slices_mut().map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 3);
    assert_eq!(slices[0].len(), 8);
    assert_eq!(slices[1].len(), 8);
    assert_eq!(slices[2].len(), 8);
    write_copy_of_slice(slices[0], &0x3333_3333_3333_3333_u64.to_ne_bytes());
    write_copy_of_slice(slices[1], &0x4444_4444_u32.to_ne_bytes());
    unsafe {
        vectored_write.commit(12);
    }
    assert_eq!(buf.len(), 12);
    assert_eq!(buf.remaining_capacity(), 12);
    assert_eq!(buf.capacity(), 24);
    let mut vectored_write = buf.begin_vectored_write(None);
    let mut slices: Vec<_> = vectored_write.slices_mut().map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 2);
    assert_eq!(slices[0].len(), 4);
    assert_eq!(slices[1].len(), 8);
    write_copy_of_slice(slices[0], &0x5555_5555_u32.to_ne_bytes());
    write_copy_of_slice(slices[1], &0x6666_6666_6666_6666_u64.to_ne_bytes());
    unsafe {
        vectored_write.commit(12);
    }
    assert_eq!(buf.len(), 24);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(buf.capacity(), 24);
    let mut result = buf.consume(THREE_U64_SIZE);
    assert_eq!(result.get_num_ne::<u64>(), 0x3333_3333_3333_3333);
    assert_eq!(result.get_num_ne::<u32>(), 0x4444_4444);
    assert_eq!(result.get_num_ne::<u32>(), 0x5555_5555);
    assert_eq!(result.get_num_ne::<u64>(), 0x6666_6666_6666_6666);
}

// max_len trims the final yielded slice and subsequent writes pick up exactly
// where the capped write stopped.
#[test]
fn vectored_write_max_len() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(THREE_U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 24);
    assert_eq!(buf.remaining_capacity(), 24);
    let mut vectored_write = buf.begin_vectored_write(Some(13));
    let mut slices: Vec<_> = vectored_write.slices_mut().map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 2);
    assert_eq!(slices[0].len(), 8);
    assert_eq!(slices[1].len(), 5);
    write_copy_of_slice(slices[0], &0x3333_3333_3333_3333_u64.to_ne_bytes());
    write_copy_of_slice(slices[1], &0x4444_4444_u32.to_ne_bytes());
    unsafe {
        vectored_write.commit(12);
    }
    assert_eq!(buf.len(), 12);
    assert_eq!(buf.remaining_capacity(), 12);
    assert_eq!(buf.capacity(), 24);
    let mut vectored_write = buf.begin_vectored_write(Some(12));
    let mut slices: Vec<_> = vectored_write.slices_mut().map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 2);
    assert_eq!(slices[0].len(), 4);
    assert_eq!(slices[1].len(), 8);
    write_copy_of_slice(slices[0], &0x5555_5555_u32.to_ne_bytes());
    write_copy_of_slice(slices[1], &0x6666_6666_6666_6666_u64.to_ne_bytes());
    unsafe {
        vectored_write.commit(12);
    }
    assert_eq!(buf.len(), 24);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(buf.capacity(), 24);
    let mut result = buf.consume(THREE_U64_SIZE);
    assert_eq!(result.get_num_ne::<u64>(), 0x3333_3333_3333_3333);
    assert_eq!(result.get_num_ne::<u32>(), 0x4444_4444);
    assert_eq!(result.get_num_ne::<u32>(), 0x5555_5555);
    assert_eq!(result.get_num_ne::<u64>(), 0x6666_6666_6666_6666);
}

// A max_len greater than the remaining capacity panics in the non-checked API.
#[test]
fn vectored_write_max_len_overflow() {
    let mut buf = BytesBuf::new();
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(THREE_U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 24);
    assert_eq!(buf.remaining_capacity(), 24);
    assert_panic!(buf.begin_vectored_write(Some(25)));
}
// Committing more bytes than the remaining capacity panics.
#[test]
fn vectored_write_overcommit() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(TWO_U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 16);
    assert_eq!(buf.remaining_capacity(), 16);
    let vectored_write = buf.begin_vectored_write(None);
    assert_panic!(
        unsafe {
            vectored_write.commit(17);
        }
    );
}

// Dropping a vectored write without committing leaves the buffer unchanged.
#[test]
fn vectored_write_abort() {
    let mut buf = BytesBuf::new();
    assert_eq!(buf.capacity(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    let memory = FixedBlockMemory::new(nz!(8));
    buf.reserve(U64_SIZE, &memory);
    assert_eq!(buf.capacity(), 8);
    assert_eq!(buf.remaining_capacity(), 8);
    let mut vectored_write = buf.begin_vectored_write(None);
    let mut slices: Vec<_> = vectored_write.slices_mut().map(|(s, _)| s).collect();
    assert_eq!(slices.len(), 1);
    assert_eq!(slices[0].len(), 8);
    write_copy_of_slice(slices[0], &0x3333_3333_3333_3333_u64.to_ne_bytes());
    #[expect(clippy::drop_non_drop, reason = "Just being explicit for illustration")]
    drop(vectored_write);
    assert_eq!(buf.len(), 0);
    assert_eq!(buf.remaining_capacity(), 8);
    assert_eq!(buf.capacity(), 8);
}

// extend_lifetime() must reference both frozen-span blocks and builder blocks,
// keeping them alive until the guard drops.
#[test]
fn extend_lifetime_references_all_blocks() {
    let block1 = unsafe { TestMemoryBlock::new(nz!(8), None) };
    let block1 = pin!(block1);
    let block2 = unsafe { TestMemoryBlock::new(nz!(8), None) };
    let block2 = pin!(block2);
    let guard = {
        let block1 = unsafe { block1.as_ref().to_block() };
        let block2 = unsafe { block2.as_ref().to_block() };
        let mut buf = BytesBuf::from_blocks([block1, block2]);
        buf.put_num_ne(1234_u64);
        assert_eq!(buf.frozen_spans.len(), 1);
        assert_eq!(buf.span_builders_reversed.len(), 1);
        buf.extend_lifetime()
    };
    assert_eq!(block1.ref_count(), 1);
    assert_eq!(block2.ref_count(), 1);
    drop(guard);
    assert_eq!(block1.ref_count(), 0);
    assert_eq!(block2.ref_count(), 0);
}

// Same guarantee when the guard is taken through an in-progress vectored write.
#[test]
fn extend_lifetime_during_vectored_write_references_all_blocks() {
    let block1 = unsafe { TestMemoryBlock::new(nz!(8), None) };
    let block1 = pin!(block1);
    let block2 = unsafe { TestMemoryBlock::new(nz!(8), None) };
    let block2 = pin!(block2);
    let guard = {
        let block1 = unsafe { block1.as_ref().to_block() };
        let block2 = unsafe { block2.as_ref().to_block() };
        let mut buf = BytesBuf::from_blocks([block1, block2]);
        buf.put_num_ne(1234_u64);
        assert_eq!(buf.frozen_spans.len(), 1);
        assert_eq!(buf.span_builders_reversed.len(), 1);
        let vectored_write = buf.begin_vectored_write(None);
        vectored_write.extend_lifetime()
    };
    assert_eq!(block1.ref_count(), 1);
    assert_eq!(block2.ref_count(), 1);
    drop(guard);
    assert_eq!(block1.ref_count(), 0);
    assert_eq!(block2.ref_count(), 0);
}
#[test]
fn from_view() {
let memory = GlobalPool::new();
let view1 = BytesView::copied_from_slice(b"bla bla bla", &memory);
let mut buf: BytesBuf = view1.clone().into();
let view2 = buf.consume_all();
assert_eq!(view1, view2);
}
#[test]
fn consume_manifest_correctly_calculated() {
    // 10-byte blocks; 32 written bytes (4 x u64) are then frozen, so the
    // frozen data is laid out as spans of 10 + 10 + 10 + 2 bytes.
    let memory = FixedBlockMemory::new(nz!(10));
    let mut buf = BytesBuf::new();
    buf.reserve(100, &memory);
    for _ in 0..4 {
        buf.put_num_ne(1111_u64);
    }
    buf.ensure_frozen(32);

    // 8 bytes fit inside the first span: nothing detached, partial read of 8.
    let plan = buf.prepare_consume(8);
    assert_eq!(plan.detach_complete_frozen_spans, 0);
    assert_eq!(plan.consume_partial_span_bytes, 8);
    assert_eq!(plan.required_spans_capacity(), 1);

    // Exactly one complete span, no partial bytes.
    let plan = buf.prepare_consume(10);
    assert_eq!(plan.detach_complete_frozen_spans, 1);
    assert_eq!(plan.consume_partial_span_bytes, 0);
    assert_eq!(plan.required_spans_capacity(), 1);

    // One complete span plus a single byte of the next one.
    let plan = buf.prepare_consume(11);
    assert_eq!(plan.detach_complete_frozen_spans, 1);
    assert_eq!(plan.consume_partial_span_bytes, 1);
    assert_eq!(plan.required_spans_capacity(), 2);

    // Three complete spans, no partial bytes.
    let plan = buf.prepare_consume(30);
    assert_eq!(plan.detach_complete_frozen_spans, 3);
    assert_eq!(plan.consume_partial_span_bytes, 0);
    assert_eq!(plan.required_spans_capacity(), 3);

    // Three complete spans plus one byte of the final 2-byte span.
    let plan = buf.prepare_consume(31);
    assert_eq!(plan.detach_complete_frozen_spans, 3);
    assert_eq!(plan.consume_partial_span_bytes, 1);
    assert_eq!(plan.required_spans_capacity(), 4);

    // All 32 bytes: every span detaches completely.
    let plan = buf.prepare_consume(32);
    assert_eq!(plan.detach_complete_frozen_spans, 4);
    assert_eq!(plan.consume_partial_span_bytes, 0);
    assert_eq!(plan.required_spans_capacity(), 4);
}
#[test]
fn size_change_detector() {
    // Canary: fails loudly if the in-memory size of `BytesBuf` ever changes,
    // so layout regressions are a deliberate decision rather than an accident.
    const EXPECTED_BYTES: usize = 552;
    assert_eq!(size_of::<BytesBuf>(), EXPECTED_BYTES);
}
#[test]
fn peek_empty_builder() {
    // A freshly created buffer has nothing to peek at.
    let untouched = BytesBuf::new();
    let view = untouched.peek();
    assert_eq!(view.len(), 0);
    assert!(view.is_empty());
}
#[test]
fn peek_with_frozen_spans_only() {
    // Two u64 writes into 10-byte blocks; peek must expose all 16 bytes in
    // write order without consuming them.
    let pool = FixedBlockMemory::new(nz!(10));
    let mut buf = BytesBuf::new();
    buf.reserve(20, &pool);
    buf.put_num_ne(0x1111_1111_1111_1111_u64);
    buf.put_num_ne(0x2222_2222_2222_2222_u64);
    assert_eq!(buf.len(), 16);

    let mut view = buf.peek();
    assert_eq!(view.len(), 16);
    assert_eq!(view.get_num_ne::<u64>(), 0x1111_1111_1111_1111);
    assert_eq!(view.get_num_ne::<u64>(), 0x2222_2222_2222_2222);

    // Reading through the peeked view leaves the buffer untouched.
    assert_eq!(buf.len(), 16);
}
#[test]
fn peek_with_partially_filled_span_builder() {
    let pool = FixedBlockMemory::new(nz!(10));
    let mut buf = BytesBuf::new();
    buf.reserve(10, &pool);
    // Exactly fill the single 10-byte block: 8 + 2 bytes.
    buf.put_num_ne(0x3333_3333_3333_3333_u64);
    buf.put_num_ne(0x4444_u16);
    assert_eq!(buf.len(), 10);

    let mut view = buf.peek();
    assert_eq!(view.len(), 10);
    assert_eq!(view.get_num_ne::<u64>(), 0x3333_3333_3333_3333);
    assert_eq!(view.get_num_ne::<u16>(), 0x4444);

    // Peeking is non-destructive.
    assert_eq!(buf.len(), 10);
}
#[test]
fn peek_preserves_capacity_of_partial_span_builder() {
    let pool = FixedBlockMemory::new(nz!(20));
    let mut buf = BytesBuf::new();
    buf.reserve(20, &pool);
    buf.put_num_ne(0x5555_5555_5555_5555_u64);
    assert_eq!(buf.len(), 8);
    assert_eq!(buf.remaining_capacity(), 12);

    // Peek the first write; neither length nor spare capacity may change.
    let mut first_view = buf.peek();
    assert_eq!(first_view.len(), 8);
    assert_eq!(first_view.get_num_ne::<u64>(), 0x5555_5555_5555_5555);
    assert_eq!(buf.len(), 8);
    assert_eq!(buf.remaining_capacity(), 12);

    // The partially filled block must still accept further writes afterwards.
    buf.put_num_ne(0x6666_6666_u32);
    assert_eq!(buf.len(), 12);
    assert_eq!(buf.remaining_capacity(), 8);

    // A second peek sees both writes, in order.
    let mut second_view = buf.peek();
    assert_eq!(second_view.len(), 12);
    assert_eq!(second_view.get_num_ne::<u64>(), 0x5555_5555_5555_5555);
    assert_eq!(second_view.get_num_ne::<u32>(), 0x6666_6666);
}
#[test]
fn peek_with_mixed_frozen_and_unfrozen() {
    let pool = FixedBlockMemory::new(nz!(10));
    let mut buf = BytesBuf::new();
    buf.reserve(30, &pool);
    // 24 bytes total across three 10-byte blocks: the last block stays partial.
    buf.put_num_ne(0x1111_1111_1111_1111_u64);
    buf.put_num_ne(0x2222_u16);
    buf.put_num_ne(0x3333_3333_3333_3333_u64);
    buf.put_num_ne(0x4444_u16);
    buf.put_num_ne(0x5555_5555_u32);
    assert_eq!(buf.len(), 24);
    assert_eq!(buf.remaining_capacity(), 6);

    // One contiguous view over everything written so far, in write order.
    let mut view = buf.peek();
    assert_eq!(view.len(), 24);
    assert_eq!(view.get_num_ne::<u64>(), 0x1111_1111_1111_1111);
    assert_eq!(view.get_num_ne::<u16>(), 0x2222);
    assert_eq!(view.get_num_ne::<u64>(), 0x3333_3333_3333_3333);
    assert_eq!(view.get_num_ne::<u16>(), 0x4444);
    assert_eq!(view.get_num_ne::<u32>(), 0x5555_5555);

    // And the buffer itself is unchanged.
    assert_eq!(buf.len(), 24);
    assert_eq!(buf.remaining_capacity(), 6);
}
#[test]
fn peek_then_consume() {
    let pool = FixedBlockMemory::new(nz!(20));
    let mut buf = BytesBuf::new();
    buf.reserve(20, &pool);
    buf.put_num_ne(0x7777_7777_7777_7777_u64);
    buf.put_num_ne(0x8888_8888_u32);
    assert_eq!(buf.len(), 12);

    // Peeking does not advance the buffer.
    let mut before = buf.peek();
    assert_eq!(before.len(), 12);
    assert_eq!(before.get_num_ne::<u64>(), 0x7777_7777_7777_7777);
    assert_eq!(buf.len(), 12);

    // Consuming does: 8 bytes leave, 4 remain.
    let mut taken = buf.consume(8);
    assert_eq!(taken.get_num_ne::<u64>(), 0x7777_7777_7777_7777);
    assert_eq!(buf.len(), 4);

    // A fresh peek reflects the post-consume state.
    let mut after = buf.peek();
    assert_eq!(after.len(), 4);
    assert_eq!(after.get_num_ne::<u32>(), 0x8888_8888);
}
#[test]
fn peek_multiple_times() {
    let pool = FixedBlockMemory::new(nz!(20));
    let mut buf = BytesBuf::new();
    buf.reserve(20, &pool);
    buf.put_num_ne(0xAAAA_AAAA_AAAA_AAAA_u64);

    // Two live peeks may coexist; each reads the same contents independently.
    let mut first = buf.peek();
    let mut second = buf.peek();
    assert_eq!(first.get_num_ne::<u64>(), 0xAAAA_AAAA_AAAA_AAAA);
    assert_eq!(second.get_num_ne::<u64>(), 0xAAAA_AAAA_AAAA_AAAA);
    assert_eq!(buf.len(), 8);
}
#[test]
fn first_unfilled_slice_meta_no_capacity() {
    // With no backing memory there is no unfilled slice, hence no metadata.
    let empty = BytesBuf::new();
    assert!(empty.first_unfilled_slice_meta().is_none());
}
#[test]
fn first_unfilled_slice_meta_no_meta() {
    // Memory reserved from FixedBlockMemory carries no metadata, so the
    // lookup comes back empty even though capacity exists.
    let pool = FixedBlockMemory::new(nz!(64));
    let buf = pool.reserve(64);
    assert!(buf.first_unfilled_slice_meta().is_none());
}
#[test]
fn first_unfilled_slice_meta_with_meta() {
    // Marker type used as custom per-block metadata.
    #[derive(Debug)]
    struct CustomMeta;
    impl BlockMeta for CustomMeta {}
    // A pinned 100-byte test block created with the metadata attached.
    let block = unsafe { TestMemoryBlock::new(nz!(100), Some(Box::new(CustomMeta))) };
    let block = pin!(block);
    let block = unsafe { block.as_ref().to_block() };
    let buf = BytesBuf::from_blocks([block]);
    // The metadata attached at block creation must surface through the
    // buffer, and downcast checks must identify the concrete type.
    let meta = buf.first_unfilled_slice_meta().expect("should have metadata");
    assert!(meta.is::<CustomMeta>());
    assert!(!meta.is::<u8>());
}
/// Copies `src` into the beginning of `dst`, initializing the first
/// `src.len()` elements of `dst` and leaving the remainder untouched.
///
/// # Panics
///
/// Panics if `dst` is shorter than `src`.
fn write_copy_of_slice(dst: &mut [MaybeUninit<u8>], src: &[u8]) {
    assert!(dst.len() >= src.len());
    // Element-wise `MaybeUninit::write` needs no `unsafe`: it initializes
    // each slot in place, and `u8` has no drop glue, so nothing can leak.
    for (slot, &byte) in dst.iter_mut().zip(src) {
        slot.write(byte);
    }
}
#[test]
fn split_off_remaining_basic() {
    let pool = FixedBlockMemory::new(nz!(100));
    let mut buf = pool.reserve(100);
    buf.put_num_ne(1234_u64);
    let capacity_before = buf.remaining_capacity();

    let split = buf.split_off_remaining(20);

    // Written bytes stay behind; exactly 20 bytes of capacity move over.
    assert_eq!(buf.len(), U64_SIZE);
    assert_eq!(buf.remaining_capacity(), capacity_before - 20);
    assert!(split.is_empty());
    assert!(split.remaining_capacity() >= 20);
}
#[test]
fn split_off_remaining_write_to_split() {
    let pool = FixedBlockMemory::new(nz!(100));
    let mut original = pool.reserve(100);
    original.put_num_ne(1111_u64);

    // Writes into the split half must not bleed into the original.
    let mut split = original.split_off_remaining(40);
    split.put_num_ne(2222_u64);
    split.put_num_ne(3333_u64);
    assert_eq!(split.len(), TWO_U64_SIZE);

    let mut split_bytes = split.consume_all();
    assert_eq!(split_bytes.get_num_ne::<u64>(), 2222);
    assert_eq!(split_bytes.get_num_ne::<u64>(), 3333);

    let mut original_bytes = original.consume_all();
    assert_eq!(original_bytes.get_num_ne::<u64>(), 1111);
}
#[test]
fn split_off_remaining_all_capacity() {
    let pool = FixedBlockMemory::new(nz!(100));
    let mut buf = pool.reserve(100);
    let everything = buf.remaining_capacity();

    // Splitting off all capacity drains the original completely.
    let split = buf.split_off_remaining(everything);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(split.remaining_capacity(), everything);
}
#[test]
fn split_off_remaining_zero() {
    let pool = FixedBlockMemory::new(nz!(100));
    let mut buf = pool.reserve(100);
    let capacity_before = buf.remaining_capacity();

    // A zero-byte split is a no-op for the original and yields an empty
    // buffer with no capacity of its own.
    let split = buf.split_off_remaining(0);
    assert_eq!(buf.remaining_capacity(), capacity_before);
    assert!(split.is_empty());
    assert_eq!(split.remaining_capacity(), 0);
}
#[test]
fn split_off_remaining_multi_block() {
    // 30 bytes of capacity spread over three 10-byte blocks.
    let pool = FixedBlockMemory::new(nz!(10));
    let mut buf = pool.reserve(30);
    buf.put_num_ne(1234_u64);
    let capacity_before = buf.remaining_capacity();

    // A 15-byte split necessarily crosses a block boundary.
    let mut split = buf.split_off_remaining(15);
    assert_eq!(buf.remaining_capacity(), capacity_before - 15);
    assert_eq!(buf.len(), U64_SIZE);

    // The split half is fully writable.
    split.put_byte_repeated(0xAA, 15);
    assert_eq!(split.len(), 15);

    // The original still holds its pre-split payload.
    let mut original_bytes = buf.consume(U64_SIZE);
    assert_eq!(original_bytes.get_num_ne::<u64>(), 1234);
}
#[test]
fn split_off_remaining_empty_buffer_no_capacity() {
    // Splitting zero bytes off a default buffer leaves two empty buffers.
    let mut buf = BytesBuf::new();
    assert_eq!(buf.remaining_capacity(), 0);

    let split = buf.split_off_remaining(0);
    assert_eq!(buf.len(), 0);
    assert_eq!(buf.remaining_capacity(), 0);
    assert_eq!(split.len(), 0);
    assert_eq!(split.remaining_capacity(), 0);
}
#[test]
fn split_off_remaining_panics_on_overflow() {
    // Requesting one byte more than is available must panic.
    let pool = FixedBlockMemory::new(nz!(100));
    let mut buf = pool.reserve(100);
    let available = buf.remaining_capacity();
    assert_panic!(buf.split_off_remaining(available + 1));
}
#[test]
fn split_off_remaining_checked_returns_none() {
    // The checked variant reports overflow via `None` and leaves the
    // original's capacity intact.
    let pool = FixedBlockMemory::new(nz!(100));
    let mut buf = pool.reserve(100);
    let available = buf.remaining_capacity();
    assert!(buf.split_off_remaining_checked(available + 1).is_none());
    assert_eq!(buf.remaining_capacity(), available);
}
fn _can_use_in_dyn_traits(mem: &dyn Memory) {
    // Compile-only probe: `Memory` stays usable as a trait object and the
    // reserve/into_writer API works through it. Never called at runtime.
    let _writer = mem.reserve(123).into_writer(mem);
}
}