use alloc::boxed::Box;
use alloc::vec;
use alloc::vec::Vec;
use bytes::Buf;
use core::cmp::{max, min};
use core::iter;
use core::marker::PhantomData;
use core::mem::{self, transmute, MaybeUninit};
use core::ptr;
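/// Whether small prepends take the byte-by-byte self-copy path instead of a
/// bulk copy. On when the `auto-self-copy-optimization` feature is active on
/// x86_64 (unless `prefer-no-self-copy-optimization` is also set), or
/// unconditionally with the `self-copy-optimization` feature.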
const ENABLE_SELF_COPY_OPTIMIZATION: bool = cfg!(any(
all(
feature = "auto-self-copy-optimization",
not(feature = "prefer-no-self-copy-optimization"),
target_arch = "x86_64"
),
feature = "self-copy-optimization"
));
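/// Largest prepend, in bytes, eligible for the stack-staged self-copy path.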
const MAX_SELF_COPY: usize = 9;
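/// Smallest chunk the buffer will ever allocate: two `&[u8]`-sized words'
/// worth of bytes (32 on 64-bit targets).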
const MIN_CHUNK_SIZE: usize = 2 * mem::size_of::<&[u8]>();
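/// A [`Buf`] that can also be written, back-to-front: each `prepend*` call
/// places its bytes *before* everything prepended so far, while reading still
/// proceeds front-to-back.
///
/// Illustrative sketch only (not compiled as a doctest):
///
/// ```ignore
/// let mut buf = ReverseBuffer::new();
/// buf.prepend_slice(b"world!");
/// buf.prepend_slice(b"hello ");
/// assert_eq!(buf.into_vec(), b"hello world!");
/// ```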
pub trait ReverseBuf: Buf {
fn prepend<B: Buf>(&mut self, data: B);
fn remaining_writable(&self) -> usize;
#[inline]
fn prepend_slice(&mut self, data: &[u8]) {
self.prepend(data)
}
#[inline]
fn prepend_array<const N: usize>(&mut self, data: [u8; N]) {
self.prepend_slice(&data)
}
#[inline]
fn prepend_u8(&mut self, n: u8) {
self.prepend_array([n]);
}
#[inline]
fn prepend_i8(&mut self, n: i8) {
self.prepend_u8(n as u8);
}
#[inline]
fn prepend_u16_le(&mut self, n: u16) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_i16_le(&mut self, n: i16) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_u32_le(&mut self, n: u32) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_i32_le(&mut self, n: i32) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_u64_le(&mut self, n: u64) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_i64_le(&mut self, n: i64) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_f32_le(&mut self, n: f32) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_f64_le(&mut self, n: f64) {
self.prepend_array(n.to_le_bytes())
}
#[inline]
fn prepend_u16_be(&mut self, n: u16) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_i16_be(&mut self, n: i16) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_u32_be(&mut self, n: u32) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_i32_be(&mut self, n: i32) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_u64_be(&mut self, n: u64) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_i64_be(&mut self, n: i64) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_f32_be(&mut self, n: f32) {
self.prepend_array(n.to_be_bytes())
}
#[inline]
fn prepend_f64_be(&mut self, n: f64) {
self.prepend_array(n.to_be_bytes())
}
}
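/// A byte buffer that grows at the front.
///
/// Storage is a stack of chunks in reverse reading order: `chunks.last()` is
/// the front chunk, and `front` is the index of its first initialized byte.
/// Prepending moves `front` toward zero; once the front chunk is full, a new
/// chunk is pushed, sized by `new_allocation_size`. `capacity` is the total
/// size of all chunks, so the buffer's length is `capacity - front`.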
#[derive(Clone)]
pub struct ReverseBuffer {
chunks: Vec<Box<[MaybeUninit<u8>]>>,
front: usize,
capacity: usize,
planned_allocation: usize,
planned_exact: bool,
keep_back: bool,
_phantom_data: PhantomData<u8>,
}
impl ReverseBuffer {
pub fn new() -> Self {
Self {
chunks: Vec::new(),
front: 0,
capacity: 0,
planned_allocation: 0,
planned_exact: false,
keep_back: false,
_phantom_data: PhantomData,
}
}
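    /// Creates a buffer with one pre-allocated chunk of `capacity` bytes.
    /// That chunk is kept across [`Self::clear`] and full reads.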
pub fn with_capacity(capacity: usize) -> Self {
if capacity == 0 {
return Self::new();
}
Self {
chunks: vec![Self::allocate_chunk(capacity)],
front: capacity,
capacity,
planned_allocation: 0,
planned_exact: false,
keep_back: true,
_phantom_data: PhantomData,
}
}
#[inline]
pub fn len(&self) -> usize {
debug_assert!(!(self.chunks.is_empty() && self.front > 0));
debug_assert_eq!(self.chunks.is_empty(), self.capacity == 0);
self.capacity - self.front
}
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
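    /// Empties the buffer. If it was created with [`Self::with_capacity`],
    /// the original chunk is retained; otherwise all storage is released.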
pub fn clear(&mut self) {
if self.keep_back {
self.chunks.truncate(1);
self.front = self.chunks[0].len();
self.capacity = self.front;
(self.planned_allocation, self.planned_exact) = (0, false);
} else {
*self = Self::new()
}
}
#[inline]
pub fn capacity(&self) -> usize {
self.capacity
}
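    /// Returns the entire contents as one slice when at most one chunk is in
    /// use, or `None` when the data is split across chunks.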
pub fn contiguous(&self) -> Option<&[u8]> {
match self.chunks.len() {
0..=1 => Some(self.chunk()),
_ => None,
}
}
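    /// Consumes the buffer into a `Vec<u8>`, reusing the allocation when the
    /// contents exactly fill a single chunk.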
pub fn into_vec(mut self) -> Vec<u8> {
if self.chunks.len() == 1 && self.front == 0 {
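            // SAFETY: a single chunk with `front == 0` is fully initialized,
            // so reinterpreting it as `Box<[u8]>` is sound.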
let whole_buffer: Box<[u8]> = unsafe { transmute(self.chunks.pop().unwrap()) };
            return whole_buffer.into_vec();
}
let mut res = Vec::with_capacity(self.len());
bytes::BufMut::put(&mut res, self);
res
}
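    /// Notes that at least `additional` more bytes will be prepended, so the
    /// next allocation (if any) is sized to fit them. Does not allocate by
    /// itself.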
#[inline(always)]
pub fn plan_reservation(&mut self, additional: usize) {
        let Some(more_needed) = additional.checked_sub(self.front) else {
            return;
        };
        if self.planned_allocation > more_needed {
            return;
        }
        (self.planned_allocation, self.planned_exact) = (more_needed, false);
}
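    /// Like [`Self::plan_reservation`], but the next chunk is allocated at an
    /// exact size (the current capacity plus the shortfall) instead of going
    /// through the doubling growth policy.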
#[inline]
pub fn plan_reservation_exact(&mut self, additional: usize) {
let Some(more_needed) = additional.checked_sub(self.front) else {
return;
};
(self.planned_allocation, self.planned_exact) = (self.capacity + more_needed, true);
}
#[inline]
fn front_chunk_mut(&mut self) -> &mut [MaybeUninit<u8>] {
self.chunks.last_mut().map(Box::as_mut).unwrap_or(&mut [])
}
#[inline]
    fn allocate_chunk(new_chunk_size: usize) -> Box<[MaybeUninit<u8>]> {
        debug_assert!(new_chunk_size > 0);
        let layout = alloc::alloc::Layout::array::<u8>(new_chunk_size).unwrap();
        unsafe {
            let new_allocation = alloc::alloc::alloc(layout) as *mut MaybeUninit<u8>;
            if new_allocation.is_null() {
                // Allocation failure must abort rather than produce a null Box.
                alloc::alloc::handle_alloc_error(layout);
            }
            Box::from_raw(ptr::slice_from_raw_parts_mut(new_allocation, new_chunk_size))
        }
    }
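    /// Picks the size of the next chunk: the exact planned size when one was
    /// requested, otherwise the largest of `MIN_CHUNK_SIZE`, the current
    /// capacity (which doubles the buffer), and any non-exact planned amount.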
#[inline]
fn new_allocation_size(&self) -> usize {
if self.planned_exact {
debug_assert_ne!(self.planned_allocation, 0);
self.planned_allocation
} else {
max(max(MIN_CHUNK_SIZE, self.capacity), self.planned_allocation)
}
}
#[inline(never)]
#[cold]
fn grow_slow(&mut self) {
self.grow();
}
#[inline]
fn grow(&mut self) {
let new_chunk_size = self.new_allocation_size();
        self.chunks.push(Self::allocate_chunk(new_chunk_size));
        self.front += new_chunk_size;
        self.capacity += new_chunk_size;
(self.planned_allocation, self.planned_exact) = (0, false);
}
#[inline(never)]
#[cold]
fn grow_and_copy_buf<B: Buf>(&mut self, mut data: B, prepending_len: usize) {
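        // `data` is split across two chunks: its head fills the tail of the
        // freshly allocated chunk, and the rest fills the space remaining
        // below `front` in the old front chunk.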
self.plan_reservation(prepending_len);
let new_chunk_size = self.new_allocation_size();
let mut new_chunk = Self::allocate_chunk(new_chunk_size);
let old_front = self.front;
let new_front = new_chunk_size + self.front - prepending_len;
debug_assert!(new_front < new_chunk.len());
copy_buf(&mut data, unsafe {
new_chunk.get_unchecked_mut(new_front..)
});
debug_assert!(self
.chunks
.last()
.map_or(true, |old_front_chunk| old_front < old_front_chunk.len()));
copy_buf(&mut data, unsafe {
self.front_chunk_mut().get_unchecked_mut(..old_front)
});
debug_assert_eq!(data.remaining(), 0);
        self.chunks.push(new_chunk);
        self.front = new_front;
        self.capacity += new_chunk_size;
(self.planned_allocation, self.planned_exact) = (0, false);
}
#[inline(never)]
#[cold]
fn grow_and_copy_slice(&mut self, data: &[u8]) {
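        // If the old front chunk still has free space, fill it with the tail
        // of `data` first; the head then goes into the newly grown chunk.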
self.plan_reservation(data.len());
let new_front;
if self.front == 0 {
self.grow();
new_front = self.front - data.len();
            unsafe {
                ptr::copy_nonoverlapping(
                    data.as_ptr(),
                    self.front_chunk_mut()[new_front..].as_mut_ptr() as *mut u8,
                    data.len(),
                );
            }
} else {
let (data_front, data_back) = data.split_at(data.len() - self.front);
            unsafe {
                ptr::copy_nonoverlapping(
                    data_back.as_ptr(),
                    self.front_chunk_mut().as_mut_ptr() as *mut u8,
                    self.front,
                );
            }
self.grow();
new_front = self.front - data.len();
debug_assert_eq!(new_front + data_front.len(), self.front_chunk_mut().len());
            unsafe {
                ptr::copy_nonoverlapping(
                    data_front.as_ptr(),
                    self.front_chunk_mut()[new_front..].as_mut_ptr() as *mut u8,
                    data_front.len(),
                );
            }
}
self.front = new_front;
}
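    /// Returns a borrowing [`Buf`] reader over the current contents, leaving
    /// the buffer itself untouched.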
pub fn buf_reader(&self) -> ReverseBufferReader<'_> {
ReverseBufferReader {
chunks: self.chunks.as_slice(),
front: self.front,
capacity: self.capacity,
}
}
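    /// Iterates the contents as slices in reading order, suitable for
    /// vectored I/O.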
pub fn slices(&self) -> impl Iterator<Item = &[u8]> {
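        // SAFETY: all bytes from `front` onward are initialized.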
unsafe { to_vectorable_slices(&self.chunks, self.front) }
}
}
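/// Fills `dest_chunk` completely with the next bytes from `data`, advancing
/// `data` past everything copied.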
#[inline(always)]
fn copy_buf<B: Buf>(data: &mut B, mut dest_chunk: &mut [MaybeUninit<u8>]) {
debug_assert!(data.remaining() >= dest_chunk.len());
while !dest_chunk.is_empty() {
let src = data.chunk();
let copy_size = min(src.len(), dest_chunk.len());
unsafe {
ptr::copy_nonoverlapping(src.as_ptr(), dest_chunk.as_mut_ptr() as *mut u8, copy_size);
}
dest_chunk = &mut dest_chunk[copy_size..];
data.advance(copy_size);
}
}
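/// Yields the initialized contents of `chunks` as plain byte slices in
/// reading order: the front (last) chunk from `front` onward, then the
/// remaining chunks from back to front.
///
/// # Safety
///
/// Every byte from `front` onward in the last chunk, and every byte of all
/// other chunks, must be initialized.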
#[inline(always)]
unsafe fn to_vectorable_slices(
chunks: &[Box<[MaybeUninit<u8>]>],
front: usize,
) -> impl Iterator<Item = &[u8]> {
chunks
.split_last()
.into_iter()
.flat_map(move |(front_chunk, rest)| {
iter::once(&front_chunk[front..]).chain(rest.iter().rev().map(Box::as_ref))
})
.map(|m| unsafe { transmute::<&[MaybeUninit<u8>], &[u8]>(m) })
}
impl ReverseBuf for ReverseBuffer {
#[inline(always)]
fn prepend<B: Buf>(&mut self, mut data: B) {
let prepending_len = data.remaining();
if prepending_len == 0 {
return;
}
if prepending_len <= self.front {
let new_front = self.front - prepending_len;
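            // For tiny prepends, stage the bytes in a fixed-size stack array
            // and write them back-to-front one byte at a time.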
if ENABLE_SELF_COPY_OPTIMIZATION && prepending_len <= MAX_SELF_COPY {
let mut data_to_slice = [0; MAX_SELF_COPY];
data.copy_to_slice(&mut data_to_slice[..prepending_len]);
let old_front = self.front;
for (from, to) in data_to_slice[..prepending_len]
.iter()
.rev()
.zip(self.front_chunk_mut()[..old_front].iter_mut().rev())
{
*to = MaybeUninit::new(*from);
}
} else {
let dest_range = new_front..self.front;
copy_buf(&mut data, &mut self.front_chunk_mut()[dest_range]);
debug_assert_eq!(data.remaining(), 0);
}
            self.front = new_front;
        } else {
            self.grow_and_copy_buf(data, prepending_len);
        }
}
fn remaining_writable(&self) -> usize {
usize::MAX - self.capacity
}
#[inline(always)]
fn prepend_slice(&mut self, data: &[u8]) {
if let Some(new_front) = self.front.checked_sub(data.len()) {
if ENABLE_SELF_COPY_OPTIMIZATION && data.len() <= MAX_SELF_COPY {
let old_front = self.front;
for (from, to) in data
.iter()
.rev()
.zip(self.front_chunk_mut()[..old_front].iter_mut().rev())
{
*to = MaybeUninit::new(*from);
}
} else {
unsafe {
ptr::copy_nonoverlapping(
data.as_ptr(),
self.front_chunk_mut()[new_front..].as_mut_ptr() as *mut u8,
data.len(),
);
}
}
self.front = new_front;
} else {
self.grow_and_copy_slice(data);
}
}
#[inline(always)]
fn prepend_array<const N: usize>(&mut self, data: [u8; N]) {
if let Some(new_front) = self.front.checked_sub(N) {
unsafe {
ptr::copy_nonoverlapping(
data.as_ptr(),
self.front_chunk_mut()[new_front..].as_mut_ptr() as *mut u8,
data.len(),
);
}
self.front = new_front;
} else {
self.grow_and_copy_slice(&data);
}
}
#[inline(always)]
fn prepend_u8(&mut self, byte: u8) {
if self.front == 0 {
self.grow_slow();
}
let new_front = self.front - 1;
self.front_chunk_mut()[new_front].write(byte);
self.front = new_front;
}
}
impl Default for ReverseBuffer {
fn default() -> Self {
Self::new()
}
}
impl Buf for ReverseBuffer {
#[inline]
fn remaining(&self) -> usize {
self.len()
}
#[inline(always)]
fn chunk(&self) -> &[u8] {
let Some(front_chunk) = self.chunks.last() else {
return &[];
};
        debug_assert!(self.front <= front_chunk.len());
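        // SAFETY: bytes from `front` to the end of the front chunk are
        // initialized.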
unsafe { transmute(front_chunk.get_unchecked(self.front..)) }
}
#[inline]
fn advance(&mut self, cnt: usize) {
if cnt == 0 {
return;
}
        if cnt > self.len() {
            panic!("advanced past end");
        }
self.front += cnt;
while let Some(front_chunk) = self.chunks.last() {
let front_chunk_size = front_chunk.len();
if self.front < front_chunk_size {
break;
}
if self.keep_back && self.chunks.len() == 1 {
debug_assert!(self.capacity == self.front);
break;
}
drop(self.chunks.pop());
self.capacity -= front_chunk_size;
self.front -= front_chunk_size;
(self.planned_allocation, self.planned_exact) = (0, false);
}
}
}
impl From<ReverseBuffer> for Vec<u8> {
fn from(value: ReverseBuffer) -> Self {
value.into_vec()
}
}
impl From<Box<[u8]>> for ReverseBuffer {
fn from(value: Box<[u8]>) -> Self {
Self {
capacity: value.len(),
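            // SAFETY: `Box<[u8]>` and `Box<[MaybeUninit<u8>]>` share the same
            // layout, and initialized bytes remain valid as `MaybeUninit`.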
chunks: vec![unsafe { transmute::<Box<[u8]>, Box<[MaybeUninit<u8>]>>(value) }],
            front: 0,
            ..Self::new()
}
}
}
impl From<Vec<u8>> for ReverseBuffer {
fn from(value: Vec<u8>) -> Self {
Self::from(value.into_boxed_slice())
}
}
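/// A borrowing [`Buf`] reader over a [`ReverseBuffer`]'s contents; advancing
/// it does not consume the underlying buffer.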
pub struct ReverseBufferReader<'a> {
chunks: &'a [Box<[MaybeUninit<u8>]>],
front: usize,
capacity: usize,
}
impl ReverseBufferReader<'_> {
pub fn contiguous(&self) -> Option<&[u8]> {
match self.chunks.len() {
0..=1 => Some(self.chunk()),
_ => None,
}
}
pub fn slices(&self) -> impl Iterator<Item = &[u8]> {
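        // SAFETY: all bytes from `front` onward are initialized.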
unsafe { to_vectorable_slices(self.chunks, self.front) }
}
}
impl Buf for ReverseBufferReader<'_> {
#[inline]
fn remaining(&self) -> usize {
self.capacity - self.front
}
#[inline(always)]
fn chunk(&self) -> &[u8] {
let Some(front_chunk) = self.chunks.last() else {
return &[];
};
        debug_assert!(self.front <= front_chunk.len());
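        // SAFETY: bytes from `front` to the end of the front chunk are
        // initialized.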
unsafe { transmute(front_chunk.get_unchecked(self.front..)) }
}
#[inline]
fn advance(&mut self, cnt: usize) {
if cnt == 0 {
return;
}
        if cnt > self.remaining() {
            panic!("advanced past end");
        }
self.front += cnt;
while let Some(front_chunk) = self.chunks.last() {
if self.front < front_chunk.len() {
break;
}
self.chunks = &self.chunks[..self.chunks.len() - 1];
self.front -= front_chunk.len();
self.capacity -= front_chunk.len();
}
}
}
#[cfg(test)]
mod test {
use super::{ReverseBuf, ReverseBuffer};
use alloc::vec::Vec;
use bytes::{Buf, BufMut};
fn compare_buf(buf: impl Buf, expected: &[u8]) {
let mut read = Vec::new();
read.put(buf);
assert_eq!(read, expected);
}
#[allow(clippy::len_zero)]
fn check_read(buf: ReverseBuffer, expected: &[u8]) {
assert_eq!(buf.len(), expected.len());
assert_eq!(buf.is_empty(), buf.len() == 0);
let mut vectored = Vec::new();
for slice in buf.slices() {
vectored.extend_from_slice(slice);
}
assert_eq!(vectored, expected);
let mut vectored = Vec::new();
for slice in buf.buf_reader().slices() {
vectored.extend_from_slice(slice);
}
assert_eq!(vectored, expected);
compare_buf(buf.buf_reader(), expected);
compare_buf(buf, expected);
}
#[test]
fn fresh() {
check_read(ReverseBuffer::new(), b"");
}
#[test]
fn fresh_with_plan_still_empty() {
let mut buf = ReverseBuffer::new();
buf.plan_reservation(100);
assert!(buf.is_empty());
assert_eq!(buf.len(), 0);
check_read(buf, b"");
}
#[test]
fn build_and_read() {
let mut buf = ReverseBuffer::new();
buf.prepend_slice(b"!");
buf.prepend_slice(b"world");
buf.prepend_slice(b"hello ");
check_read(buf, b"hello world!");
}
#[test]
fn build_bigger_and_read() {
let mut buf = ReverseBuffer::new();
buf.prepend_slice(b"!");
buf.prepend_slice(b"world");
buf.prepend_slice(b"hello ");
let mut buf2 = ReverseBuffer::new();
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        assert_eq!(buf.chunks.len(), 3);
check_read(
buf.clone(),
b"hello world!hello world!hello world!hello world!hello world!hello world!\
hello world!hello world!hello world!hello world!hello world!hello world!hello world!",
);
check_read(
buf,
b"hello world!hello world!hello world!hello world!hello world!hello world!\
hello world!hello world!hello world!hello world!hello world!hello world!hello world!",
);
}
#[test]
fn build_with_planned_reservation() {
let mut buf = ReverseBuffer::new();
buf.prepend_slice(b"!");
buf.prepend_slice(b"world");
buf.prepend_slice(b"hello ");
buf.plan_reservation(b"hello world!".len() * 12);
let mut buf2 = ReverseBuffer::new();
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        assert_eq!(buf.chunks.len(), 2);
assert_eq!(buf.capacity() - buf.len(), 0);
check_read(
buf,
b"hello world!hello world!hello world!hello world!hello world!hello world!\
hello world!hello world!hello world!hello world!hello world!hello world!hello world!",
);
}
#[test]
fn build_with_initial_planned_reservation() {
let mut buf = ReverseBuffer::new();
buf.plan_reservation(b"hello world!".len() * 13);
buf.prepend_slice(b"!");
buf.prepend_slice(b"world");
buf.prepend_slice(b"hello ");
let mut buf2 = ReverseBuffer::new();
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        buf2.prepend(buf.buf_reader());
        buf.prepend(buf2.buf_reader());
        assert_eq!(buf.chunks.len(), 1);
        assert_eq!(buf.capacity() - buf.len(), 0);
        check_read(
buf,
b"hello world!hello world!hello world!hello world!hello world!hello world!\
hello world!hello world!hello world!hello world!hello world!hello world!hello world!",
);
}
#[test]
fn build_with_exact_planned_reservation() {
let mut buf = ReverseBuffer::new();
buf.plan_reservation_exact(b"hello world!".len());
buf.prepend_slice(b"!");
buf.prepend_slice(b"world");
buf.prepend_slice(b"hello ");
        assert_eq!(buf.chunks.len(), 1);
        assert_eq!(buf.capacity() - buf.len(), 0);
        check_read(buf, b"hello world!");
}
#[test]
fn build_with_capacity() {
let mut buf = ReverseBuffer::with_capacity(b"hello world!".len());
assert_eq!(buf.capacity(), b"hello world!".len());
assert!(buf.is_empty());
assert_eq!(buf.chunks.len(), 1);
buf.prepend_slice(b"!");
buf.prepend_slice(b"world");
buf.prepend_slice(b"hello ");
assert_eq!(buf.capacity(), b"hello world!".len());
assert_eq!(buf.len(), buf.capacity());
assert_eq!(buf.chunks.len(), 1);
buf.prepend_slice(b"12345");
assert_eq!(buf.chunks.len(), 2);
assert!(buf.capacity() > buf.len());
check_read(buf, b"12345hello world!");
}
#[test]
fn single_prepend_allocates_once() {
let mut buf = ReverseBuffer::with_capacity(4);
buf.prepend_slice(b"aa");
buf.prepend_slice(&[0; 100]);
assert_eq!(buf.len(), 102);
assert_eq!(buf.capacity(), buf.len());
assert_eq!(buf.chunks.len(), 2);
buf.clear();
assert_eq!(buf.chunks.len(), 1);
assert_eq!(buf.capacity(), 4);
assert!(buf.is_empty());
}
#[test]
fn fully_advancing_keep_back_reversebuffer_does_not_drop_back() {
let mut buf = ReverseBuffer::with_capacity(4);
buf.prepend_slice(b"asdfa");
assert_eq!(buf.chunks.len(), 2);
assert!(buf.capacity() > 5);
assert_eq!(buf.remaining(), 5);
compare_buf(&mut buf, b"asdfa");
assert_eq!(buf.capacity(), 4);
buf = ReverseBuffer::new();
buf.plan_reservation_exact(4);
buf.prepend_slice(b"as");
buf.prepend_slice(b"dfa");
assert_eq!(buf.chunks.len(), 2);
assert!(buf.capacity() > 5);
assert_eq!(buf.remaining(), 5);
compare_buf(&mut buf, b"dfaas");
        assert_eq!(buf.capacity(), 0);
    }
#[test]
fn prepending_small_bufs() {
let mut buf = ReverseBuffer::new();
buf.prepend("hello".as_bytes());
buf.prepend("why ".as_bytes());
check_read(buf, b"why hello");
}
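    // Multi-byte integer prepends land back-to-front: the most recently
    // prepended value reads first.
    #[test]
    fn prepend_integers() {
        let mut buf = ReverseBuffer::new();
        buf.prepend_u16_be(0x0304);
        buf.prepend_u8(0x02);
        buf.prepend_u8(0x01);
        check_read(buf, &[0x01, 0x02, 0x03, 0x04]);
    }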
}