use core::{convert::Infallible, fmt};
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
/// Output abstraction used during serialization.
///
/// The underlying storage is addressed from both ends: a "heap" section that
/// grows up from index 0 and a "stack" section that grows down from the end.
/// The `heap` and `stack` arguments of every method are the byte counts
/// already occupied by the two sections.
pub trait Buffer {
/// Error produced when the buffer cannot fit the requested bytes.
/// Infallible implementations use [`core::convert::Infallible`].
type Error;
/// Shorter-lived handle to the same buffer, letting callers pass the
/// buffer down by value without giving up ownership.
type Reborrow<'a>: Buffer<Error = Self::Error>
where
Self: 'a;
/// Returns a reborrowed handle to this buffer.
fn reborrow(&mut self) -> Self::Reborrow<'_>;
/// Writes `bytes` into the stack section, immediately below the current
/// stack top, failing if the free space between the sections is too small.
fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Self::Error>;
/// Reserves `len` bytes at the stack top without writing them, failing if
/// they do not fit. The reserved bytes' contents are unspecified
/// (implementations here zero-fill only in test builds).
fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Self::Error>;
/// Copies `len` bytes from the bottom of the stack section to offset
/// `heap` (the end of the heap section). Infallible: the bytes being moved
/// already fit in the buffer.
fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize);
/// Makes `len` bytes available after the current heap section and returns
/// the heap slice covering `..heap + len`. Implementations that cannot
/// report failure may return an empty slice once exhausted.
fn reserve_heap(
&mut self,
heap: usize,
stack: usize,
len: usize,
) -> Result<&mut [u8], Self::Error>;
}
/// No-op [`Buffer`] used for "dry runs": every operation succeeds and
/// discards its input, so it can size or validate a serialization without
/// writing any bytes.
// `Debug` added for parity with the other public types in this module.
#[derive(Clone, Copy, Debug, Default)]
pub struct DryBuffer;
impl Buffer for DryBuffer {
    type Error = Infallible;
    type Reborrow<'a> = Self;

    /// A copy of this zero-sized buffer acts as the reborrow.
    #[inline(always)]
    fn reborrow(&mut self) -> DryBuffer {
        *self
    }

    /// Accepts and discards the bytes; a dry run never fails.
    #[inline(always)]
    fn write_stack(
        &mut self,
        _heap: usize,
        _stack: usize,
        _bytes: &[u8],
    ) -> Result<(), Infallible> {
        Ok(())
    }

    /// Padding always succeeds; nothing is recorded.
    #[inline(always)]
    fn pad_stack(&mut self, _heap: usize, _stack: usize, _len: usize) -> Result<(), Infallible> {
        Ok(())
    }

    /// Nothing to move in a dry run.
    #[inline(always)]
    fn move_to_heap(&mut self, _heap: usize, _stack: usize, _len: usize) {}

    /// Hands back an empty slice — there is no storage behind a dry buffer.
    #[inline(always)]
    fn reserve_heap(
        &mut self,
        _heap: usize,
        _stack: usize,
        _len: usize,
    ) -> Result<&mut [u8], Infallible> {
        Ok(&mut [])
    }
}
/// Error returned by [`CheckedFixedBuffer`] when a write does not fit in the
/// free space between the heap and stack sections.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct BufferExhausted;
impl fmt::Display for BufferExhausted {
    /// The message is a fixed string, so no formatting machinery is needed.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("buffer exhausted")
    }
}
/// [`Buffer`] backed by a caller-provided fixed slice. Operations fail with
/// [`BufferExhausted`] when the slice runs out of free space.
#[repr(transparent)]
pub struct CheckedFixedBuffer<'a> {
// Heap section at the front of the slice, stack section at the back.
buf: &'a mut [u8],
}
impl<'a> CheckedFixedBuffer<'a> {
#[inline(always)]
pub fn new(buf: &'a mut [u8]) -> Self {
CheckedFixedBuffer { buf }
}
}
impl<'a> Buffer for CheckedFixedBuffer<'a> {
    type Error = BufferExhausted;
    type Reborrow<'b> = CheckedFixedBuffer<'b> where 'a: 'b;

    #[inline(always)]
    fn reborrow(&mut self) -> Self::Reborrow<'_> {
        CheckedFixedBuffer { buf: self.buf }
    }

    /// Writes `bytes` just below the current stack top, failing if the free
    /// space between the heap and stack sections is too small.
    #[inline(always)]
    fn write_stack(
        &mut self,
        heap: usize,
        stack: usize,
        bytes: &[u8],
    ) -> Result<(), BufferExhausted> {
        debug_assert!(heap + stack <= self.buf.len());
        if self.buf.len() - heap - stack < bytes.len() {
            return Err(BufferExhausted);
        }
        // Stack grows downward: the write ends where the stack currently starts.
        let at = self.buf.len() - stack - bytes.len();
        self.buf[at..][..bytes.len()].copy_from_slice(bytes);
        Ok(())
    }

    /// Reserves `len` stack bytes without writing them; zero-fills only in
    /// test builds so tests see deterministic contents.
    #[inline(always)]
    fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), BufferExhausted> {
        debug_assert!(heap + stack <= self.buf.len());
        if self.buf.len() - heap - stack < len {
            return Err(BufferExhausted);
        }
        #[cfg(test)]
        {
            let at = self.buf.len() - stack - len;
            self.buf[at..][..len].fill(0);
        }
        Ok(())
    }

    /// Copies `len` bytes from the bottom of the stack section to offset
    /// `heap`. Infallible: the bytes are already inside the buffer.
    #[inline(always)]
    fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
        // Consistency fix: the `&mut [u8]` and `MaybeFixedBuffer` impls assert
        // this precondition; without it an undersized stack only surfaces as a
        // slice-index panic inside `copy_within`.
        debug_assert!(stack >= len);
        debug_assert!(heap + stack <= self.buf.len());
        let start = self.buf.len() - stack;
        let end = start + len;
        self.buf.copy_within(start..end, heap);
    }

    /// Makes `len` bytes available after the heap section and returns the
    /// heap slice covering `..heap + len`.
    #[inline(always)]
    fn reserve_heap(
        &mut self,
        heap: usize,
        stack: usize,
        len: usize,
    ) -> Result<&mut [u8], BufferExhausted> {
        debug_assert!(heap + stack <= self.buf.len());
        if self.buf.len() - heap - stack < len {
            return Err(BufferExhausted);
        }
        let end = heap + len;
        Ok(&mut self.buf[..end])
    }
}
/// Unchecked slice buffer: capacity violations panic (via the `assert!` in
/// `pad_stack` or slice indexing) instead of returning an error.
impl<'a> Buffer for &'a mut [u8] {
    type Error = Infallible;
    type Reborrow<'b> = &'b mut [u8] where 'a: 'b;

    #[inline(always)]
    fn reborrow(&mut self) -> &'_ mut [u8] {
        self
    }

    /// Writes `bytes` just below the current stack top.
    #[inline(always)]
    fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Infallible> {
        debug_assert!(heap + stack <= self.len());
        let start = self.len() - stack - bytes.len();
        self[start..start + bytes.len()].copy_from_slice(bytes);
        Ok(())
    }

    /// Reserves `len` stack bytes; the hard `assert!` is the only capacity
    /// guard here because the test-only zero-fill below may be compiled out.
    #[inline(always)]
    fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Infallible> {
        debug_assert!(heap + stack <= self.len());
        assert!(self.len() - heap - stack >= len);
        #[cfg(test)]
        {
            let start = self.len() - stack - len;
            self[start..start + len].fill(0);
        }
        Ok(())
    }

    /// Copies `len` bytes from the bottom of the stack section to offset `heap`.
    #[inline(always)]
    fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
        debug_assert!(stack >= len);
        debug_assert!(heap + stack <= self.len());
        let src = self.len() - stack;
        self.copy_within(src..src + len, heap);
    }

    /// Returns the heap slice covering `..heap + len`; panics via slice
    /// indexing if the reservation exceeds the slice.
    #[inline(always)]
    fn reserve_heap(
        &mut self,
        heap: usize,
        stack: usize,
        len: usize,
    ) -> Result<&mut [u8], Infallible> {
        debug_assert!(heap + stack <= self.len());
        Ok(&mut self[..heap + len])
    }
}
/// [`Buffer`] over a fixed slice that never returns an error: once a write
/// does not fit, the shared `exhausted` flag is set and every later operation
/// becomes a no-op.
pub struct MaybeFixedBuffer<'a> {
buf: &'a mut [u8],
// Sticky overflow flag, shared with the caller so it can detect after the
// fact that the buffer was too small.
exhausted: &'a mut bool,
}
impl<'a> MaybeFixedBuffer<'a> {
pub fn new(buf: &'a mut [u8], exhausted: &'a mut bool) -> Self {
MaybeFixedBuffer { buf, exhausted }
}
}
impl<'a> Buffer for MaybeFixedBuffer<'a> {
    type Error = Infallible;
    type Reborrow<'b> = MaybeFixedBuffer<'b> where 'a: 'b;

    #[inline(always)]
    fn reborrow(&mut self) -> Self::Reborrow<'_> {
        MaybeFixedBuffer {
            buf: self.buf,
            exhausted: self.exhausted,
        }
    }

    /// Writes `bytes` below the stack top, or silently sets the exhausted
    /// flag (and drops the bytes) when they do not fit.
    #[inline(always)]
    fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Infallible> {
        if *self.exhausted {
            return Ok(());
        }
        debug_assert!(heap + stack <= self.buf.len());
        if self.buf.len() - heap - stack < bytes.len() {
            *self.exhausted = true;
            return Ok(());
        }
        let at = self.buf.len() - stack - bytes.len();
        self.buf[at..][..bytes.len()].copy_from_slice(bytes);
        Ok(())
    }

    /// Reserves `len` stack bytes, marking exhaustion instead of failing.
    #[inline(always)]
    fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Infallible> {
        if *self.exhausted {
            return Ok(());
        }
        debug_assert!(heap + stack <= self.buf.len());
        if self.buf.len() - heap - stack < len {
            *self.exhausted = true;
        }
        Ok(())
    }

    /// Copies `len` bytes from the bottom of the stack section to offset
    /// `heap`; a no-op once the buffer is exhausted.
    #[inline(always)]
    fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
        debug_assert!(stack >= len);
        if *self.exhausted {
            return;
        }
        debug_assert!(heap + stack <= self.buf.len());
        let start = self.buf.len() - stack;
        let end = start + len;
        self.buf.copy_within(start..end, heap);
    }

    /// Returns the heap slice covering `..heap + len`, or an empty slice once
    /// the buffer is (or becomes) exhausted.
    #[inline(always)]
    fn reserve_heap(
        &mut self,
        heap: usize,
        stack: usize,
        len: usize,
    ) -> Result<&mut [u8], Infallible> {
        if *self.exhausted {
            return Ok(&mut []);
        }
        debug_assert!(heap + stack <= self.buf.len());
        if self.buf.len() - heap - stack < len {
            *self.exhausted = true;
            return Ok(&mut []);
        }
        Ok(&mut self.buf[..heap + len])
    }
}
/// Growable [`Buffer`] backed by a borrowed `Vec<u8>`; operations never fail
/// because the vector is resized on demand.
#[cfg(feature = "alloc")]
pub struct VecBuffer<'a> {
buf: &'a mut Vec<u8>,
}
#[cfg(feature = "alloc")]
impl<'a> VecBuffer<'a> {
    /// Wraps `buf` as a growable buffer; existing contents are preserved.
    pub fn new(buf: &'a mut Vec<u8>) -> Self {
        Self { buf }
    }
}
#[cfg(feature = "alloc")]
impl VecBuffer<'_> {
    /// Grows the vector so at least `additional` free bytes separate the heap
    /// and stack sections, relocating the stack section to the new tail.
    /// Marked `#[cold]` to keep the common already-fits path tight.
    #[cold]
    fn do_reserve(&mut self, heap: usize, stack: usize, additional: usize) {
        let before = self.buf.len();
        self.buf.resize(heap + stack + additional, 0);
        let after = self.buf.len();
        // The stack lives at the end of the vector; move it to the new end.
        self.buf.copy_within(before - stack..before, after - stack);
    }

    /// Ensures `additional` free bytes exist between the two sections.
    fn reserve(&mut self, heap: usize, stack: usize, additional: usize) {
        let available = self.buf.len() - heap - stack;
        if available < additional {
            self.do_reserve(heap, stack, additional);
        }
    }
}
#[cfg(feature = "alloc")]
impl<'a> Buffer for VecBuffer<'a> {
    type Error = Infallible;
    type Reborrow<'b> = VecBuffer<'b> where 'a: 'b;

    #[inline(always)]
    fn reborrow(&mut self) -> Self::Reborrow<'_> {
        VecBuffer { buf: self.buf }
    }

    /// Writes `bytes` below the stack top, growing the vector if needed.
    #[inline(always)]
    fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Infallible> {
        debug_assert!(heap + stack <= self.buf.len());
        self.reserve(heap, stack, bytes.len());
        // Recompute from the (possibly new) length after reserving.
        let at = self.buf.len() - stack - bytes.len();
        self.buf[at..][..bytes.len()].copy_from_slice(bytes);
        Ok(())
    }

    /// Reserves `len` stack bytes, growing if needed; zero-fills only in
    /// test builds so tests see deterministic contents.
    #[inline(always)]
    fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Infallible> {
        debug_assert!(heap + stack <= self.buf.len());
        self.reserve(heap, stack, len);
        #[cfg(test)]
        {
            let at = self.buf.len() - stack - len;
            self.buf[at..][..len].fill(0);
        }
        Ok(())
    }

    /// Copies `len` bytes from the bottom of the stack section to offset `heap`.
    #[inline(always)]
    fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
        debug_assert!(heap + stack <= self.buf.len());
        debug_assert!(stack >= len);
        let src = self.buf.len() - stack;
        self.buf.copy_within(src..src + len, heap);
    }

    /// Returns the heap slice covering `..heap + len`, growing if needed.
    #[inline(always)]
    fn reserve_heap(
        &mut self,
        heap: usize,
        stack: usize,
        len: usize,
    ) -> Result<&mut [u8], Infallible> {
        debug_assert!(heap + stack <= self.buf.len());
        self.reserve(heap, stack, len);
        Ok(&mut self.buf[..heap + len])
    }
}