use crate::buf::{limit, Chain, Limit, UninitSlice};
#[cfg(feature = "std")]
use crate::buf::{writer, Writer};
use core::{cmp, mem, ptr, usize};
use alloc::{boxed::Box, vec::Vec};
/// A trait for values that provide sequential write access to bytes.
///
/// Writing proceeds through a cursor: `chunk_mut` exposes the (possibly
/// uninitialized) memory at the current position and `advance_mut` commits
/// bytes that have been written.
///
/// # Safety (for implementors)
///
/// Implementations must return valid writable memory from `chunk_mut` and
/// must only treat bytes as initialized once `advance_mut` has been called
/// for them; the unsafe callers below rely on these invariants.
pub unsafe trait BufMut {
    /// Returns the number of bytes that can still be written to `self`.
    fn remaining_mut(&self) -> usize;

    /// Advances the write cursor by `cnt` bytes.
    ///
    /// # Safety
    ///
    /// The caller must have initialized the first `cnt` bytes of the slice
    /// returned by `chunk_mut` before calling this.
    unsafe fn advance_mut(&mut self, cnt: usize);

    /// Returns `true` if there is room to write at least one more byte.
    fn has_remaining_mut(&self) -> bool {
        self.remaining_mut() > 0
    }

    /// Returns a mutable slice of (possibly uninitialized) memory starting at
    /// the current write position. The slice may be shorter than
    /// `remaining_mut()` when the underlying storage is non-contiguous.
    #[cfg_attr(docsrs, doc(alias = "bytes_mut"))]
    fn chunk_mut(&mut self) -> &mut UninitSlice;

    /// Transfers all bytes from `src` into `self`, advancing both cursors.
    ///
    /// # Panics
    ///
    /// Panics if `self` cannot hold `src.remaining()` more bytes.
    fn put<T: super::Buf>(&mut self, mut src: T)
    where
        Self: Sized,
    {
        assert!(self.remaining_mut() >= src.remaining());
        while src.has_remaining() {
            let l;
            // SAFETY: `l` is clamped to the length of both the source and
            // destination chunks; `src` and `self` are distinct borrows, so
            // the regions are presumed non-overlapping.
            unsafe {
                let s = src.chunk();
                let d = self.chunk_mut();
                l = cmp::min(s.len(), d.len());
                ptr::copy_nonoverlapping(s.as_ptr(), d.as_mut_ptr() as *mut u8, l);
            }
            src.advance(l);
            // SAFETY: the first `l` bytes of the chunk were just written.
            unsafe {
                self.advance_mut(l);
            }
        }
    }

    /// Copies all of `src` into `self`, spanning multiple chunks if needed.
    ///
    /// # Panics
    ///
    /// Panics if `self` has fewer than `src.len()` bytes remaining.
    fn put_slice(&mut self, src: &[u8]) {
        let mut off = 0;
        assert!(
            self.remaining_mut() >= src.len(),
            "buffer overflow; remaining = {}; src = {}",
            self.remaining_mut(),
            src.len()
        );
        while off < src.len() {
            let cnt;
            // SAFETY: `cnt` is bounded by both the destination chunk length
            // and the bytes of `src` not yet copied.
            unsafe {
                let dst = self.chunk_mut();
                cnt = cmp::min(dst.len(), src.len() - off);
                ptr::copy_nonoverlapping(src[off..].as_ptr(), dst.as_mut_ptr() as *mut u8, cnt);
                off += cnt;
            }
            // SAFETY: `cnt` bytes of the chunk were just initialized.
            unsafe {
                self.advance_mut(cnt);
            }
        }
    }

    /// Writes the byte `val` to `self` `cnt` times. The default goes one
    /// byte at a time; implementors may override with a bulk fill.
    fn put_bytes(&mut self, val: u8, cnt: usize) {
        for _ in 0..cnt {
            self.put_u8(val);
        }
    }

    /// Writes an unsigned 8-bit integer to `self`.
    fn put_u8(&mut self, n: u8) {
        let src = [n];
        self.put_slice(&src);
    }

    /// Writes a signed 8-bit integer to `self`.
    fn put_i8(&mut self, n: i8) {
        let src = [n as u8];
        self.put_slice(&src)
    }

    /// Writes an unsigned 16-bit integer in big-endian byte order.
    fn put_u16(&mut self, n: u16) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes an unsigned 16-bit integer in little-endian byte order.
    fn put_u16_le(&mut self, n: u16) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes an unsigned 16-bit integer in native-endian byte order.
    fn put_u16_ne(&mut self, n: u16) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes a signed 16-bit integer in big-endian byte order.
    fn put_i16(&mut self, n: i16) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes a signed 16-bit integer in little-endian byte order.
    fn put_i16_le(&mut self, n: i16) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes a signed 16-bit integer in native-endian byte order.
    fn put_i16_ne(&mut self, n: i16) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes an unsigned 32-bit integer in big-endian byte order.
    fn put_u32(&mut self, n: u32) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes an unsigned 32-bit integer in little-endian byte order.
    fn put_u32_le(&mut self, n: u32) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes an unsigned 32-bit integer in native-endian byte order.
    fn put_u32_ne(&mut self, n: u32) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes a signed 32-bit integer in big-endian byte order.
    fn put_i32(&mut self, n: i32) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes a signed 32-bit integer in little-endian byte order.
    fn put_i32_le(&mut self, n: i32) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes a signed 32-bit integer in native-endian byte order.
    fn put_i32_ne(&mut self, n: i32) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes an unsigned 64-bit integer in big-endian byte order.
    fn put_u64(&mut self, n: u64) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes an unsigned 64-bit integer in little-endian byte order.
    fn put_u64_le(&mut self, n: u64) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes an unsigned 64-bit integer in native-endian byte order.
    fn put_u64_ne(&mut self, n: u64) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes a signed 64-bit integer in big-endian byte order.
    fn put_i64(&mut self, n: i64) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes a signed 64-bit integer in little-endian byte order.
    fn put_i64_le(&mut self, n: i64) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes a signed 64-bit integer in native-endian byte order.
    fn put_i64_ne(&mut self, n: i64) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes an unsigned 128-bit integer in big-endian byte order.
    fn put_u128(&mut self, n: u128) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes an unsigned 128-bit integer in little-endian byte order.
    fn put_u128_le(&mut self, n: u128) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes an unsigned 128-bit integer in native-endian byte order.
    fn put_u128_ne(&mut self, n: u128) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes a signed 128-bit integer in big-endian byte order.
    fn put_i128(&mut self, n: i128) {
        self.put_slice(&n.to_be_bytes())
    }

    /// Writes a signed 128-bit integer in little-endian byte order.
    fn put_i128_le(&mut self, n: i128) {
        self.put_slice(&n.to_le_bytes())
    }

    /// Writes a signed 128-bit integer in native-endian byte order.
    fn put_i128_ne(&mut self, n: i128) {
        self.put_slice(&n.to_ne_bytes())
    }

    /// Writes the `nbytes` least-significant bytes of `n` in big-endian order.
    ///
    /// # Panics
    ///
    /// Panics if `nbytes` is greater than 8 (the slice start underflows /
    /// goes out of bounds).
    fn put_uint(&mut self, n: u64, nbytes: usize) {
        self.put_slice(&n.to_be_bytes()[mem::size_of_val(&n) - nbytes..]);
    }

    /// Writes the `nbytes` least-significant bytes of `n` in little-endian
    /// order. Panics if `nbytes` is greater than 8.
    fn put_uint_le(&mut self, n: u64, nbytes: usize) {
        self.put_slice(&n.to_le_bytes()[0..nbytes]);
    }

    /// Writes the `nbytes` least-significant bytes of `n` in native-endian
    /// order. Panics if `nbytes` is greater than 8.
    fn put_uint_ne(&mut self, n: u64, nbytes: usize) {
        if cfg!(target_endian = "big") {
            self.put_uint(n, nbytes)
        } else {
            self.put_uint_le(n, nbytes)
        }
    }

    /// Writes the `nbytes` low bytes of signed `n` in big-endian order.
    /// Panics if `nbytes` is greater than 8.
    fn put_int(&mut self, n: i64, nbytes: usize) {
        self.put_slice(&n.to_be_bytes()[mem::size_of_val(&n) - nbytes..]);
    }

    /// Writes the `nbytes` low bytes of signed `n` in little-endian order.
    /// Panics if `nbytes` is greater than 8.
    fn put_int_le(&mut self, n: i64, nbytes: usize) {
        self.put_slice(&n.to_le_bytes()[0..nbytes]);
    }

    /// Writes the `nbytes` low bytes of signed `n` in native-endian order.
    /// Panics if `nbytes` is greater than 8.
    fn put_int_ne(&mut self, n: i64, nbytes: usize) {
        if cfg!(target_endian = "big") {
            self.put_int(n, nbytes)
        } else {
            self.put_int_le(n, nbytes)
        }
    }

    /// Writes an IEEE-754 single-precision float in big-endian byte order.
    fn put_f32(&mut self, n: f32) {
        self.put_u32(n.to_bits());
    }

    /// Writes an IEEE-754 single-precision float in little-endian byte order.
    fn put_f32_le(&mut self, n: f32) {
        self.put_u32_le(n.to_bits());
    }

    /// Writes an IEEE-754 single-precision float in native-endian byte order.
    fn put_f32_ne(&mut self, n: f32) {
        self.put_u32_ne(n.to_bits());
    }

    /// Writes an IEEE-754 double-precision float in big-endian byte order.
    fn put_f64(&mut self, n: f64) {
        self.put_u64(n.to_bits());
    }

    /// Writes an IEEE-754 double-precision float in little-endian byte order.
    fn put_f64_le(&mut self, n: f64) {
        self.put_u64_le(n.to_bits());
    }

    /// Writes an IEEE-754 double-precision float in native-endian byte order.
    fn put_f64_ne(&mut self, n: f64) {
        self.put_u64_ne(n.to_bits());
    }

    /// Returns an adaptor that limits writes to at most `limit` bytes.
    fn limit(self, limit: usize) -> Limit<Self>
    where
        Self: Sized,
    {
        limit::new(self, limit)
    }

    /// Returns an adaptor implementing `std::io::Write` over this buffer.
    #[cfg(feature = "std")]
    #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
    fn writer(self) -> Writer<Self>
    where
        Self: Sized,
    {
        writer::new(self)
    }

    /// Returns an adaptor that writes to `self` first, then to `next` once
    /// `self` is full.
    fn chain_mut<U: BufMut>(self, next: U) -> Chain<Self, U>
    where
        Self: Sized,
    {
        Chain::new(self, next)
    }
}
/// Expands to `BufMut` method bodies that forward to the dereferenced
/// target (`**self`). Used by the `&mut T` and `Box<T>` impls below so that
/// specialized overrides on the inner type are not lost behind the trait's
/// default implementations.
macro_rules! deref_forward_bufmut {
    () => {
        fn remaining_mut(&self) -> usize {
            (**self).remaining_mut()
        }
        fn chunk_mut(&mut self) -> &mut UninitSlice {
            (**self).chunk_mut()
        }
        unsafe fn advance_mut(&mut self, cnt: usize) {
            (**self).advance_mut(cnt)
        }
        fn put_slice(&mut self, src: &[u8]) {
            (**self).put_slice(src)
        }
        fn put_u8(&mut self, n: u8) {
            (**self).put_u8(n)
        }
        fn put_i8(&mut self, n: i8) {
            (**self).put_i8(n)
        }
        fn put_u16(&mut self, n: u16) {
            (**self).put_u16(n)
        }
        fn put_u16_le(&mut self, n: u16) {
            (**self).put_u16_le(n)
        }
        fn put_u16_ne(&mut self, n: u16) {
            (**self).put_u16_ne(n)
        }
        fn put_i16(&mut self, n: i16) {
            (**self).put_i16(n)
        }
        fn put_i16_le(&mut self, n: i16) {
            (**self).put_i16_le(n)
        }
        fn put_i16_ne(&mut self, n: i16) {
            (**self).put_i16_ne(n)
        }
        fn put_u32(&mut self, n: u32) {
            (**self).put_u32(n)
        }
        fn put_u32_le(&mut self, n: u32) {
            (**self).put_u32_le(n)
        }
        fn put_u32_ne(&mut self, n: u32) {
            (**self).put_u32_ne(n)
        }
        fn put_i32(&mut self, n: i32) {
            (**self).put_i32(n)
        }
        fn put_i32_le(&mut self, n: i32) {
            (**self).put_i32_le(n)
        }
        fn put_i32_ne(&mut self, n: i32) {
            (**self).put_i32_ne(n)
        }
        fn put_u64(&mut self, n: u64) {
            (**self).put_u64(n)
        }
        fn put_u64_le(&mut self, n: u64) {
            (**self).put_u64_le(n)
        }
        fn put_u64_ne(&mut self, n: u64) {
            (**self).put_u64_ne(n)
        }
        fn put_i64(&mut self, n: i64) {
            (**self).put_i64(n)
        }
        fn put_i64_le(&mut self, n: i64) {
            (**self).put_i64_le(n)
        }
        fn put_i64_ne(&mut self, n: i64) {
            (**self).put_i64_ne(n)
        }
    };
}
// A mutable reference to a buffer is itself a buffer: writes go through to
// the referent. SAFETY: all methods forward, so the referent's invariants
// are preserved unchanged.
unsafe impl<T: BufMut + ?Sized> BufMut for &mut T {
    deref_forward_bufmut!();
}
// A boxed buffer is a buffer: writes go through to the heap-allocated value.
// SAFETY: all methods forward, so the inner type's invariants are preserved.
unsafe impl<T: BufMut + ?Sized> BufMut for Box<T> {
    deref_forward_bufmut!();
}
// Writing to `&mut [u8]` fills the slice from the front; advancing shrinks
// the slice to its unwritten tail.
unsafe impl BufMut for &mut [u8] {
    #[inline]
    fn remaining_mut(&self) -> usize {
        self.len()
    }

    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        // SAFETY: reinterprets the initialized `[u8]` as an `UninitSlice`;
        // assumes `UninitSlice` has the same layout as `[u8]` — upheld by
        // its definition elsewhere in this crate (verify if that changes).
        unsafe { &mut *(*self as *mut [u8] as *mut _) }
    }

    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        // `mem::replace` with an empty slice sidesteps borrowing `*self`
        // while reassigning it. `split_at_mut` panics if `cnt > self.len()`.
        let (_, b) = core::mem::replace(self, &mut []).split_at_mut(cnt);
        *self = b;
    }

    #[inline]
    fn put_slice(&mut self, src: &[u8]) {
        // Indexing panics if `src` is longer than the remaining slice.
        self[..src.len()].copy_from_slice(src);
        unsafe {
            self.advance_mut(src.len());
        }
    }

    fn put_bytes(&mut self, val: u8, cnt: usize) {
        assert!(self.remaining_mut() >= cnt);
        // SAFETY: the assert above guarantees `cnt` bytes are in bounds.
        unsafe {
            ptr::write_bytes(self.as_mut_ptr(), val, cnt);
            self.advance_mut(cnt);
        }
    }
}
// Same front-fill scheme as `&mut [u8]`, but over uninitialized storage;
// `UninitSlice::uninit` provides the safe view, so no layout cast is needed.
unsafe impl BufMut for &mut [core::mem::MaybeUninit<u8>] {
    #[inline]
    fn remaining_mut(&self) -> usize {
        self.len()
    }

    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        UninitSlice::uninit(self)
    }

    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        // Swap in an empty slice so `*self` can be split and reassigned;
        // `split_at_mut` panics if `cnt > self.len()`.
        let (_, b) = core::mem::replace(self, &mut []).split_at_mut(cnt);
        *self = b;
    }

    #[inline]
    fn put_slice(&mut self, src: &[u8]) {
        // `UninitSlice::copy_from_slice` initializes the prefix; indexing
        // panics if `src` is longer than the remaining slice.
        self.chunk_mut()[..src.len()].copy_from_slice(src);
        unsafe {
            self.advance_mut(src.len());
        }
    }

    fn put_bytes(&mut self, val: u8, cnt: usize) {
        assert!(self.remaining_mut() >= cnt);
        // SAFETY: the assert above guarantees `cnt` bytes are in bounds;
        // writing through `*mut u8` initializes the `MaybeUninit` bytes.
        unsafe {
            ptr::write_bytes(self.as_mut_ptr() as *mut u8, val, cnt);
            self.advance_mut(cnt);
        }
    }
}
// A `Vec<u8>` grows on demand, so writes append at `len` and `chunk_mut`
// reserves capacity when none is spare.
unsafe impl BufMut for Vec<u8> {
    /// A `Vec` can reallocate, so remaining capacity is bounded only by the
    /// maximum allocation size (`isize::MAX`), not the current capacity.
    #[inline]
    fn remaining_mut(&self) -> usize {
        core::isize::MAX as usize - self.len()
    }

    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        let len = self.len();
        let remaining = self.capacity() - len;
        // NOTE(review): this checks spare *capacity*, which can be smaller
        // than `remaining_mut()`; callers are expected to have reserved
        // space first (as `chunk_mut` below does).
        assert!(
            cnt <= remaining,
            "cannot advance past `remaining_mut`: {:?} <= {:?}",
            cnt,
            remaining
        );
        // SAFETY: `len + cnt <= capacity` per the assert; the caller
        // contract of `advance_mut` says those bytes were initialized.
        self.set_len(len + cnt);
    }

    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        // Grow when full so the returned chunk is never empty (64 is an
        // arbitrary minimum growth request; `reserve` may allocate more).
        if self.capacity() == self.len() {
            self.reserve(64);
        }
        let cap = self.capacity();
        let len = self.len();
        let ptr = self.as_mut_ptr();
        // SAFETY: `ptr` spans `cap` allocated bytes; the `[len..]` tail is
        // exactly the uninitialized spare capacity.
        unsafe { &mut UninitSlice::from_raw_parts_mut(ptr, cap)[len..] }
    }

    /// Specialized `put`: appends via safe `extend_from_slice`, reserving
    /// the full amount up front to avoid repeated reallocation.
    fn put<T: super::Buf>(&mut self, mut src: T)
    where
        Self: Sized,
    {
        self.reserve(src.remaining());
        while src.has_remaining() {
            let l;
            {
                let s = src.chunk();
                l = s.len();
                self.extend_from_slice(s);
            }
            src.advance(l);
        }
    }

    /// Specialized `put_slice`: a plain append, no overflow check needed.
    #[inline]
    fn put_slice(&mut self, src: &[u8]) {
        self.extend_from_slice(src);
    }

    /// Specialized `put_bytes`: bulk fill via `resize`; `checked_add`
    /// panics on length overflow instead of wrapping.
    fn put_bytes(&mut self, val: u8, cnt: usize) {
        let new_len = self.len().checked_add(cnt).unwrap();
        self.resize(new_len, val);
    }
}
fn _assert_trait_object(_b: &dyn BufMut) {}