//! msgpacker 0.7.1
//!
//! MessagePack protocol implementation for Rust.
//!
//! See <https://docs.rs/msgpacker> for the full documentation.
use core::{hint, ops::Deref, ptr};

use alloc::vec::Vec;
use bytes::{buf::UninitSlice, Buf, BufMut};

/// An optimized encoder for dynamic allocation.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Encoder {
    // Growable byte buffer; capacity is managed in whole pages of
    // `Self::PAGE` bytes, and the `BufMut` impl writes directly into the
    // spare capacity.
    bytes: Vec<u8>,
}

impl Encoder {
    /// A native build, x86_64 page size.
    ///
    /// Usage on other platforms is likely non-performance critical anyway.
    pub const PAGE: usize = 4096;

    /// Creates a new encoder with a single page.
    pub fn new() -> Self {
        Self::with_capacity(Self::PAGE)
    }

    /// Creates a new encoder with the provided bytes capacity, adjusted to fit [Self::PAGE].
    pub fn with_capacity(bytes: usize) -> Self {
        // `div_ceil` rounds up to whole pages without the overflow risk of the
        // manual `(bytes + PAGE - 1) / PAGE` formula when `bytes` is close to
        // `usize::MAX`.
        let pages = bytes.div_ceil(Self::PAGE);

        Self {
            bytes: Vec::with_capacity(Self::PAGE * pages),
        }
    }

    /// Returns the current capacity of the encoder.
    pub fn capacity(&self) -> usize {
        self.bytes.capacity()
    }

    /// Returns the underlying Vec<u8>.
    pub fn into_inner(self) -> Vec<u8> {
        self.bytes
    }

    /// Guarantees at least `cnt` bytes of spare capacity, taking the cold
    /// [`Self::grow`] path only when the current spare space is insufficient.
    #[inline(always)]
    fn ensure_capacity(&mut self, cnt: usize) {
        let spare = self.bytes.capacity() - self.bytes.len();

        if spare < cnt {
            self.grow(cnt);
        }

        // SAFETY: either `spare >= cnt` already held, or `grow` called
        // `Vec::reserve` with at least `cnt` additional bytes, which
        // guarantees `capacity - len >= cnt` on return.
        unsafe { hint::assert_unchecked(self.bytes.capacity() - self.bytes.len() >= cnt) }
    }

    /// Cold growth path: reserves whole pages covering at least `cnt` extra bytes.
    #[cold]
    #[inline(never)]
    fn grow(&mut self, cnt: usize) {
        // Round the request up to a page multiple; overflow-free equivalent of
        // `(cnt + PAGE - 1) / PAGE`.
        let pages = cnt.div_ceil(Self::PAGE);

        self.bytes.reserve(pages * Self::PAGE);
    }
}

impl Deref for Encoder {
    type Target = [u8];

    /// Borrows the encoded bytes written so far as a plain slice.
    fn deref(&self) -> &Self::Target {
        &self.bytes
    }
}

// Hand-rolled `BufMut` that writes directly into the `Vec`'s spare capacity
// and bumps the length with `set_len`, skipping `Vec`'s per-call growth and
// bounds checks on the hot encoding path.
unsafe impl BufMut for Encoder {
    /// Writable headroom: `Vec` is bounded to `isize::MAX` bytes, so report
    /// the distance from the current length to that ceiling.
    #[inline]
    fn remaining_mut(&self) -> usize {
        isize::MAX as usize - self.bytes.len()
    }

    /// Marks `cnt` additional bytes as initialized.
    ///
    /// SAFETY (caller contract): the next `cnt` bytes of spare capacity must
    /// already have been written, and `len + cnt` must not exceed the current
    /// capacity — no check is performed here before `set_len`.
    #[inline]
    unsafe fn advance_mut(&mut self, cnt: usize) {
        self.bytes.set_len(self.bytes.len() + cnt);
    }

    /// Exposes the spare capacity as an uninitialized slice, first growing by
    /// one page when the buffer is exactly full so the slice is never empty
    /// (the `BufMut` contract for `chunk_mut`).
    #[inline]
    fn chunk_mut(&mut self) -> &mut UninitSlice {
        if self.bytes.len() == self.bytes.capacity() {
            self.grow(Self::PAGE);
        }

        let cap = self.bytes.capacity();
        let len = self.bytes.len();
        let ptr = self.bytes.as_mut_ptr();

        // SAFETY: `ptr + len .. ptr + cap` is the allocated, exclusively
        // owned spare capacity of our own `Vec`.
        unsafe { UninitSlice::from_raw_parts_mut(ptr.add(len), cap - len) }
    }

    /// Copies all remaining contents of `src`, chunk by chunk, after a single
    /// up-front capacity reservation.
    #[inline]
    fn put<T: Buf>(&mut self, mut src: T) {
        let extra = src.remaining();
        self.ensure_capacity(extra);

        let mut current_pos = self.bytes.len();
        // Caching the base pointer is sound: nothing in the loop touches the
        // `Vec`'s allocation, so it cannot move.
        let base_ptr = self.bytes.as_mut_ptr();

        while src.has_remaining() {
            let s = src.chunk();
            let l = s.len();

            // SAFETY: the chunks sum to at most `extra` bytes, for which
            // `ensure_capacity` reserved room, and `src` cannot alias the
            // spare capacity of our private `Vec`.
            unsafe {
                ptr::copy_nonoverlapping(s.as_ptr(), base_ptr.add(current_pos), l);
            }

            current_pos += l;
            src.advance(l);
        }

        // SAFETY: every byte up to `current_pos` was initialized by the
        // copies above.
        unsafe {
            self.bytes.set_len(current_pos);
        }
    }

    /// Appends a byte slice with one reservation plus one `memcpy`.
    #[inline]
    fn put_slice(&mut self, src: &[u8]) {
        let n = src.len();
        self.ensure_capacity(n);

        let len = self.bytes.len();

        // SAFETY: `ensure_capacity(n)` guaranteed `n` spare bytes after
        // `len`; `src` cannot overlap our `Vec`'s spare capacity; the copied
        // bytes are initialized before `set_len` exposes them.
        unsafe {
            ptr::copy_nonoverlapping(src.as_ptr(), self.bytes.as_mut_ptr().add(len), n);
            self.bytes.set_len(len + n);
        }
    }

    /// Appends `cnt` copies of `val` (a `memset` into spare capacity).
    #[inline]
    fn put_bytes(&mut self, val: u8, cnt: usize) {
        self.ensure_capacity(cnt);

        let len = self.bytes.len();

        // SAFETY: `ensure_capacity(cnt)` guaranteed `cnt` spare bytes after
        // `len`, and `write_bytes` initializes them before `set_len`.
        unsafe {
            ptr::write_bytes(self.bytes.as_mut_ptr().add(len), val, cnt);
            self.bytes.set_len(len + cnt);
        }
    }
}