//! santh-bufpool 0.1.0
//!
//! Typed buffer recycling with fixed size classes and lock-free
//! checkout/return. See the crate documentation for details.
use std::fmt;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::slice;
use std::sync::atomic::Ordering;
use std::sync::Arc;

use crate::size_class::{zero_buffer, BufferAllocation, SizeClassPool};
use crate::stats::PoolStats;
use crate::tls::store_tls_buffer;

/// An immutable, shared buffer view.
///
/// `FrozenBuffer` can be sent across threads and shared by reference because
/// it only permits read access. The underlying memory returns to the pool
/// when the buffer is dropped.
///
/// # Example
///
/// ```rust
/// use santh_bufpool::{BufferPool, PoolConfig};
///
/// let pool = BufferPool::new(PoolConfig::default());
/// let buffer = pool.checkout(8).unwrap();
/// let frozen = buffer.freeze();
/// assert_eq!(frozen.len(), 8);
/// ```
pub struct FrozenBuffer {
    // Start of the pooled allocation; read-only here, valid for `capacity` bytes.
    ptr: NonNull<u8>,
    // Logical length exposed through `Deref`; presumably `len <= capacity`
    // (enforced at checkout — see `PoolBuffer`'s Deref SAFETY notes).
    len: usize,
    // Physical size of the size-class allocation; used when zeroing on drop.
    capacity: usize,
    // Pool that owns the allocation and receives it back on drop.
    owner: Arc<SizeClassPool>,
    // Checkout accounting, present only when the pool tracks statistics.
    stats: Option<Arc<PoolStats>>,
}

// SAFETY: FrozenBuffer provides only immutable access to the pointed-to bytes.
// No API hands out `&mut` to the buffer contents, so concurrent shared access
// from multiple threads cannot race, and moving the value between threads is
// sound because the allocation is returned to the (thread-safe, Arc-owned)
// pool on drop.
unsafe impl Send for FrozenBuffer {}
unsafe impl Sync for FrozenBuffer {}

impl Deref for FrozenBuffer {
    type Target = [u8];

    /// View the frozen contents as an immutable byte slice of length `len`.
    fn deref(&self) -> &Self::Target {
        let data: *const u8 = self.ptr.as_ptr();
        // SAFETY: the pool allocation behind `ptr` is owned by `self` for the
        // duration of the borrow and is valid for at least `len` bytes.
        unsafe { slice::from_raw_parts(data, self.len) }
    }
}

impl AsRef<[u8]> for FrozenBuffer {
    fn as_ref(&self) -> &[u8] {
        self
    }
}

impl Drop for FrozenBuffer {
    fn drop(&mut self) {
        // Keep the pool's checkout accounting in sync before the memory is
        // handed back.
        if let Some(stats) = self.stats.as_deref() {
            stats.checked_out.fetch_sub(1, Ordering::Relaxed);
            stats.bytes_checked_out.fetch_sub(self.len, Ordering::Relaxed);
        }
        // Scrub the whole physical allocation, then let the pool decide
        // whether to cache it for reuse or free it outright.
        zero_buffer(self.ptr, self.capacity);
        self.owner.recycle_or_free(BufferAllocation { ptr: self.ptr });
    }
}

/// A borrowed buffer from the pool.
///
/// Automatically returns to the pool on drop. Supports `Deref` and `DerefMut`
/// for transparent access as a byte slice.
///
/// # Example
///
/// ```rust
/// use santh_bufpool::{BufferPool, PoolConfig};
///
/// let pool = BufferPool::new(PoolConfig::default());
/// let mut buffer = pool.checkout(4).unwrap();
/// buffer.copy_from_slice(&[1, 2, 3, 4]);
/// assert_eq!(&*buffer, &[1, 2, 3, 4]);
/// ```
pub struct PoolBuffer {
    // Start of the pooled allocation; uniquely owned until drop or `freeze`.
    pub(crate) ptr: NonNull<u8>,
    // Logical slice length exposed through `Deref`/`DerefMut`.
    pub(crate) len: usize,
    // Physical size of the size-class allocation; used for zeroing and for
    // choosing the thread-local cache slot on drop.
    pub(crate) capacity: usize,
    // Pool that owns the allocation and receives it back on drop.
    pub(crate) owner: Arc<SizeClassPool>,
    // Checkout accounting, present only when the pool tracks statistics.
    pub(crate) stats: Option<Arc<PoolStats>>,
}

// SAFETY: PoolBuffer owns its allocation exclusively until dropped or frozen.
// Mutable access requires `&mut self`, so a shared `&PoolBuffer` only permits
// reads; exclusive ownership of the raw pointer makes moving the value across
// threads sound.
unsafe impl Send for PoolBuffer {}
unsafe impl Sync for PoolBuffer {}

impl PoolBuffer {
    /// Freeze this buffer into an immutable, `Send + Sync` view.
    ///
    /// The frozen buffer can be shared across threads by reference. The
    /// underlying memory returns to the pool when the `FrozenBuffer` is
    /// dropped.
    ///
    /// # Example
    ///
    /// ```rust
    /// use santh_bufpool::{BufferPool, PoolConfig};
    ///
    /// let pool = BufferPool::new(PoolConfig::default());
    /// let buffer = pool.checkout(4).unwrap();
    /// let frozen = buffer.freeze();
    /// std::thread::spawn(move || {
    ///     assert_eq!(frozen.len(), 4);
    /// })
    /// .join()
    /// .unwrap();
    /// ```
    #[must_use]
    pub fn freeze(self) -> FrozenBuffer {
        // Wrap in ManuallyDrop so `PoolBuffer::drop` never runs: ownership of
        // the allocation (and the stats decrement responsibility) transfers
        // to the returned FrozenBuffer instead.
        let this = std::mem::ManuallyDrop::new(self);
        FrozenBuffer {
            ptr: this.ptr,
            len: this.len,
            capacity: this.capacity,
            // SAFETY: `this` is ManuallyDrop, so its destructor never runs
            // and `owner` is moved out exactly once; the source is never
            // touched again.
            owner: unsafe { std::ptr::read(&raw const this.owner) },
            // SAFETY: as above — `stats` is read exactly once and the
            // ManuallyDrop wrapper prevents a double drop.
            stats: unsafe { std::ptr::read(&raw const this.stats) },
        }
    }

    /// Return the logical slice length of this buffer.
    ///
    /// # Example
    ///
    /// ```rust
    /// use santh_bufpool::{BufferPool, PoolConfig};
    ///
    /// let pool = BufferPool::new(PoolConfig::default());
    /// let buffer = pool.checkout(42).unwrap();
    /// assert_eq!(buffer.len(), 42);
    /// ```
    #[must_use]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Return `true` if the logical length is zero.
    ///
    /// # Example
    ///
    /// ```rust
    /// use santh_bufpool::{BufferPool, PoolConfig};
    ///
    /// let pool = BufferPool::new(PoolConfig::default());
    /// let buffer = pool.checkout(0).unwrap();
    /// assert!(buffer.is_empty());
    /// ```
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Return the true physical capacity of the underlying allocation.
    ///
    /// This is the size-class allocation size, which may exceed [`len`](Self::len).
    ///
    /// # Example
    ///
    /// ```rust
    /// use santh_bufpool::{BufferPool, PoolConfig};
    ///
    /// let pool = BufferPool::new(PoolConfig::default());
    /// let buffer = pool.checkout(1).unwrap();
    /// assert!(buffer.capacity() >= 1);
    /// ```
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.capacity
    }

    /// Returns a raw pointer to the buffer's contents.
    ///
    /// The pointer is valid only while `self` is alive; it must not be used
    /// after the buffer is dropped or frozen.
    #[must_use]
    pub fn as_ptr(&self) -> *const u8 {
        self.ptr.as_ptr().cast_const()
    }

    /// Returns an unsafe mutable pointer to the buffer's contents.
    ///
    /// The pointer is valid only while `self` is alive; writes beyond
    /// `capacity()` bytes are undefined behavior.
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.ptr.as_ptr()
    }
}

impl AsRef<[u8]> for PoolBuffer {
    /// Borrow the buffer contents as an immutable byte slice.
    fn as_ref(&self) -> &[u8] {
        &self[..]
    }
}

impl AsMut<[u8]> for PoolBuffer {
    /// Borrow the buffer contents as a mutable byte slice.
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self[..]
    }
}

impl Deref for PoolBuffer {
    type Target = [u8];

    /// View the first `len` bytes of the allocation as an immutable slice.
    fn deref(&self) -> &Self::Target {
        let base: *const u8 = self.ptr.as_ptr();
        // SAFETY: the allocation backing `ptr` holds at least `len` bytes
        // (validated at checkout) and remains owned by `self` for the
        // lifetime of the returned slice.
        unsafe { slice::from_raw_parts(base, self.len) }
    }
}

impl DerefMut for PoolBuffer {
    /// View the first `len` bytes of the allocation as a mutable slice.
    fn deref_mut(&mut self) -> &mut Self::Target {
        let base = self.ptr.as_ptr();
        // SAFETY: `&mut self` guarantees unique access to the allocation,
        // which holds at least `len` bytes, for as long as the mutable slice
        // is borrowed.
        unsafe { slice::from_raw_parts_mut(base, self.len) }
    }
}

impl fmt::Debug for PoolBuffer {
    /// Debug-format as `PoolBuffer { len, capacity, .. }`; the contents and
    /// pool handles are deliberately omitted.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = formatter.debug_struct("PoolBuffer");
        builder.field("len", &self.len);
        builder.field("capacity", &self.capacity);
        builder.finish_non_exhaustive()
    }
}

impl Drop for PoolBuffer {
    fn drop(&mut self) {
        // Keep the pool's checkout accounting in sync before the memory is
        // handed back.
        if let Some(stats) = self.stats.as_deref() {
            stats.checked_out.fetch_sub(1, Ordering::Relaxed);
            stats.bytes_checked_out.fetch_sub(self.len, Ordering::Relaxed);
        }
        // Scrub the whole physical allocation before it becomes reusable.
        zero_buffer(self.ptr, self.capacity);
        // Prefer parking the buffer in the thread-local cache; fall back to
        // the shared pool when the cache declines it.
        let cached = store_tls_buffer(self.capacity, self.ptr, Arc::clone(&self.owner));
        if !cached {
            self.owner.recycle_or_free(BufferAllocation { ptr: self.ptr });
        }
    }
}