use super::{
MAX_PHYSICAL_BUF_SIZE, MAX_VIRTUAL_BUF_SIZE, allocate_mirrored, allocation_granularity, deallocate_mirrored,
};
use crate::mirrored::utils::mirrored_allocation_unit;
use num::{Integer, Zero};
use std::{
mem::{MaybeUninit, size_of},
ops::{Deref, DerefMut},
ptr::NonNull,
slice,
};
#[cfg(feature = "unstable")]
pub type Size = core::num::niche_types::UsizeNoHighBit;
#[cfg(not(feature = "unstable"))]
use crate::stable::{Size, SizeCompact};
/// A buffer backed by a "mirrored" virtual-memory mapping: the virtual region
/// is twice the physical size, and both halves alias the same physical pages,
/// so a write at index `i` is also visible at index `i + physical_capacity()`
/// (demonstrated by the mirrored-write tests below).
#[derive(Debug)]
pub struct MirroredBuffer<T> {
    // Base of the virtual mapping. Dangling (never dereferenced for real
    // memory) when `T` is a ZST or the buffer is empty.
    ptr: NonNull<T>,
    // Total *virtual* size of the mapping in bytes — always twice the
    // physical size. For ZSTs this instead stores `2 * capacity`, since the
    // element size is accounted as one byte (see `physical_capacity`).
    size: Size,
}
impl<T> MirroredBuffer<T> {
    /// `true` when `T` is a zero-sized type; ZST buffers never map memory.
    pub(crate) const ELEM_IS_ZST: bool = size_of::<T>() == 0;

    /// Overwrites the recorded virtual size without revalidating it.
    ///
    /// # Safety
    /// `v_cap` must be at most `MAX_VIRTUAL_BUF_SIZE` and must describe the
    /// mapping actually owned by `self.ptr`.
    #[inline]
    pub(crate) unsafe fn set_size_unchecked(&mut self, v_cap: usize) {
        debug_assert!(v_cap <= MAX_VIRTUAL_BUF_SIZE);
        // SAFETY: upheld by the caller per this function's contract.
        self.size = unsafe { Size::new_unchecked(v_cap) };
    }

    /// Creates an empty buffer. For ZSTs the buffer immediately reports the
    /// maximum capacity, since no memory ever needs to be mapped.
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(if Self::ELEM_IS_ZST { MAX_PHYSICAL_BUF_SIZE } else { 0 })
    }

    /// Number of `T` slots in one physical half of the mapping.
    #[inline]
    pub fn physical_capacity(&self) -> usize {
        let p_size = self.physical_size();
        // ZSTs are accounted with a nominal element size of one byte so the
        // division below stays meaningful.
        let t_size = if Self::ELEM_IS_ZST { 1 } else { size_of::<T>() };
        debug_assert!(p_size.is_multiple_of(t_size));
        p_size / t_size
    }

    /// Returns `true` when nothing is mapped (virtual size of zero).
    #[allow(unused)]
    #[inline]
    pub fn is_empty(&self) -> bool {
        // The virtual region always consists of two equal halves.
        debug_assert!(self.virtual_size().is_multiple_of(2));
        self.virtual_size().is_zero()
    }

    /// Total size of the virtual mapping in bytes (both mirrored halves).
    #[inline]
    pub fn virtual_size(&self) -> usize {
        self.size.as_inner()
    }

    /// Size in bytes of one physical half of the mapping.
    #[inline]
    pub fn physical_size(&self) -> usize {
        let v_size = self.virtual_size();
        debug_assert!(v_size.is_even(), "Virtual size must be even");
        v_size / 2
    }

    /// Creates a buffer holding at least `cap` elements; the real capacity is
    /// rounded up to a whole allocation granule by `mirrored_allocation_unit`.
    #[inline]
    pub fn with_capacity(cap: usize) -> Self {
        if Self::ELEM_IS_ZST {
            // Guard the `cap * 2` bookkeeping against exceeding the range
            // `Size` can represent (debug builds only, matching the other
            // size invariants in this type).
            debug_assert!(cap <= MAX_VIRTUAL_BUF_SIZE / 2, "ZST capacity exceeds MAX_VIRTUAL_BUF_SIZE");
            // SAFETY: `cap * 2` is within `Size`'s valid range per the
            // debug-checked invariant above.
            return Self { ptr: NonNull::dangling(), size: unsafe { Size::new_unchecked(cap * 2) } };
        }
        let v_size = mirrored_allocation_unit::<T>(cap);
        // SAFETY: `mirrored_allocation_unit` returns zero or a multiple of
        // the allocation granularity no larger than MAX_VIRTUAL_BUF_SIZE.
        unsafe { Self::alloc(v_size) }
    }

    /// Maps a mirrored region of exactly `v_size` virtual bytes.
    ///
    /// # Safety
    /// `v_size` must be zero, or a multiple of `allocation_granularity()`
    /// that does not exceed `MAX_VIRTUAL_BUF_SIZE`.
    #[inline]
    unsafe fn alloc(v_size: usize) -> Self {
        debug_assert!(!Self::ELEM_IS_ZST);
        if v_size == 0 {
            // Nothing to map: a dangling pointer with size 0 is the
            // canonical empty buffer.
            // SAFETY: zero is always a valid `Size`.
            return Self { ptr: NonNull::dangling(), size: unsafe { Size::new_unchecked(0) } };
        }
        debug_assert!(
            v_size.is_multiple_of(allocation_granularity()) && v_size > 0 && v_size <= MAX_VIRTUAL_BUF_SIZE,
            "virtual_size must be a positive multiple of allocation_granularity() and less than usize::MAX"
        );
        let p_size = v_size / 2;
        debug_assert!(p_size != 0, "physical_size must be in range (0, MAX_USIZE_WITHOUT_HIGHEST_BIT/ 2)");
        // Mapped pages are only guaranteed to be aligned to the allocation
        // granularity, so stricter alignments cannot be honoured.
        assert!(
            align_of::<T>() <= allocation_granularity(),
            "The alignment requirements of `T` must be smaller than the allocation granularity."
        );
        // SAFETY: `v_size` satisfies `allocate_mirrored`'s requirements per
        // the debug assertions above; the returned pointer is non-null and
        // granularity-aligned, hence aligned for `T` (checked above).
        unsafe {
            let ptr = allocate_mirrored(v_size).expect("Allocation failed");
            Self { ptr: NonNull::new_unchecked(ptr.cast::<T>()), size: Size::new_unchecked(v_size) }
        }
    }

    /// Number of `T` slots in the whole virtual region (both halves).
    #[inline]
    fn virtual_capacity(&self) -> usize {
        self.physical_capacity() * 2
    }

    /// Views the entire virtual region as uninitialized slots.
    #[inline]
    pub fn as_uninit_virtual_slice(&self) -> &[MaybeUninit<T>] {
        // SAFETY: `ptr` is valid (or dangling with capacity 0) for
        // `virtual_capacity()` elements of `MaybeUninit<T>`, which carries
        // no initialization requirement.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr().cast(), self.virtual_capacity()) }
    }

    /// Misspelled historical name for [`Self::as_uninit_virtual_slice`];
    /// kept so existing callers continue to compile.
    #[inline]
    pub fn as_uninit_virtaul_slice(&self) -> &[MaybeUninit<T>] {
        self.as_uninit_virtual_slice()
    }

    /// Mutably views the entire virtual region as uninitialized slots.
    #[inline]
    pub fn as_uninit_virtual_mut_slice(&mut self) -> &mut [MaybeUninit<T>] {
        // SAFETY: as in `as_uninit_virtual_slice`; `&mut self` guarantees
        // exclusive access.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr().cast(), self.virtual_capacity()) }
    }

    /// Borrows `len` uninitialized slots starting at virtual index `start`.
    #[inline]
    pub fn virtual_uninit_slice_at(&self, start: usize, len: usize) -> &[MaybeUninit<T>] {
        // FIX: the previous check compared `Option<usize>` against
        // `Some(cap)`, but `None < Some(_)` under `Option`'s ordering, so an
        // overflowing `start + len` silently PASSED the bounds check.
        debug_assert!(
            start.checked_add(len).is_some_and(|end| end <= self.virtual_capacity()),
            "slice bounds out of virtual capacity"
        );
        debug_assert!(
            len.checked_mul(size_of::<T>()).is_some_and(|bytes| bytes <= MAX_VIRTUAL_BUF_SIZE),
            "slice byte length exceeds MAX_VIRTUAL_BUF_SIZE"
        );
        // SAFETY: bounds are debug-checked above; callers must keep
        // `start + len <= virtual_capacity()` in release builds as well.
        unsafe { self.as_uninit_virtual_slice().get_unchecked(start..start + len) }
    }

    /// Mutably borrows `len` uninitialized slots starting at virtual index
    /// `start`.
    #[inline]
    pub fn virtual_uninit_slice_mut_at(&mut self, start: usize, len: usize) -> &mut [MaybeUninit<T>] {
        // FIX: same overflow-tolerant `Option` comparison as in
        // `virtual_uninit_slice_at`; see the note there.
        debug_assert!(
            start.checked_add(len).is_some_and(|end| end <= self.virtual_capacity()),
            "slice bounds out of virtual capacity"
        );
        debug_assert!(
            len.checked_mul(size_of::<T>()).is_some_and(|bytes| bytes <= MAX_VIRTUAL_BUF_SIZE),
            "slice byte length exceeds MAX_VIRTUAL_BUF_SIZE"
        );
        // SAFETY: bounds are debug-checked above; callers must keep
        // `start + len <= virtual_capacity()` in release builds as well.
        unsafe { self.as_uninit_virtual_mut_slice().get_unchecked_mut(start..start + len) }
    }

    /// Raw pointer to the base of the virtual mapping.
    #[inline]
    pub const fn as_ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }
}
impl<T> Default for MirroredBuffer<T> {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl<T> Drop for MirroredBuffer<T> {
    fn drop(&mut self) {
        // Only buffers that actually mapped memory need cleanup: ZST buffers
        // and empty buffers hold a dangling pointer and never allocated.
        let v_size = self.virtual_size();
        if !Self::ELEM_IS_ZST && v_size != 0 {
            // SAFETY: `ptr` and `v_size` describe the live mapping obtained
            // from `allocate_mirrored`, and it is released exactly once here.
            unsafe {
                deallocate_mirrored(self.ptr.as_ptr().cast::<u8>(), v_size)
                    .expect("Failed to deallocate memory");
            }
        }
    }
}
impl<T> Deref for MirroredBuffer<T> {
    type Target = [MaybeUninit<T>];

    /// Dereferences to the whole virtual region as uninitialized slots.
    #[inline]
    fn deref(&self) -> &[MaybeUninit<T>] {
        self.as_uninit_virtaul_slice()
    }
}
impl<T> DerefMut for MirroredBuffer<T> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_uninit_virtual_mut_slice()
}
}
// SAFETY: the buffer exclusively owns its mapping and raw pointer, so moving
// it to another thread is sound whenever `T: Send`. NOTE(review): this
// presumes `allocate_mirrored` mappings are not thread-affine — confirm.
unsafe impl<T> Send for MirroredBuffer<T> where T: Send {}
// SAFETY: shared references only expose `&[MaybeUninit<T>]` views with no
// interior mutability in this type itself, so `&MirroredBuffer<T>` is safe to
// share across threads whenever `T: Sync`.
unsafe impl<T> Sync for MirroredBuffer<T> where T: Sync {}
#[cfg(test)]
mod tests {
    //! Unit tests for `MirroredBuffer`: construction, ZST handling, mirrored
    //! visibility of writes, slice bounds, alignment, and trait impls.
    use super::*;

    #[test]
    fn new_and_default_are_empty() {
        let buf_new = MirroredBuffer::<u8>::new();
        assert_eq!(buf_new.physical_capacity(), 0);
        assert_eq!(buf_new.virtual_size(), 0);
        assert_eq!(buf_new.physical_size(), 0);
        let buf_default = MirroredBuffer::<u32>::default();
        assert_eq!(buf_default.physical_capacity(), 0);
        assert_eq!(buf_default.virtual_size(), 0);
        assert_eq!(buf_default.physical_size(), 0);
    }

    #[test]
    fn zst_full_cap_when_new_or_default() {
        // ZST buffers never allocate, so `new()` reports maximum capacity.
        let buf_new = MirroredBuffer::<()>::new();
        assert_eq!(buf_new.physical_capacity(), MAX_PHYSICAL_BUF_SIZE);
        assert_eq!(buf_new.virtual_size(), MAX_VIRTUAL_BUF_SIZE);
        assert_eq!(buf_new.physical_size(), MAX_PHYSICAL_BUF_SIZE);
    }

    #[test]
    fn with_capacity_zero_is_empty() {
        let buf = MirroredBuffer::<u8>::with_capacity(0);
        assert_eq!(buf.physical_capacity(), 0);
        assert_eq!(buf.virtual_size(), 0);
        assert_eq!(buf.physical_size(), 0);
    }

    #[test]
    fn with_capacity_allocates_memory() {
        let cap = 10;
        let buf = MirroredBuffer::<i32>::with_capacity(cap);
        let mau = mirrored_allocation_unit::<i32>(cap);
        // Capacity is rounded up to a whole allocation granule.
        assert!(buf.physical_capacity() >= cap);
        assert_eq!(buf.virtual_size(), mau);
        assert!(buf.virtual_size() > 0);
        assert_eq!(buf.virtual_size(), buf.physical_size() * 2);
        assert!(buf.virtual_size().is_multiple_of(allocation_granularity()));
    }

    #[test]
    fn drop_deallocates() {
        // Each scope ends by dropping the buffer; failure would panic inside
        // `Drop` via its `expect`.
        {
            let _buf = MirroredBuffer::<u64>::with_capacity(100);
        }
        {
            let _buf = MirroredBuffer::<u8>::new();
        }
        {
            let _buf = MirroredBuffer::<()>::with_capacity(100);
        }
    }

    #[test]
    fn mirrored_writes_are_correct() {
        // A write in the first half must be readable at the same offset in
        // the second half.
        let mut buf = MirroredBuffer::<u32>::with_capacity(4);
        let capacity = buf.physical_capacity();
        assert!(capacity >= 4);
        let val1: u32 = 12345;
        let val2: u32 = 67890;
        unsafe {
            *buf.get_unchecked_mut(0).as_mut_ptr() = val1;
            *buf.get_unchecked_mut(2).as_mut_ptr() = val2;
            assert_eq!(*buf.get_unchecked(0).assume_init_ref(), val1);
            assert_eq!(*buf.get_unchecked(2).assume_init_ref(), val2);
            assert_eq!(*buf.get_unchecked(capacity).assume_init_ref(), val1);
            assert_eq!(*buf.get_unchecked(2 + capacity).assume_init_ref(), val2);
        }
    }

    #[test]
    fn mirrored_writes_in_second_half_are_correct() {
        // The mirroring works both ways: writes in the second half show up
        // in the first half.
        let mut buf = MirroredBuffer::<char>::with_capacity(8);
        let capacity = buf.physical_capacity();
        assert!(capacity >= 8);
        let val1 = 'A';
        let val2 = 'Z';
        unsafe {
            buf.as_uninit_virtual_mut_slice()[capacity + 1].as_mut_ptr().write(val1);
            buf.as_uninit_virtual_mut_slice()[capacity + 5].as_mut_ptr().write(val2);
            assert_eq!(*buf.get_unchecked(capacity + 1).assume_init_ref(), val1);
            assert_eq!(*buf.get_unchecked(capacity + 5).assume_init_ref(), val2);
            assert_eq!(*buf.get_unchecked(1).assume_init_ref(), val1);
            assert_eq!(*buf.get_unchecked(5).assume_init_ref(), val2);
        }
    }

    #[test]
    fn slice_access_in_bounds() {
        let mut buf = MirroredBuffer::<u8>::with_capacity(16);
        let capacity = buf.physical_capacity();
        let v_len = buf.virtual_capacity();
        assert_eq!(v_len, capacity * 2);
        let slice1 = buf.virtual_uninit_slice_at(0, capacity);
        assert_eq!(slice1.len(), capacity);
        let slice2 = buf.virtual_uninit_slice_at(capacity, capacity);
        assert_eq!(slice2.len(), capacity);
        // A slice may straddle the two halves.
        let slice3 = buf.virtual_uninit_slice_mut_at(capacity - 4, 8);
        assert_eq!(slice3.len(), 8);
    }

    // FIX: this test relies on a `debug_assert!` in `virtual_uninit_slice_at`.
    // In release builds that assert is compiled out and the out-of-bounds
    // `get_unchecked` would be undefined behavior rather than a panic, so the
    // test must only run when debug assertions are enabled.
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic = "slice bounds out of virtual capacity"]
    fn slice_access_out_of_bounds() {
        let buf = MirroredBuffer::<u8>::with_capacity(16);
        let v_len = buf.virtual_capacity();
        let _slice = buf.virtual_uninit_slice_at(v_len - 4, 5);
    }

    // FIX: gated for the same reason as `slice_access_out_of_bounds`.
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic(expected = "slice bounds out of virtual capacity")]
    fn slice_access_starts_truly_out_of_bounds() {
        let buf = MirroredBuffer::<u8>::with_capacity(16);
        let v_len = buf.virtual_capacity();
        let _slice = buf.virtual_uninit_slice_at(v_len + 1, 0);
    }

    #[test]
    fn alignment_test() {
        #[repr(align(32))]
        #[allow(dead_code)]
        struct AlignedType(u64);
        let buf = MirroredBuffer::<AlignedType>::with_capacity(4);
        assert!(buf.physical_capacity() >= 4);
        let ptr_addr = buf.as_ptr() as usize;
        assert_eq!(ptr_addr % align_of::<AlignedType>(), 0, "Pointer is not correctly aligned");
    }

    #[test]
    fn test_mirrored_buffer_zst() {
        // ZST capacity is tracked exactly, not rounded to a granule.
        let buf = MirroredBuffer::<()>::with_capacity(5);
        assert!(buf.physical_capacity() == 5);
    }

    #[test]
    fn deref_and_deref_mut_traits() {
        let mut buf = MirroredBuffer::<i32>::with_capacity(8);
        let cap = buf.physical_capacity();
        // Deref exposes the whole virtual region (both halves).
        assert_eq!(buf.len(), cap * 2);
        buf[0] = MaybeUninit::new(42);
        unsafe {
            assert_eq!(*buf[0].assume_init_ref(), 42);
        }
    }

    #[test]
    fn as_uninit_virtual_slices() {
        let mut buf = MirroredBuffer::<u16>::with_capacity(4);
        let virtual_capacity = buf.virtual_capacity();
        let slice = buf.as_uninit_virtaul_slice();
        assert_eq!(slice.len(), virtual_capacity);
        let slice_mut = buf.as_uninit_virtual_mut_slice();
        assert_eq!(slice_mut.len(), virtual_capacity);
    }

    #[test]
    fn different_capacities() {
        for cap in [1, 2, 4, 8, 16, 32, 64, 128] {
            let buf = MirroredBuffer::<u8>::with_capacity(cap);
            assert!(buf.physical_capacity() >= cap);
            assert!(buf.virtual_size() > 0);
            assert_eq!(buf.virtual_size(), buf.physical_size() * 2);
        }
    }

    #[test]
    fn send_and_sync_traits() {
        // Compile-time check of the manual `Send`/`Sync` impls.
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<MirroredBuffer<u32>>();
        assert_send_sync::<MirroredBuffer<String>>();
    }

    #[test]
    fn virtual_uninit_slice_methods() {
        let mut buf = MirroredBuffer::<i32>::with_capacity(8);
        let physical_capacity = buf.physical_capacity();
        let virtual_capacity = buf.virtual_capacity();
        let slice_start = buf.virtual_uninit_slice_at(0, physical_capacity);
        assert_eq!(slice_start.len(), physical_capacity);
        let slice_end = buf.virtual_uninit_slice_at(physical_capacity, physical_capacity);
        assert_eq!(slice_end.len(), physical_capacity);
        let slice_mut = buf.virtual_uninit_slice_mut_at(0, virtual_capacity);
        assert_eq!(slice_mut.len(), virtual_capacity);
    }

    #[test]
    fn size_methods() {
        let buf = MirroredBuffer::<u64>::with_capacity(16);
        let virtual_size = buf.virtual_size();
        let physical_size = buf.physical_size();
        let physical_capacity = buf.physical_capacity();
        assert_eq!(virtual_size, physical_size * 2);
        assert_eq!(physical_capacity * size_of::<u64>(), physical_size);
    }

    #[test]
    fn different_type_alignments() {
        #[repr(align(1))]
        #[allow(dead_code)]
        struct Align1(u8);
        #[repr(align(2))]
        #[allow(dead_code)]
        struct Align2(u8);
        #[repr(align(4))]
        #[allow(dead_code)]
        struct Align4(u8);
        #[repr(align(8))]
        #[allow(dead_code)]
        struct Align8(u8);
        let buf1 = MirroredBuffer::<Align1>::with_capacity(4);
        assert!(buf1.physical_capacity() >= 4);
        let buf2 = MirroredBuffer::<Align2>::with_capacity(4);
        assert!(buf2.physical_capacity() >= 4);
        let buf4 = MirroredBuffer::<Align4>::with_capacity(4);
        assert!(buf4.physical_capacity() >= 4);
        let buf8 = MirroredBuffer::<Align8>::with_capacity(4);
        assert!(buf8.physical_capacity() >= 4);
    }

    #[test]
    fn different_type_sizes() {
        let buf1 = MirroredBuffer::<u8>::with_capacity(16);
        assert!(buf1.physical_capacity() >= 16);
        let buf2 = MirroredBuffer::<u16>::with_capacity(16);
        assert!(buf2.physical_capacity() >= 16);
        let buf3 = MirroredBuffer::<u32>::with_capacity(16);
        assert!(buf3.physical_capacity() >= 16);
        let buf4 = MirroredBuffer::<u64>::with_capacity(16);
        assert!(buf4.physical_capacity() >= 16);
    }

    // FIX: the multiply-with-overflow panic comes from debug overflow checks
    // inside `mirrored_allocation_unit`; in release builds the arithmetic
    // wraps instead of panicking, so run this only with debug assertions.
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic = "attempt to multiply with overflow"]
    fn extreme_capacities() {
        let _ = MirroredBuffer::<u8>::with_capacity(MAX_VIRTUAL_BUF_SIZE + 1);
    }

    #[test]
    fn zero_sized_types_extensive() {
        #[derive(Clone, Copy, Debug, PartialEq)]
        struct ZeroSizedType;
        let buf = MirroredBuffer::<ZeroSizedType>::with_capacity(100);
        assert!(buf.physical_capacity() >= 50);
        let slice = buf.as_uninit_virtaul_slice();
        assert_eq!(slice.len(), buf.virtual_capacity());
    }

    #[test]
    fn deref_deref_mut_consistency() {
        let mut buf = MirroredBuffer::<i32>::with_capacity(8);
        let len_through_deref = buf.len();
        let len_through_method = buf.virtual_capacity();
        assert_eq!(len_through_deref, len_through_method);
        buf[0] = MaybeUninit::new(12345);
        unsafe {
            assert_eq!(*buf[0].assume_init_ref(), 12345);
        }
    }

    #[test]
    fn slice_methods_edge_cases() {
        let mut buf = MirroredBuffer::<u8>::with_capacity(8);
        let capacity = buf.physical_capacity();
        // Zero-length and single-element slices are valid at any in-bounds
        // start, including the seam between the two halves.
        let empty_slice = buf.virtual_uninit_slice_at(0, 0);
        assert_eq!(empty_slice.len(), 0);
        let empty_slice_mut = buf.virtual_uninit_slice_mut_at(0, 0);
        assert_eq!(empty_slice_mut.len(), 0);
        let single_slice = buf.virtual_uninit_slice_at(0, 1);
        assert_eq!(single_slice.len(), 1);
        let single_slice_mut = buf.virtual_uninit_slice_mut_at(0, 1);
        assert_eq!(single_slice_mut.len(), 1);
        let boundary_slice = buf.virtual_uninit_slice_at(capacity - 1, 2);
        assert_eq!(boundary_slice.len(), 2);
    }
}