use std::ops::{Deref, Range, RangeBounds};
use std::sync::LazyLock;
use bytemuck::{Pod, Zeroable};
use either::Either;
use crate::storage::SharedStorage;
/// An immutable, cheaply-cloneable view into reference-counted storage.
///
/// `ptr` and `length` describe a sub-slice of `storage`; slicing adjusts
/// them without touching the underlying allocation.
pub struct Buffer<T> {
    /// Reference-counted backing allocation, kept alive by this view.
    storage: SharedStorage<T>,
    /// Start of this view; invariant: always points inside `storage`.
    ptr: *const T,
    /// Number of elements visible through this view.
    length: usize,
}
impl<T> Clone for Buffer<T> {
fn clone(&self) -> Self {
Self {
storage: self.storage.clone(),
ptr: self.ptr,
length: self.length,
}
}
}
// SAFETY: the raw `ptr` is derived from — and always points into — the
// reference-counted `storage` this buffer also owns, so sending or sharing
// the buffer across threads is sound whenever `T` itself is `Send + Sync`.
unsafe impl<T: Send + Sync> Sync for Buffer<T> {}
unsafe impl<T: Send + Sync> Send for Buffer<T> {}
impl<T: PartialEq> PartialEq for Buffer<T> {
    /// Element-wise equality over the viewed slices (not pointer identity).
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T: Eq> Eq for Buffer<T> {}
impl<T: std::hash::Hash> std::hash::Hash for Buffer<T> {
    /// Hashes exactly like the underlying slice, so `Buffer<T>` and `[T]`
    /// hash-agree (consistent with the slice-based `PartialEq`).
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        std::hash::Hash::hash(&**self, state);
    }
}
impl<T: std::fmt::Debug> std::fmt::Debug for Buffer<T> {
    /// Formats like the underlying slice (list of elements).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.as_slice().fmt(f)
    }
}
impl<T> Default for Buffer<T> {
    /// An empty buffer over the shared empty storage.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
impl<T> Buffer<T> {
    /// Creates an empty buffer over the shared empty storage.
    #[inline]
    pub const fn new() -> Self {
        Self::from_storage(SharedStorage::empty())
    }

    /// Wraps `storage`, viewing it in its entirety.
    pub const fn from_storage(storage: SharedStorage<T>) -> Self {
        let ptr = storage.as_ptr();
        let length = storage.len();
        Buffer {
            storage,
            ptr,
            length,
        }
    }

    /// Creates a buffer viewing `data` for the program's lifetime.
    pub fn from_static(data: &'static [T]) -> Self {
        Self::from_storage(SharedStorage::from_static(data))
    }

    /// Takes ownership of the vector's allocation.
    pub fn from_vec(data: Vec<T>) -> Self {
        Self::from_storage(SharedStorage::from_vec(data))
    }

    /// Creates a buffer backed by an arbitrary owner of a `[T]`.
    pub fn from_owner<O: Send + AsRef<[T]> + 'static>(owner: O) -> Self {
        Self::from_storage(SharedStorage::from_owner(owner))
    }

    /// Runs `f` with a buffer viewing `slice` through scoped storage.
    // NOTE(review): the scoping/escape contract lives in
    // `SharedStorage::with_slice` — confirm whether the buffer may outlive `f`.
    pub fn with_slice<R, F: FnOnce(Buffer<T>) -> R>(slice: &[T], f: F) -> R {
        SharedStorage::with_slice(slice, |ss| f(Self::from_storage(ss)))
    }

    /// Runs `f` with a buffer viewing `vec` through scoped storage.
    // NOTE(review): see `SharedStorage::with_vec` for the scoping contract.
    pub fn with_vec<R, F: FnOnce(Buffer<T>) -> R>(vec: &mut Vec<T>, f: F) -> R {
        SharedStorage::with_vec(vec, |ss| f(Self::from_storage(ss)))
    }

    /// Consumes the buffer, returning the full backing storage
    /// (any slicing offsets are discarded).
    pub fn into_storage(self) -> SharedStorage<T> {
        self.storage
    }

    /// Number of elements visible through this view.
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }

    /// Whether the view contains no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }

    /// Whether this view is shorter than its backing storage, i.e. the
    /// buffer has been sliced at either end.
    pub fn is_sliced(&self) -> bool {
        self.storage.len() != self.length
    }

    /// Extends the view's end out to the end of the backing storage,
    /// keeping the current start offset.
    pub fn expand_end_to_storage(self) -> Self {
        // SAFETY: `ptr` always points into `storage`, so `offset_from`
        // stays within one allocation and yields a non-negative offset
        // no larger than `storage.len()`.
        unsafe {
            let offset = self.ptr.offset_from(self.storage.as_ptr()) as usize;
            Self {
                ptr: self.ptr,
                length: self.storage.len() - offset,
                storage: self.storage,
            }
        }
    }

    /// Returns the viewed elements as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        debug_assert!(self.offset() + self.length <= self.storage.len());
        // SAFETY: the view invariant guarantees `ptr..ptr + length` lies
        // inside the live `storage` allocation.
        unsafe { std::slice::from_raw_parts(self.ptr, self.length) }
    }

    /// Returns this buffer restricted to `range` (bounds-checked).
    ///
    /// # Panics
    /// Panics if `range` is out of bounds for `self.len()`.
    #[inline]
    #[must_use]
    pub fn sliced<R: RangeBounds<usize>>(mut self, range: R) -> Self {
        self.slice_in_place(range);
        self
    }

    /// Returns this buffer restricted to `range` without bounds checks.
    ///
    /// # Safety
    /// `range` must resolve to `start <= end <= self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn sliced_unchecked<R: RangeBounds<usize>>(mut self, range: R) -> Self {
        unsafe {
            self.slice_in_place_unchecked(range);
        }
        self
    }

    /// Restricts the view to `range` in place.
    ///
    /// # Panics
    /// Panics (via `check_range`) if `range` is out of bounds.
    #[inline]
    pub fn slice_in_place<R: RangeBounds<usize>>(&mut self, range: R) {
        // SAFETY: `check_range` validated `start <= end <= self.len()`, so
        // the advanced pointer stays inside the storage allocation.
        unsafe {
            let Range { start, end } = crate::check_range(range, ..self.len());
            self.ptr = self.ptr.add(start);
            self.length = end - start;
        }
    }

    /// Restricts the view to `range` in place without bounds checks.
    ///
    /// # Safety
    /// `range` must resolve to `start <= end <= self.len()`.
    #[inline]
    pub unsafe fn slice_in_place_unchecked<R: RangeBounds<usize>>(&mut self, range: R) {
        unsafe {
            let Range { start, end } = crate::decode_range_unchecked(range, ..self.len());
            self.ptr = self.ptr.add(start);
            self.length = end - start;
        }
    }

    /// Pointer to the start of the backing storage (not of this view).
    #[inline]
    pub fn storage_ptr(&self) -> *const T {
        self.storage.as_ptr()
    }

    /// Offset, in elements, of this view's start within the storage.
    #[inline]
    pub fn offset(&self) -> usize {
        // SAFETY: `ptr` is derived from `storage`, so both pointers belong
        // to the same allocation and the offset is non-negative.
        unsafe {
            let ret = self.ptr.offset_from(self.storage.as_ptr()) as usize;
            debug_assert!(ret <= self.storage.len());
            ret
        }
    }

    /// Sets the view's length directly.
    ///
    /// # Safety
    /// `self.offset() + len` must not exceed the backing storage's length.
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        self.length = len;
    }

    /// Tries to reclaim the buffer as an owned `Vec`.
    ///
    /// Succeeds (`Right`) only when the buffer views its entire storage and
    /// the storage can give up its allocation; otherwise the buffer is
    /// returned unchanged (`Left`).
    #[inline]
    pub fn into_mut(mut self) -> Either<Self, Vec<T>> {
        if self.is_sliced() {
            return Either::Left(self);
        }
        match self.storage.try_into_vec() {
            Ok(v) => Either::Right(v),
            Err(slf) => {
                // Storage refused; put it back and return the buffer intact.
                self.storage = slf;
                Either::Left(self)
            },
        }
    }

    /// Mutable access to the viewed elements, when the storage permits it
    /// (see `SharedStorage::try_as_mut_slice`; presumably unique ownership).
    #[inline]
    pub fn get_mut_slice(&mut self) -> Option<&mut [T]> {
        let offset = self.offset();
        let slice = self.storage.try_as_mut_slice()?;
        // SAFETY: the view invariant guarantees `offset + length` is within
        // the storage, hence within `slice`.
        Some(unsafe { slice.get_unchecked_mut(offset..offset + self.length) })
    }

    /// Current reference count of the backing storage.
    pub fn storage_refcount(&self) -> u64 {
        self.storage.refcount()
    }

    /// Whether `other` views the exact same memory region (same start
    /// pointer and length); does not compare element values.
    pub fn is_same_buffer(&self, other: &Self) -> bool {
        self.ptr == other.ptr && self.length == other.length
    }
}
impl<T: Pod> Buffer<T> {
    /// Attempts to reinterpret this buffer as a buffer of another `Pod` type.
    ///
    /// On success the element count becomes
    /// `len * size_of::<T>() / size_of::<U>()`; any trailing bytes that do
    /// not fill a whole `U` are dropped by the integer division. On failure
    /// the original buffer is returned unchanged.
    ///
    /// # Panics
    /// Panics if `U` is zero-sized, or if the byte length overflows `usize`.
    pub fn try_transmute<U: Pod>(mut self) -> Result<Buffer<U>, Self> {
        // A zero-sized `U` would divide by zero below.
        assert_ne!(size_of::<U>(), 0);
        // Reinterpret the view's start pointer. Validity of the cast
        // (alignment, storage layout) is presumably vetted by
        // `SharedStorage::try_transmute` — confirm against that impl.
        let ptr = self.ptr as *const U;
        let length = self.length;
        match self.storage.try_transmute() {
            Err(v) => {
                // Restore the storage we moved out and hand the buffer back.
                self.storage = v;
                Err(self)
            },
            Ok(storage) => Ok(Buffer {
                storage,
                ptr,
                length: length.checked_mul(size_of::<T>()).expect("overflow") / size_of::<U>(),
            }),
        }
    }
}
impl<T: Clone> Buffer<T> {
    /// Converts the buffer into a `Vec<T>`, reusing the underlying
    /// allocation when possible and cloning the elements otherwise.
    pub fn to_vec(self) -> Vec<T> {
        match self.into_mut() {
            // Storage is still shared or sliced: clone the visible elements.
            Either::Left(shared) => shared.as_slice().to_vec(),
            // Sole unsliced owner: take the allocation as-is.
            Either::Right(owned) => owned,
        }
    }
}
/// A 4 KiB, 4096-byte-aligned chunk of bytes: the allocation unit of the
/// shared zero pool below. The large alignment lets `Buffer::zeroed` serve
/// any `T` whose alignment is at most 4096.
#[repr(C, align(4096))]
#[derive(Copy, Clone)]
struct Aligned([u8; 4096]);
/// Total size, in bytes, of the shared pool of zeroed memory (8 MiB).
const GLOBAL_ZERO_SIZE: usize = 8 * 1024 * 1024;

/// Lazily-initialized storage full of zero bytes, shared by every
/// `Buffer::zeroed` call whose request fits within it.
static GLOBAL_ZEROES: LazyLock<SharedStorage<Aligned>> = LazyLock::new(|| {
    assert!(GLOBAL_ZERO_SIZE.is_multiple_of(size_of::<Aligned>()));
    let chunks = GLOBAL_ZERO_SIZE / size_of::<Aligned>();
    let v = vec![Aligned([0; _]); chunks];
    let mut ss = SharedStorage::from_vec(v);
    // `leak` presumably pins the allocation for the program's lifetime so
    // clones of this storage are always cheap — confirm in SharedStorage.
    ss.leak();
    ss
});
impl<T: Zeroable> Buffer<T> {
    /// Returns a buffer of `length` zeroed values.
    ///
    /// Requests that fit in the shared 8 MiB zero pool (and whose `T` has
    /// alignment at most 4096) are served without allocating by slicing
    /// `GLOBAL_ZEROES`; larger requests fall back to a zeroed `Vec`.
    ///
    /// # Panics
    /// Panics if `length * size_of::<T>()` overflows `usize`.
    pub fn zeroed(length: usize) -> Self {
        // checked_mul: a wrapping multiply in release builds could make an
        // oversized request look small enough for the shared pool, handing
        // out a buffer longer than the zeroed storage behind it.
        let bytes_needed = length.checked_mul(size_of::<T>()).expect("overflow");
        if align_of::<T>() <= align_of::<Aligned>() && bytes_needed <= GLOBAL_ZERO_SIZE {
            // SAFETY: all-zero bits are a valid `T` (`T: Zeroable`), the pool
            // holds at least `bytes_needed` zeroed bytes, and its 4096-byte
            // alignment satisfies `align_of::<T>()` per the check above.
            unsafe {
                let storage = GLOBAL_ZEROES.clone().transmute_unchecked::<T>();
                let ptr = storage.as_ptr();
                Buffer {
                    storage,
                    ptr,
                    length,
                }
            }
        } else {
            bytemuck::zeroed_vec(length).into()
        }
    }
}
impl<T> From<Vec<T>> for Buffer<T> {
    /// Wraps the vector's allocation in shared storage.
    #[inline]
    fn from(v: Vec<T>) -> Self {
        Self::from_vec(v)
    }
}
// Smart-pointer-style deref to the viewed slice, so all `[T]` methods
// are available directly on `Buffer<T>`.
impl<T> Deref for Buffer<T> {
    type Target = [T];
    #[inline(always)]
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}
// Allows `Buffer<T>` wherever an `impl AsRef<[T]>` is accepted.
impl<T> AsRef<[T]> for Buffer<T> {
    #[inline(always)]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T> FromIterator<T> for Buffer<T> {
    /// Collects into an owned `Vec` first, then wraps its allocation.
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let collected: Vec<T> = iter.into_iter().collect();
        Self::from_vec(collected)
    }
}
#[cfg(feature = "serde")]
mod _serde_impl {
    use serde::{Deserialize, Serialize};
    use super::Buffer;

    // Serializes as a plain sequence, exactly like the underlying slice.
    impl<T> Serialize for Buffer<T>
    where
        T: Serialize,
    {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            <[T] as Serialize>::serialize(self.as_slice(), serializer)
        }
    }

    // Deserializes into an owned `Vec`, then wraps it without copying again.
    impl<'de, T> Deserialize<'de> for Buffer<T>
    where
        T: Deserialize<'de>,
    {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            <Vec<T> as Deserialize>::deserialize(deserializer).map(Buffer::from)
        }
    }
}
// By-value iteration over copies of the elements; the moved-in buffer keeps
// the storage alive for the iterator's lifetime. `T: Copy` keeps yielding cheap.
impl<T: Copy> IntoIterator for Buffer<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}
/// Double-ended, by-value iterator over a `Buffer` of `Copy` elements.
///
/// Invariant: `index <= end <= values.len()`.
#[derive(Debug, Clone)]
pub struct IntoIter<T: Copy> {
    /// The buffer being iterated; owns the backing storage.
    values: Buffer<T>,
    /// Front cursor: next index `next` will yield.
    index: usize,
    /// Back cursor (exclusive): `next_back` yields `end - 1` first.
    end: usize,
}
impl<T: Copy> IntoIter<T> {
    /// Creates an iterator spanning the entire buffer.
    #[inline]
    fn new(values: Buffer<T>) -> Self {
        Self {
            index: 0,
            end: values.len(),
            values,
        }
    }
}
impl<T: Copy> Iterator for IntoIter<T> {
    type Item = T;

    /// Yields a copy of the next element, advancing the front cursor.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index == self.end {
            return None;
        }
        let old = self.index;
        self.index += 1;
        // SAFETY: `old < end <= values.len()` by the struct invariant.
        Some(*unsafe { self.values.get_unchecked(old) })
    }

    /// Exact remaining count: the gap between the two cursors.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.end - self.index;
        (remaining, Some(remaining))
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // checked_add: `index + n` can overflow for huge `n`; a release-mode
        // wrap would land on a small in-range index and yield a wrong element
        // instead of exhausting the iterator. Overflow means past-the-end.
        match self.index.checked_add(n) {
            Some(new_index) if new_index <= self.end => {
                self.index = new_index;
                self.next()
            },
            _ => {
                self.index = self.end;
                None
            },
        }
    }
}
impl<T: Copy> DoubleEndedIterator for IntoIter<T> {
    /// Yields a copy of the last unconsumed element, retreating the back cursor.
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.index == self.end {
            return None;
        }
        self.end -= 1;
        // SAFETY: after the decrement, `end < values.len()` and
        // `end >= index`, so the access is in bounds.
        Some(*unsafe { self.values.get_unchecked(self.end) })
    }
}
// `size_hint` is exact (`end - index` on both bounds), so the default
// `ExactSizeIterator::len` is correct.
impl<T: Copy> ExactSizeIterator for IntoIter<T> {}