use std::mem::ManuallyDrop;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::{ptr, slice};
use crate::Vec64;
use crate::structs::shared_buffer::SharedBuffer;
use crate::structs::shared_buffer::internal::pvec::{
PromotableVec, promo_clone, promo_drop, promo_is_unique, promo64_clone, promo64_drop,
};
/// Dispatch table giving a `SharedBuffer` its storage-specific behaviour
/// (static slice vs. promotable `Vec<u8>` vs. promotable `Vec64<u8>`).
///
/// Every entry receives the buffer's atomic data slot (`&AtomicPtr<()>`)
/// and, where relevant, the visible byte range as `(*const u8, usize)`.
pub(crate) struct Vtable {
    // Produce a new `SharedBuffer` handle over the same bytes.
    pub(crate) clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> SharedBuffer,
    // Release this handle's claim on the storage. Note: unlike the other
    // entries this takes `&mut`, matching the exclusive access of Drop.
    pub(crate) drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
    // Whether this handle is the sole owner of the backing storage.
    pub(crate) is_unique: unsafe fn(&AtomicPtr<()>) -> bool,
    // Convert to an owned `Vec<u8>` (implementations may reclaim the
    // backing allocation instead of copying when the handle is unique).
    pub(crate) to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
    // Convert to an owned crate `Vec64<u8>`, analogous to `to_vec`.
    pub(crate) to_vec64: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec64<u8>,
    #[cfg(all(target_os = "linux", feature = "memfd"))]
    // Raw memfd file descriptor backing the buffer, if any. All vtables in
    // this file return `None`; presumably a memfd-backed vtable elsewhere
    // returns `Some` — confirm against the memfd module.
    pub(crate) memfd_fd: unsafe fn(&AtomicPtr<()>) -> Option<i32>,
}
/// Vtable for buffers backed by `'static` data: cloning is a plain field
/// copy, dropping is a no-op, and both conversions copy the bytes out
/// (static storage can never be reclaimed into an owned vector).
pub(crate) static STATIC_VT: Vtable = Vtable {
    // No refcount to bump: a clone just re-points at the same bytes with
    // an empty data slot and the same static vtable.
    clone: |_, src, n| SharedBuffer {
        ptr: src,
        len: n,
        data: AtomicPtr::new(ptr::null_mut()),
        vtable: &STATIC_VT,
    },
    // Nothing to release for static storage.
    drop: |_, _, _| {},
    // Static buffers always report unique; there is no shared ownership
    // state to consult.
    is_unique: |_| true,
    to_vec: |_, src, n| unsafe { slice::from_raw_parts(src, n).to_vec() },
    to_vec64: |_, src, n| {
        let mut out = Vec64::with_capacity(n);
        unsafe { out.extend_from_slice(slice::from_raw_parts(src, n)) };
        out
    },
    #[cfg(all(target_os = "linux", feature = "memfd"))]
    memfd_fd: |_| None,
};
/// Vtable for promotable buffers backed by a `Vec<u8>` ("even" variant;
/// the odd/even split is presumably a handle-tagging scheme — see
/// `promo_clone`/`promo_drop` for the authoritative behaviour).
pub(crate) static PROMO_EVEN_VT: Vtable = Vtable {
    clone: promo_clone,
    drop: promo_drop,
    is_unique: |h| promo_is_unique::<Vec<u8>>(h),
    // Convert to `Vec<u8>`. If this handle is the sole owner, reclaim the
    // original `Vec` without copying: atomically take the boxed
    // `PromotableVec` out of the data slot (leaving null behind so the
    // later vtable `drop` finds nothing to free) and move its inner `Vec`
    // out. Otherwise fall back to copying the visible byte range.
    //
    // NOTE(review): the reclaim path ignores `p`/`l`, i.e. it assumes the
    // inner `Vec` spans exactly the visible range — confirm that promoted
    // buffers are never sliced.
    to_vec: |h, p, l| {
        if promo_is_unique::<Vec<u8>>(h) {
            // Unique, so no concurrent clone can race this swap
            // (presumably cloning requires another live handle — confirm
            // against promo_clone).
            let raw = h.swap(ptr::null_mut(), Ordering::AcqRel);
            if !raw.is_null() {
                // SAFETY: `raw` came from `Box::into_raw` of a
                // `PromotableVec<Vec<u8>>` (set up at promotion time) and
                // we are the unique owner, so rebuilding the Box is sound.
                return unsafe { Box::from_raw(raw as *mut PromotableVec<Vec<u8>>).inner };
            }
        }
        // Shared (or already-detached) handle: copy the bytes.
        unsafe { slice::from_raw_parts(p, l) }.to_vec()
    },
    // Convert to `Vec64<u8>`: always copies, since the backing storage is
    // a plain `Vec<u8>` and cannot be reused as a `Vec64` allocation.
    to_vec64: |_, p, l| {
        let mut v = Vec64::with_capacity(l);
        unsafe {
            v.extend_from_slice(slice::from_raw_parts(p, l));
        }
        v
    },
    #[cfg(all(target_os = "linux", feature = "memfd"))]
    // Promotable vec storage is ordinary heap memory, never a memfd.
    memfd_fd: |_| None,
};
/// "Odd" variant of [`PROMO_EVEN_VT`]. Behaviour is identical; the two
/// statics exist as distinct vtable addresses (presumably so an even/odd
/// handle tag can be distinguished by vtable identity — confirm against
/// the promo helpers).
///
/// Fix: the previous explicit `memfd_fd: |_| None` override duplicated the
/// value already supplied by the `..PROMO_EVEN_VT` struct update (and
/// created a needlessly distinct closure); it has been removed.
pub(crate) static PROMO_ODD_VT: Vtable = Vtable { ..PROMO_EVEN_VT };
/// Vtable for promotable buffers backed by a `Vec64<u8>` ("even" variant).
pub(crate) static PROMO64_EVEN_VT: Vtable = Vtable {
    clone: promo64_clone,
    drop: promo64_drop,
    is_unique: |h| promo_is_unique::<Vec64<u8>>(h),
    // Convert to a std `Vec<u8>`. Even when unique we must copy (a `Vec64`
    // allocation cannot be handed off as a plain `Vec`), but we still
    // detach the backing `Vec64` and let it drop so its buffer is freed.
    //
    // Fix: the previous code wrapped the reclaimed `Vec64` in
    // `ManuallyDrop` before copying, leaking its allocation on every
    // unique-handle `to_vec` call. The `Vec64` is owned here — the
    // symmetric reclaim in `to_vec64` below returns the same value by
    // value to be dropped normally — so dropping it after the copy is the
    // correct ownership model.
    to_vec: |h, p, l| {
        if promo_is_unique::<Vec64<u8>>(h) {
            let raw = h.swap(ptr::null_mut(), Ordering::AcqRel);
            if !raw.is_null() {
                // SAFETY: unique handle; `raw` is the boxed
                // `PromotableVec<Vec64<u8>>` installed at promotion time,
                // so reconstructing the Box is sound.
                let owned = unsafe { Box::from_raw(raw as *mut PromotableVec<Vec64<u8>>).inner };
                // Copy the bytes out; `owned` drops here, freeing the
                // Vec64 allocation (previously leaked).
                return owned.to_vec();
            }
        }
        // Shared (or already-detached) handle: copy the visible range.
        unsafe { slice::from_raw_parts(p, l) }.to_vec()
    },
    // Convert to `Vec64<u8>`: if unique, reclaim the original `Vec64`
    // without copying (nulling the slot so the later vtable `drop` finds
    // nothing to free); otherwise copy into a fresh `Vec64`.
    to_vec64: |h, p, l| {
        if promo_is_unique::<Vec64<u8>>(h) {
            let raw = h.swap(ptr::null_mut(), Ordering::AcqRel);
            if !raw.is_null() {
                // SAFETY: as above — unique owner of the boxed value.
                return unsafe { Box::from_raw(raw as *mut PromotableVec<Vec64<u8>>).inner };
            }
        }
        let mut v = Vec64::with_capacity(l);
        unsafe {
            v.extend_from_slice(slice::from_raw_parts(p, l));
        }
        v
    },
    #[cfg(all(target_os = "linux", feature = "memfd"))]
    // Promotable vec storage is ordinary heap memory, never a memfd.
    memfd_fd: |_| None,
};
/// "Odd" variant of [`PROMO64_EVEN_VT`]. Behaviour is identical; the two
/// statics exist as distinct vtable addresses (presumably so an even/odd
/// handle tag can be distinguished by vtable identity — confirm against
/// the promo helpers).
///
/// Fix: the previous explicit `memfd_fd: |_| None` override duplicated the
/// value already supplied by the `..PROMO64_EVEN_VT` struct update (and
/// created a needlessly distinct closure); it has been removed.
pub(crate) static PROMO64_ODD_VT: Vtable = Vtable { ..PROMO64_EVEN_VT };