use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
use crate::Vec64;
use crate::structs::shared_buffer::SharedBuffer;
use crate::structs::shared_buffer::internal::vtable::{PROMO_EVEN_VT, PROMO64_EVEN_VT};
/// Reference-counted header that "promotes" a raw, uniquely-owned buffer
/// into a shared allocation on its first clone.
///
/// `#[repr(C)]` guarantees declaration-order layout, pinning `ref_cnt` at
/// offset 0 — code that only needs the count (see `promo_is_unique`) can
/// read it through a type-erased pointer regardless of the concrete `T`.
#[repr(C)]
pub(crate) struct PromotableVec<T> {
    // Number of live SharedBuffer handles referencing `inner`.
    pub(crate) ref_cnt: AtomicUsize,
    // The owning container (`Vec<u8>` or `Vec64<u8>` in this module);
    // dropped, along with its allocation, when `ref_cnt` reaches zero.
    pub(crate) inner: T,
}
/// Returns `true` when the handle `h` refers to at most one live owner:
/// either the buffer was never promoted (`h` holds null) or the promoted
/// header's reference count is exactly 1.
#[inline]
pub(crate) fn promo_is_unique<T>(h: &AtomicPtr<()>) -> bool {
    let raw = h.load(Ordering::Acquire);
    if raw.is_null() {
        // Unpromoted buffers have never been cloned, hence unique.
        return true;
    }
    // SAFETY: a non-null handle always points at a live PromotableVec
    // header installed by the promotion path; repr(C) keeps `ref_cnt`
    // at offset 0 for any `T`.
    let refs = unsafe { (*raw.cast::<PromotableVec<T>>()).ref_cnt.load(Ordering::Acquire) };
    refs == 1
}
/// Vtable clone hook for buffers backed by a std `Vec<u8>` allocation.
///
/// Two cases:
/// * `h` is null (never shared): promote — reconstruct the owning `Vec`
///   from `ptr`/`len`, box it behind a `PromotableVec` header, and publish
///   the header into `h`. The count starts at 2 because two handles now
///   reference the header: the original buffer (whose data slot `h` was
///   just written, and whose drop runs `promo_drop`) and the clone
///   returned here.
/// * `h` is non-null (already promoted): bump the count and alias the
///   existing header.
///
/// # Safety
/// `ptr`/`len` must describe the live allocation behind `h`, and `len`
/// must equal the original vec's capacity (`Vec::from_raw_parts` is UB
/// otherwise) — callers uphold this.
pub(crate) unsafe fn promo_clone(h: &AtomicPtr<()>, ptr: *const u8, len: usize) -> SharedBuffer {
    let raw = h.load(Ordering::Acquire);
    if raw.is_null() {
        let promoted = Box::into_raw(Box::new(PromotableVec::<Vec<u8>> {
            // BUGFIX: was 1. After promotion there are two live handles
            // (original + this clone) and both decrement in promo_drop;
            // starting at 1 frees the buffer while one handle is still
            // alive, and makes promo_is_unique report both as unique.
            ref_cnt: AtomicUsize::new(2),
            // SAFETY: caller guarantees ptr/len describe the unique live
            // allocation with capacity == len.
            inner: unsafe { Vec::from_raw_parts(ptr as *mut u8, len, len) },
        }));
        // NOTE(review): a plain store assumes the first clone of a given
        // handle cannot race with another clone; a compare_exchange (as in
        // bytes' promotable vtable) would be required otherwise — confirm
        // callers serialize the first clone.
        h.store(promoted.cast(), Ordering::Release);
        return SharedBuffer {
            ptr,
            len,
            data: AtomicPtr::new(promoted.cast()),
            vtable: &PROMO_EVEN_VT,
        };
    }
    // Already promoted: Relaxed suffices for the increment (as in
    // Arc::clone); the release/acquire pairing happens on the final drop.
    let header = unsafe { &*(raw as *const PromotableVec<Vec<u8>>) };
    header.ref_cnt.fetch_add(1, Ordering::Relaxed);
    SharedBuffer {
        ptr,
        len,
        data: AtomicPtr::new(raw),
        vtable: &PROMO_EVEN_VT,
    }
}
/// Vtable drop hook for promoted std-`Vec` buffers: decrement the shared
/// count and free the header (and its owned `Vec`) when this was the last
/// live handle. A null handle means the buffer was never promoted, so this
/// handle owns nothing to release here.
///
/// # Safety
/// `h` must be the data slot of a buffer managed by this vtable; `_p`/`_l`
/// are unused.
pub(crate) unsafe fn promo_drop(h: &mut AtomicPtr<()>, _p: *const u8, _l: usize) {
    let raw = h.load(Ordering::Acquire);
    if raw.is_null() {
        return;
    }
    let header = unsafe { &*raw.cast::<PromotableVec<Vec<u8>>>() };
    let previous = header.ref_cnt.fetch_sub(1, Ordering::AcqRel);
    if previous == 1 {
        // Last handle gone. AcqRel on the RMW orders every earlier use of
        // the buffer before this deallocation.
        // SAFETY: the header came from Box::into_raw during promotion and
        // no other handle remains.
        drop(unsafe { Box::from_raw(raw.cast::<PromotableVec<Vec<u8>>>()) });
    }
}
/// Vtable clone hook for buffers backed by a `Vec64<u8>` allocation —
/// the 64-byte-aligned counterpart of `promo_clone`.
///
/// Two cases:
/// * `h` is null (never shared): promote — reconstruct the owning `Vec64`
///   from `ptr`/`len`, box it behind a `PromotableVec` header, and publish
///   the header into `h`. The count starts at 2 because two handles now
///   reference the header: the original buffer (whose data slot `h` was
///   just written, and whose drop runs `promo64_drop`) and the clone
///   returned here.
/// * `h` is non-null (already promoted): bump the count and alias the
///   existing header.
///
/// # Safety
/// `ptr`/`len` must describe the live allocation behind `h`, and `len`
/// must equal the original vec's capacity — callers uphold this.
pub(crate) unsafe fn promo64_clone(h: &AtomicPtr<()>, ptr: *const u8, len: usize) -> SharedBuffer {
    let raw = h.load(Ordering::Acquire);
    if raw.is_null() {
        let promoted = Box::into_raw(Box::new(PromotableVec::<Vec64<u8>> {
            // BUGFIX: was 1. After promotion there are two live handles
            // (original + this clone) and both decrement in promo64_drop;
            // starting at 1 frees the buffer while one handle is still
            // alive, and makes promo_is_unique report both as unique.
            ref_cnt: AtomicUsize::new(2),
            // SAFETY: caller guarantees ptr/len describe the unique live
            // allocation with capacity == len.
            inner: unsafe { Vec64::from_raw_parts(ptr as *mut u8, len, len) },
        }));
        // NOTE(review): a plain store assumes the first clone of a given
        // handle cannot race with another clone; a compare_exchange (as in
        // bytes' promotable vtable) would be required otherwise — confirm
        // callers serialize the first clone.
        h.store(promoted.cast(), Ordering::Release);
        return SharedBuffer {
            ptr,
            len,
            data: AtomicPtr::new(promoted.cast()),
            vtable: &PROMO64_EVEN_VT,
        };
    }
    // Already promoted: Relaxed suffices for the increment (as in
    // Arc::clone); the release/acquire pairing happens on the final drop.
    let header = unsafe { &*(raw as *const PromotableVec<Vec64<u8>>) };
    header.ref_cnt.fetch_add(1, Ordering::Relaxed);
    SharedBuffer {
        ptr,
        len,
        data: AtomicPtr::new(raw),
        vtable: &PROMO64_EVEN_VT,
    }
}
/// Vtable drop hook for promoted `Vec64`-backed buffers: decrement the
/// shared count and free the header (and its owned `Vec64`) when this was
/// the last live handle. A null handle means the buffer was never
/// promoted, so this handle owns nothing to release here.
///
/// # Safety
/// `h` must be the data slot of a buffer managed by this vtable; `_p`/`_l`
/// are unused.
pub(crate) unsafe fn promo64_drop(h: &mut AtomicPtr<()>, _p: *const u8, _l: usize) {
    let raw = h.load(Ordering::Acquire);
    if raw.is_null() {
        return;
    }
    let header = unsafe { &*raw.cast::<PromotableVec<Vec64<u8>>>() };
    let previous = header.ref_cnt.fetch_sub(1, Ordering::AcqRel);
    if previous == 1 {
        // Last handle gone. AcqRel on the RMW orders every earlier use of
        // the buffer before this deallocation.
        // SAFETY: the header came from Box::into_raw during promotion and
        // no other handle remains.
        drop(unsafe { Box::from_raw(raw.cast::<PromotableVec<Vec64<u8>>>()) });
    }
}