use crate::std_prelude::*;
use super::OpaqueStore;
use crate::error::Error;
pub use cglue::task::{CWaker, FastCWaker};
use core::cell::UnsafeCell;
use core::future::Future;
use core::marker::{PhantomData, PhantomPinned};
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::num::NonZeroI32;
use core::pin::Pin;
use core::sync::atomic::*;
use core::task::{Context, Poll};
use rangemap::RangeSet;
use tarc::BaseArc;
mod output;
pub use output::*;
mod view;
pub use view::*;
// Flag bits packed into the high bits of `RcAndWaker::rc_and_flags`; the
// remaining low bits hold the count of in-flight packet segment references.
const LOCK_BIT: u64 = 1 << 63; // spinlock bit guarding the waker slot
const HAS_WAKER_BIT: u64 = 1 << 62; // a `CWaker` is currently stored
const FINALIZED_BIT: u64 = 1 << 61; // releasing side has finished touching the header
const ALL_BITS: u64 = LOCK_BIT | HAS_WAKER_BIT | FINALIZED_BIT;

/// Combined reference counter and waker storage for a packet header.
///
/// A single atomic word multiplexes the refcount with the flag bits above;
/// the waker cell is only accessed while `LOCK_BIT` is held.
struct RcAndWaker {
    // Low bits: refcount. High bits: LOCK/HAS_WAKER/FINALIZED flags.
    rc_and_flags: AtomicU64,
    // Contents valid only while HAS_WAKER_BIT is set; guarded by LOCK_BIT.
    waker: UnsafeCell<MaybeUninit<CWaker>>,
}
impl Default for RcAndWaker {
fn default() -> Self {
Self {
rc_and_flags: 0.into(),
waker: UnsafeCell::new(MaybeUninit::uninit()),
}
}
}
impl core::fmt::Debug for RcAndWaker {
    /// Prints `true`/`false` depending on whether a waker is currently stored.
    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
        let flags = self.rc_and_flags.load(Ordering::Relaxed);
        let has_waker = (flags & HAS_WAKER_BIT) != 0;
        write!(fmt, "{}", has_waker)
    }
}
impl RcAndWaker {
    /// Spins until `LOCK_BIT` is acquired. Returns whether a waker is stored.
    fn acquire(&self) -> bool {
        (loop {
            let flags = self.rc_and_flags.fetch_or(LOCK_BIT, Ordering::AcqRel);
            if (flags & LOCK_BIT) == 0 {
                break flags;
            }
            // Lock is contended — spin on a plain load before retrying the
            // read-modify-write, to avoid hammering the cache line.
            while self.rc_and_flags.load(Ordering::Relaxed) & LOCK_BIT != 0 {
                core::hint::spin_loop();
            }
        } & HAS_WAKER_BIT)
            != 0
    }
    /// Removes and returns the stored waker, if any.
    pub fn take(&self) -> Option<CWaker> {
        let ret = if self.acquire() {
            // SAFETY: HAS_WAKER_BIT was set and we hold the lock, so the slot
            // holds an initialized waker we may move out of.
            Some(unsafe { (*self.waker.get()).assume_init_read() })
        } else {
            None
        };
        // Drop the lock and clear the waker flag in a single step.
        self.rc_and_flags
            .fetch_and(!(LOCK_BIT | HAS_WAKER_BIT), Ordering::Release);
        ret
    }
    /// Stores `waker` (dropping any previous one) and returns the current
    /// refcount with the flag bits masked off.
    pub fn write(&self, waker: CWaker) -> u64 {
        if self.acquire() {
            // SAFETY: lock held and a previous waker exists — destroy it
            // before overwriting the slot.
            unsafe { core::ptr::drop_in_place((*self.waker.get()).as_mut_ptr()) }
        }
        // SAFETY: lock held; the slot is ours to overwrite.
        unsafe { *self.waker.get() = MaybeUninit::new(waker) };
        self.rc_and_flags.fetch_or(HAS_WAKER_BIT, Ordering::Relaxed);
        // Releasing the lock; the returned value is the refcount at that instant.
        self.rc_and_flags.fetch_and(!LOCK_BIT, Ordering::AcqRel) & !ALL_BITS
    }
    /// Current refcount (flag bits masked off).
    pub fn acquire_rc(&self) -> u64 {
        self.rc_and_flags.load(Ordering::Acquire) & !ALL_BITS
    }
    /// Decrements the refcount; returns (previous count, waker present).
    pub fn dec_rc(&self) -> (u64, bool) {
        let ret = self.rc_and_flags.fetch_sub(1, Ordering::AcqRel);
        (ret & !ALL_BITS, (ret & HAS_WAKER_BIT) != 0)
    }
    /// Increments the refcount; returns the previous count.
    pub fn inc_rc(&self) -> u64 {
        self.rc_and_flags.fetch_add(1, Ordering::AcqRel) & !ALL_BITS
    }
    /// Marks the header as fully released (pairs with `wait_finalize`).
    pub fn finalize(&self) {
        self.rc_and_flags.fetch_or(FINALIZED_BIT, Ordering::Release);
    }
    /// Spins until the releasing side has called `finalize`.
    pub fn wait_finalize(&self) {
        while (self.rc_and_flags.load(Ordering::Acquire) & FINALIZED_BIT) == 0 {
            core::hint::spin_loop();
        }
    }
}
/// A packet whose payload is stored inline, directly after the header.
#[repr(C)]
pub struct FullPacket<T, Perms: PacketPerms> {
    header: Packet<Perms>,
    // Length-prefixed inline payload; must immediately follow the header so
    // `Packet::simple_len`/`simple_data` can find it by pointer arithmetic.
    data: PackedLenData<T>,
}
impl<T, Perms: PacketPerms> FullPacket<T, Perms> {
    /// Builds an inline packet from `val`. The `Pod` bound guarantees every
    /// byte pattern of `T` is valid, making the unchecked constructor sound.
    pub fn new(val: T) -> Self
    where
        T: bytemuck::Pod,
    {
        unsafe { Self::new_unchecked(val) }
    }
    /// Builds an inline packet without validating `T`'s byte representation.
    pub unsafe fn new_unchecked(data: T) -> Self {
        FullPacket {
            header: Packet::new_hdr(PacketVtblRef {
                tag: PacketVtblTag::SimpleDirect as _,
            }),
            data: PackedLenData {
                len: core::mem::size_of::<T>(),
                data,
            },
        }
    }
}
impl<T: bytemuck::Pod> FullPacket<MaybeUninit<T>, Write> {
    /// A write packet with uninitialized inline storage for `T`.
    pub fn new_uninit() -> Self {
        unsafe { Self::new_unchecked(MaybeUninit::uninit()) }
    }
}
impl<T, Perms: PacketPerms> AsRef<Packet<Perms>> for FullPacket<T, Perms> {
fn as_ref(&self) -> &Packet<Perms> {
&self.header
}
}
impl<T, Perms: PacketPerms> core::ops::Deref for FullPacket<T, Perms> {
type Target = Packet<Perms>;
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
/// A packet owning a `Vec`'s buffer; the vector can be rebuilt via `take`.
#[repr(C)]
pub struct VecPacket<Perms: PacketPerms> {
    header: Packet<Perms>,
    // Pointer + length of the leaked vector (simple-indirect layout).
    data: PackedLenData<Perms::DataType>,
    // Original `Vec` capacity, required to rebuild/free the allocation.
    capacity: usize,
    // Frees the buffer: called with (data, len, capacity) on drop.
    drop: unsafe extern "C" fn(Perms::DataType, usize, usize),
}
// SAFETY: NOTE(review) — assumed sound because the raw buffer pointer is
// uniquely owned by this packet and access goes through the packet system;
// mirrors the manual impls on `OwnedPacket`/`Packet` in this file.
unsafe impl<Perms: PacketPerms> Send for VecPacket<Perms> {}
unsafe impl<Perms: PacketPerms> Sync for VecPacket<Perms> {}
impl<Perms: PacketPerms> AsRef<Packet<Perms>> for VecPacket<Perms> {
fn as_ref(&self) -> &Packet<Perms> {
&self.header
}
}
impl<Perms: PacketPerms> core::ops::Deref for VecPacket<Perms> {
type Target = Packet<Perms>;
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
impl VecPacket<Read> {
    /// Reassembles the original `Vec<u8>`, bypassing this packet's `Drop`.
    pub fn take(self) -> Vec<u8> {
        // SAFETY: pointer/len/capacity originate from a leaked `Vec<u8>`
        // (see the `From<Vec<u8>>` constructor below).
        let vec = unsafe {
            Vec::from_raw_parts(
                self.data.data.cast_mut().cast(),
                self.data.len,
                self.capacity,
            )
        };
        // Ownership moved into `vec` — skip `Drop`, which would free the buffer.
        core::mem::forget(self);
        vec
    }
}
impl VecPacket<Write> {
    /// Reassembles the original byte vector (possibly still uninitialized).
    pub fn take(self) -> Vec<MaybeUninit<u8>> {
        // SAFETY: parts originate from a leaked byte-sized `Vec` (see `From`).
        let vec =
            unsafe { Vec::from_raw_parts(self.data.data.cast(), self.data.len, self.capacity) };
        core::mem::forget(self);
        vec
    }
}
impl VecPacket<ReadWrite> {
    /// Reassembles the original `Vec<u8>`, bypassing this packet's `Drop`.
    pub fn take(self) -> Vec<u8> {
        // SAFETY: parts originate from a leaked `Vec<u8>`.
        let vec =
            unsafe { Vec::from_raw_parts(self.data.data.cast(), self.data.len, self.capacity) };
        core::mem::forget(self);
        vec
    }
}
impl<Perms: PacketPerms> Drop for VecPacket<Perms> {
    fn drop(&mut self) {
        // SAFETY: the stored drop fn matches the buffer's original allocator
        // (installed by the `From` constructors) and runs exactly once.
        unsafe {
            (self.drop)(self.data.data, self.data.len, self.capacity);
        }
    }
}
impl From<Vec<u8>> for VecPacket<Read> {
    /// Takes ownership of `vec`'s buffer without copying.
    fn from(mut vec: Vec<u8>) -> Self {
        /// Rebuilds and drops the original `Vec` to free the buffer.
        unsafe extern "C" fn drop(
            data: <Read as PacketPerms>::DataType,
            len: usize,
            capacity: usize,
        ) {
            let _ = Vec::from_raw_parts(data.cast_mut().cast::<u8>(), len, capacity);
        }
        let data = vec.as_mut_ptr();
        let len = vec.len();
        let capacity = vec.capacity();
        // Buffer ownership transfers to the packet; freed via `drop` above.
        core::mem::forget(vec);
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len,
                data: data.cast(),
            },
            capacity,
            drop,
        }
    }
}
impl<T: AnyBytes> From<Vec<T>> for VecPacket<Write> {
    /// Takes ownership of a byte-sized vector (`u8` or `MaybeUninit<u8>`).
    fn from(mut vec: Vec<T>) -> Self {
        /// Frees the buffer. Rebuilding as `Vec<u8>` is fine because every
        /// `AnyBytes` type has the same size/alignment as `u8`.
        unsafe extern "C" fn drop(
            data: <Write as PacketPerms>::DataType,
            len: usize,
            capacity: usize,
        ) {
            let _ = Vec::from_raw_parts(data.cast::<u8>(), len, capacity);
        }
        let data = vec.as_mut_ptr();
        let len = vec.len();
        let capacity = vec.capacity();
        // Buffer ownership transfers to the packet; freed via `drop` above.
        core::mem::forget(vec);
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len,
                data: data.cast(),
            },
            capacity,
            drop,
        }
    }
}
/// A packet owning a boxed slice; freed through the stored `drop` fn.
#[repr(C)]
pub struct OwnedPacket<Perms: PacketPerms> {
    header: Packet<Perms>,
    // Pointer + length of the leaked boxed slice (simple-indirect layout).
    data: PackedLenData<Perms::DataType>,
    // Frees the buffer: called with (data, len) on drop.
    drop: unsafe extern "C" fn(Perms::DataType, usize),
}
// SAFETY: NOTE(review) — assumed sound for the same reasons as `VecPacket`:
// the raw pointer is uniquely owned and access is mediated by the packet system.
unsafe impl<Perms: PacketPerms> Send for OwnedPacket<Perms> {}
unsafe impl<Perms: PacketPerms> Sync for OwnedPacket<Perms> {}
impl<Perms: PacketPerms> Drop for OwnedPacket<Perms> {
    fn drop(&mut self) {
        // SAFETY: the drop fn matches the original allocation (installed by
        // the `From` constructors) and runs exactly once.
        unsafe {
            (self.drop)(self.data.data, self.data.len);
        }
    }
}
impl From<Box<[u8]>> for OwnedPacket<Read> {
    /// Takes ownership of a boxed byte slice without copying.
    fn from(slc: Box<[u8]>) -> Self {
        /// Rebuilds and drops the original box to free the buffer.
        unsafe extern "C" fn drop(data: <Read as PacketPerms>::DataType, len: usize) {
            let _ = Box::from_raw(core::slice::from_raw_parts_mut(
                data.cast_mut().cast::<u8>(),
                len,
            ));
        }
        let data = Box::leak(slc);
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len: data.len(),
                data: data as *const [u8] as *const (),
            },
            drop,
        }
    }
}
impl From<Box<[u8]>> for OwnedPacket<ReadWrite> {
    /// Takes ownership of a boxed byte slice for read-write I/O.
    fn from(slc: Box<[u8]>) -> Self {
        /// Rebuilds and drops the original box to free the buffer.
        unsafe extern "C" fn drop(data: <ReadWrite as PacketPerms>::DataType, len: usize) {
            let _ = Box::from_raw(core::slice::from_raw_parts_mut(data.cast::<u8>(), len));
        }
        let data = Box::leak(slc);
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len: data.len(),
                data: data as *mut [u8] as *mut (),
            },
            drop,
        }
    }
}
impl<T: AnyBytes> From<Box<[T]>> for OwnedPacket<Write> {
    /// Takes ownership of a boxed byte-sized slice (`u8` or `MaybeUninit<u8>`).
    fn from(slc: Box<[T]>) -> Self {
        /// Frees the buffer; `AnyBytes` types share `u8`'s size/alignment.
        unsafe extern "C" fn drop(data: <Write as PacketPerms>::DataType, len: usize) {
            let _ = Box::from_raw(core::slice::from_raw_parts_mut(
                data.cast::<MaybeUninit<u8>>(),
                len,
            ));
        }
        let data = Box::leak(slc);
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len: data.len(),
                data: data as *mut [T] as *mut (),
            },
            drop,
        }
    }
}
impl<Perms: PacketPerms> AsRef<Packet<Perms>> for OwnedPacket<Perms> {
fn as_ref(&self) -> &Packet<Perms> {
&self.header
}
}
impl<Perms: PacketPerms> core::ops::Deref for OwnedPacket<Perms> {
type Target = Packet<Perms>;
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
/// A packet borrowing a caller-owned slice for lifetime `'a`.
///
/// Only constructible under `mfio_assume_linear_types`, where the type system
/// guarantees the packet cannot outlive the borrow.
#[repr(C)]
pub struct RefPacket<'a, Perms: PacketPerms> {
    header: Packet<Perms>,
    // Pointer + length of the borrowed slice (simple-indirect layout).
    data: PackedLenData<Perms::DataType>,
    // Ties the packet to the borrowed buffer's lifetime.
    _phantom: PhantomData<&'a mut u8>,
}
#[cfg(mfio_assume_linear_types)]
impl<'a> From<&'a [u8]> for RefPacket<'a, Read> {
    /// Wraps a shared byte slice as a read packet.
    fn from(slc: &'a [u8]) -> Self {
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len: slc.len(),
                data: slc.as_ptr().cast(),
            },
            _phantom: PhantomData,
        }
    }
}
#[cfg(mfio_assume_linear_types)]
impl<'a, T: AnyBytes> From<&'a mut [T]> for RefPacket<'a, Write> {
    /// Wraps an exclusive byte-sized slice as a write packet.
    fn from(slc: &'a mut [T]) -> Self {
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len: slc.len(),
                data: slc.as_mut_ptr().cast(),
            },
            _phantom: PhantomData,
        }
    }
}
#[cfg(mfio_assume_linear_types)]
impl<'a> From<&'a mut [u8]> for RefPacket<'a, ReadWrite> {
    /// Wraps an exclusive byte slice as a read-write packet.
    fn from(slc: &'a mut [u8]) -> Self {
        Self {
            header: unsafe {
                Packet::new_hdr(PacketVtblRef {
                    tag: PacketVtblTag::SimpleIndirect as _,
                })
            },
            data: PackedLenData {
                len: slc.len(),
                data: slc.as_mut_ptr().cast(),
            },
            _phantom: PhantomData,
        }
    }
}
impl<Perms: PacketPerms> AsRef<Packet<Perms>> for RefPacket<'_, Perms> {
fn as_ref(&self) -> &Packet<Perms> {
&self.header
}
}
impl<Perms: PacketPerms> core::ops::Deref for RefPacket<'_, Perms> {
type Target = Packet<Perms>;
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
// Length-prefixed payload layout shared by all simple packet types. `packed`
// ensures the payload follows `len` with no padding, matching the pointer
// arithmetic in `Packet::simple_len`/`simple_data`.
#[repr(C, packed)]
struct PackedLenData<T> {
    len: usize,
    data: T,
}
/// Discriminant for [`PacketVtblRef`]. Values 0/1 mark simple packets; any
/// other value is interpreted as a vtable pointer (`Complex`).
#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub enum PacketVtblTag {
    SimpleDirect = 0,   // payload stored inline after the header
    SimpleIndirect = 1, // payload behind a pointer stored after the header
    Complex,            // behavior defined by a permission vtable
}
/// Tagged pointer: either a small integer tag or a `&'static` vtable.
/// Relies on vtable references never having addresses 0 or 1.
#[derive(Clone, Copy)]
pub union PacketVtblRef<Perms: PacketPerms> {
    pub tag: usize,
    pub vtbl: &'static Perms,
}
impl<Perms: PacketPerms> core::fmt::Debug for PacketVtblRef<Perms> {
    /// Formats the vtable for complex packets, otherwise the tag itself.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        let tag = self.tag();
        if tag == PacketVtblTag::Complex {
            // SAFETY: a `Complex` tag means the union holds a valid vtable reference.
            unsafe { self.vtbl }.fmt(f)
        } else {
            tag.fmt(f)
        }
    }
}
impl<Perms: PacketPerms> PacketVtblRef<Perms> {
    /// Decodes the discriminant stored in the tagged union.
    pub fn tag(&self) -> PacketVtblTag {
        // SAFETY: reading `tag` as an integer is always valid — vtable
        // references occupy addresses above the reserved tag values.
        let raw = unsafe { self.tag };
        if raw == PacketVtblTag::SimpleDirect as usize {
            PacketVtblTag::SimpleDirect
        } else if raw == PacketVtblTag::SimpleIndirect as usize {
            PacketVtblTag::SimpleIndirect
        } else {
            PacketVtblTag::Complex
        }
    }
    /// The vtable for complex packets, `None` for simple ones.
    pub fn vtbl(&self) -> Option<&'static Perms> {
        match self.tag() {
            // SAFETY: `Complex` guarantees the union holds the vtable variant.
            PacketVtblTag::Complex => unsafe { Some(self.vtbl) },
            _ => None,
        }
    }
}
/// Shared packet header: vtable/tag, refcount + waker, and error tracking.
#[repr(C)]
#[derive(Debug)]
pub struct Packet<Perms: PacketPerms> {
    vtbl: PacketVtblRef<Perms>,
    rc_and_waker: RcAndWaker,
    // Lowest byte offset at which an error occurred; `!0` when no error.
    error_clamp: AtomicU64,
    // Error code recorded at the `error_clamp` position (0 = none).
    min_error: AtomicI32,
    // In-flight views hold raw pointers to this header — it must not move.
    _phantom: PhantomPinned,
}
impl<Perms: PacketPerms> AsRef<Self> for Packet<Perms> {
    fn as_ref(&self) -> &Self {
        self
    }
}
// SAFETY: NOTE(review) — assumed sound: all interior mutability goes through
// atomics, and the waker cell is protected by the `RcAndWaker` spinlock.
unsafe impl<Perms: PacketPerms> Send for Packet<Perms> {}
unsafe impl<Perms: PacketPerms> Sync for Packet<Perms> {}
impl<Perms: PacketPerms> Drop for Packet<Perms> {
    fn drop(&mut self) {
        let loaded = self.rc_and_waker.acquire_rc();
        // Dropping with live segment references would leave dangling views.
        assert_eq!(loaded, 0, "The packet has in-flight segments.");
    }
}
impl<'a, Perms: PacketPerms> Future for &'a Packet<Perms> {
    type Output = ();
    /// Resolves once every in-flight segment has been released.
    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        let this = Pin::into_inner(self);
        // Install the waker first; `write` returns the refcount observed while
        // holding the lock, so a concurrent release cannot be missed.
        let rc = this.rc_and_waker.write(cx.waker().clone().into());
        if rc == 0 {
            // All segments done — wait for the releasing thread to stop
            // touching the header before reporting readiness.
            this.rc_and_waker.wait_finalize();
            return Poll::Ready(());
        }
        Poll::Pending
    }
}
impl<Perms: PacketPerms> Packet<Perms> {
    /// Number of in-flight segment references.
    pub fn rc(&self) -> usize {
        (self.rc_and_waker.acquire_rc()) as usize
    }
    /// Called when a bound segment completes: records the error (if any),
    /// drops one reference, and — on the final reference — finalizes the
    /// header, returning the stored waker for the caller to wake.
    unsafe fn on_output(&self, error: Option<(u64, NonZeroI32)>) -> Option<CWaker> {
        if let Some((start, error)) = error {
            // Only the failure at the lowest offset is kept; the error code is
            // updated only when this failure actually lowers the clamp.
            if self.error_clamp.fetch_min(start, Ordering::AcqRel) > start {
                self.min_error.store(error.into(), Ordering::Relaxed);
            }
        }
        let (prev, has_waker) = self.rc_and_waker.dec_rc();
        if prev != 1 {
            return None;
        }
        let ret = if has_waker {
            self.rc_and_waker.take()
        } else {
            None
        };
        // Unblocks `wait_finalize` in the polling future.
        self.rc_and_waker.finalize();
        ret
    }
    /// Called when the packet is bound to a view; the refcount must be 0
    /// (the packet cannot already be in flight).
    unsafe fn on_add_to_view(&self) {
        let rc = self.rc_and_waker.inc_rc();
        if rc != 0 {
            // Undo the increment before panicking to keep the count consistent.
            self.rc_and_waker.dec_rc();
            assert_eq!(rc, 0);
        }
    }
    /// Clears recorded error state so the packet can be reused.
    pub unsafe fn reset_err(&self) {
        self.error_clamp.store(!0u64, Ordering::Release);
        self.min_error.store(0, Ordering::Release);
    }
    /// Builds a fresh header with the given vtable/tag and no error recorded.
    pub unsafe fn new_hdr(vtbl: PacketVtblRef<Perms>) -> Self {
        Packet {
            vtbl,
            rc_and_waker: Default::default(),
            error_clamp: (!0u64).into(),
            min_error: 0.into(),
            _phantom: PhantomPinned,
        }
    }
    /// Pointer to the `len` field of the `PackedLenData` that simple packets
    /// lay out directly after the header.
    pub unsafe fn simple_len(this: *const Self) -> *const usize {
        this.add(1).cast()
    }
    /// Pointer just past the length field: the inline payload (direct) or the
    /// stored payload pointer (indirect).
    pub unsafe fn simple_data(this: *const Self) -> *const u8 {
        this.add(1).cast::<usize>().add(1).cast()
    }
    /// Mutable variant of [`Self::simple_data`].
    pub unsafe fn simple_data_mut(this: *const Self) -> *mut u8 {
        this.add(1).cast::<usize>().add(1).cast_mut().cast()
    }
    /// Full payload as possibly-uninitialized bytes; `None` for complex packets.
    pub fn simple_slice(&self) -> Option<&[MaybeUninit<u8>]> {
        if self.vtbl.tag() == PacketVtblTag::Complex {
            None
        } else {
            Some(unsafe {
                core::slice::from_raw_parts(self.simple_data_ptr().cast(), *Self::simple_len(self))
            })
        }
    }
    /// Mutable payload slice; `None` for complex packets.
    ///
    /// # Safety
    /// Hands out `&mut` from `&self` — the caller must guarantee exclusivity.
    pub unsafe fn simple_slice_mut(&self) -> Option<&mut [MaybeUninit<u8>]> {
        if self.vtbl.tag() == PacketVtblTag::Complex {
            None
        } else {
            Some(unsafe {
                core::slice::from_raw_parts_mut(
                    self.simple_data_ptr().cast_mut().cast(),
                    *Self::simple_len(self),
                )
            })
        }
    }
    /// Initialized prefix of the payload: the part below the error clamp.
    pub fn simple_contiguous_slice(&self) -> Option<&[u8]> {
        if self.vtbl.tag() == PacketVtblTag::Complex {
            None
        } else {
            Some(unsafe {
                core::slice::from_raw_parts(
                    self.simple_data_ptr(),
                    // Bytes at or past `error_clamp` may never have been written.
                    core::cmp::min(
                        self.error_clamp.load(Ordering::Acquire) as usize,
                        *Self::simple_len(self),
                    ),
                )
            })
        }
    }
    /// Resolved payload pointer (follows the indirection for indirect packets).
    /// Panics on complex packets.
    pub fn simple_data_ptr(&self) -> *const u8 {
        match self.vtbl.tag() {
            PacketVtblTag::SimpleDirect => unsafe { Self::simple_data(self) },
            PacketVtblTag::SimpleIndirect => unsafe {
                *Self::simple_data(self).cast::<*const u8>()
            },
            PacketVtblTag::Complex => panic!("simple_data_ptr called on complex Packet"),
        }
    }
    /// Mutable variant of [`Self::simple_data_ptr`]. Panics on complex packets.
    pub fn simple_data_ptr_mut(&mut self) -> *mut u8 {
        match self.vtbl.tag() {
            PacketVtblTag::SimpleDirect => unsafe { Self::simple_data_mut(self) },
            PacketVtblTag::SimpleIndirect => unsafe { *Self::simple_data(self).cast::<*mut u8>() },
            PacketVtblTag::Complex => panic!("simple_data_ptr called on complex Packet"),
        }
    }
    /// The recorded error, if any.
    pub fn min_error(&self) -> Option<Error> {
        NonZeroI32::new(self.min_error.load(Ordering::Relaxed)).map(Error::from_int_err)
    }
    /// Lowest offset at which an error occurred (`!0` when none).
    pub fn error_clamp(&self) -> u64 {
        self.error_clamp.load(Ordering::Relaxed)
    }
    /// Errors only if nothing at all was processed successfully.
    pub fn err_on_zero(&self) -> Result<(), Error> {
        if self.error_clamp() > 0 {
            Ok(())
        } else {
            Err(self.min_error().expect("No error when error_clamp is 0"))
        }
    }
    /// Errors if any failure was recorded, even a partial one.
    pub fn err_any(&self) -> Result<(), Error> {
        if let Some(err) = self.min_error() {
            Err(err)
        } else {
            Ok(())
        }
    }
}
impl<Perms: PacketPerms> Packet<Perms> {
    /// Allocates a simple-direct packet with a `len`-byte payload stored
    /// inline within the same arc allocation.
    pub fn new_buf(len: usize) -> BaseArc<Packet<Perms>> {
        use std::alloc::Layout;
        // Header + `PackedLenData` prefix, followed by `len` payload bytes.
        let size = core::mem::size_of::<FullPacket<PhantomData<()>, Perms>>() + len;
        let align = core::mem::align_of::<FullPacket<PhantomData<()>, Perms>>();
        /// Destroys only the header; the trailing payload bytes need no destructor.
        unsafe extern "C" fn drop_pkt<Perms: PacketPerms>(data: *mut ()) {
            core::ptr::drop_in_place(data.cast::<Packet<Perms>>())
        }
        let packet = unsafe {
            BaseArc::custom(
                Layout::from_size_align_unchecked(size, align),
                Some(drop_pkt::<Perms>),
            )
        };
        unsafe {
            (packet.exclusive_ptr().unwrap().as_ptr() as *mut FullPacket<PhantomData<()>, Perms>)
                .write(FullPacket {
                    header: Self::new_hdr(PacketVtblRef {
                        tag: PacketVtblTag::SimpleDirect as _,
                    }),
                    data: PackedLenData {
                        len,
                        data: PhantomData,
                    },
                });
            // Reinterpret the arc as pointing at the leading `Packet` header
            // (sound because `FullPacket` is `#[repr(C)]` with the header first).
            core::mem::transmute(packet)
        }
    }
}
impl Packet<Read> {
    /// Allocates a read packet initialized with a copy of `buf`.
    pub fn copy_from_slice(buf: &[u8]) -> BaseArc<Packet<Read>> {
        let pkt = Self::new_buf(buf.len());
        // SAFETY: the freshly allocated inline buffer is exactly `buf.len()`
        // bytes and is exclusively ours until the arc escapes.
        unsafe {
            core::ptr::copy_nonoverlapping(
                buf.as_ptr(),
                pkt.simple_data_ptr().cast_mut(),
                buf.len(),
            )
        };
        pkt
    }
}
impl Packet<ReadWrite> {
    /// Allocates a read-write packet initialized with a copy of `buf`.
    pub fn copy_from_slice(buf: &[u8]) -> BaseArc<Packet<ReadWrite>> {
        let pkt = Self::new_buf(buf.len());
        // SAFETY: see `Packet::<Read>::copy_from_slice` above.
        unsafe {
            core::ptr::copy_nonoverlapping(
                buf.as_ptr(),
                pkt.simple_data_ptr().cast_mut(),
                buf.len(),
            )
        };
        pkt
    }
}
// An owned arc of a packet stores itself: the arc doubles as both the stack
// and the heap representation, and views borrow it in place.
unsafe impl<Perms: PacketPerms> OpaqueStore for BaseArc<Packet<Perms>> {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    type StackReq<'a> = Self where Self: 'a;
    type HeapReq = Self where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        self
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr {
        // Deref coercion: &BaseArc<Packet> -> &Packet.
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        PacketView::from_arc_ref(stack, 0)
    }
}
// A borrowed arc stays borrowed on the stack; promoting to the heap clones
// the arc (a cheap refcount bump).
unsafe impl<'c, Perms: PacketPerms> OpaqueStore for &'c BaseArc<Packet<Perms>> {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    type StackReq<'a> = Self where Self: 'a;
    type HeapReq = BaseArc<Packet<Perms>> where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        self
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self.clone()
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr
    where
        Self: 'a,
    {
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        PacketView::from_arc_ref(*stack, 0)
    }
}
// A by-value `FullPacket`: on standard builds it must be promoted to an arc
// even for the stack representation (packets may outlive the caller's frame);
// with linear types enabled it can genuinely live on the stack.
unsafe impl<T: 'static, Perms: PacketPerms> OpaqueStore for FullPacket<T, Perms> {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    crate::linear_types_switch! {
        Standard => {
            type StackReq<'a> = BaseArc<Self> where Self: 'a;
        }
        Linear => {
            type StackReq<'a> = Self where Self: 'a;
        }
    }
    type HeapReq = BaseArc<Self> where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        // Identity conversion in the linear configuration.
        #[allow(clippy::useless_conversion)]
        self.into()
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self.into()
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr {
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        crate::linear_types_switch! {
            Standard => {
                // SAFETY: `FullPacket` is `#[repr(C)]` with the header first,
                // so an arc of it can be viewed as an arc of the header.
                PacketView::from_arc_ref(
                    unsafe { &*(stack as *const BaseArc<FullPacket<T, Perms>>).cast() },
                    0,
                )
            }
            Linear => {
                // SAFETY: same layout argument as above, without the arc.
                PacketView::from_ref(
                    unsafe { &*(stack as *const FullPacket<T, Perms>).cast() },
                    0,
                )
            }
        }
    }
}
// An arc-wrapped `FullPacket` stores itself for both representations.
unsafe impl<T: 'static, Perms: PacketPerms> OpaqueStore for BaseArc<FullPacket<T, Perms>> {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    type StackReq<'a> = Self where Self: 'a;
    type HeapReq = Self where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        self
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr {
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        // SAFETY: `FullPacket` is `#[repr(C)]` with the header first, so an
        // arc of it can be reinterpreted as an arc of the header.
        PacketView::from_arc_ref(
            unsafe { &*(stack as *const BaseArc<FullPacket<T, Perms>>).cast() },
            0,
        )
    }
}
// `RefPacket` storage. On standard builds only the `'static` variant is
// usable (no linear-type guarantee that the borrow outlives the I/O); with
// linear types any lifetime is accepted.
unsafe impl<Perms: PacketPerms> OpaqueStore for crate::linear_types_switch! { Standard => { RefPacket::<'static, Perms> } Linear => { RefPacket::<'_, Perms> } } {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    type StackReq<'a> = Self where Self: 'a;
    type HeapReq = BaseArc<Self> where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        self
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self.into()
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr
    where
        Self: 'a,
    {
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        // SAFETY: `RefPacket` is `#[repr(C)]` with the header first.
        PacketView::from_ref(
            unsafe { &*(stack as *const Self).cast() },
            0,
        )
    }
}
// `VecPacket` is stored by value on the stack, arc-promoted for the heap.
unsafe impl<Perms: PacketPerms> OpaqueStore for VecPacket<Perms> {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    type StackReq<'a> = Self where Self: 'a;
    type HeapReq = BaseArc<Self> where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        self
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self.into()
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr
    where
        Self: 'a,
    {
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        // SAFETY: `VecPacket` is `#[repr(C)]` with the header first.
        PacketView::from_ref(
            unsafe { &*(stack as *const Self).cast() },
            0,
        )
    }
}
// `OwnedPacket` is stored by value on the stack, arc-promoted for the heap.
unsafe impl<Perms: PacketPerms> OpaqueStore for OwnedPacket<Perms> {
    type ConstHdr = Packet<Perms>;
    type Opaque<'a> = PacketView<'a, Perms> where Self: 'a;
    type StackReq<'a> = Self where Self: 'a;
    type HeapReq = BaseArc<Self> where Self: 'static;
    fn stack<'a>(self) -> Self::StackReq<'a>
    where
        Self: 'a,
    {
        self
    }
    fn heap(self) -> Self::HeapReq
    where
        Self: 'static,
    {
        self.into()
    }
    fn stack_hdr<'a: 'b, 'b>(stack: &'b Self::StackReq<'a>) -> &'b Self::ConstHdr
    where
        Self: 'a,
    {
        stack
    }
    fn stack_opaque<'a>(stack: &'a Self::StackReq<'a>) -> Self::Opaque<'a> {
        // SAFETY: `OwnedPacket` is `#[repr(C)]` with the header first.
        PacketView::from_ref(
            unsafe { &*(stack as *const Self).cast() },
            0,
        )
    }
}
impl<T: AsRef<Packet<Perms>>, Perms: PacketPerms> From<BaseArc<T>> for PacketView<'static, Perms> {
    /// Builds an owning view over the packet header embedded in `pkt`.
    fn from(pkt: BaseArc<T>) -> Self {
        Self::from_arc(pkt.transpose().into_base().unwrap(), 0)
    }
}
/// Marker for any opaque store whose opaque form is a [`PacketView`].
pub trait PacketStore<'a, Perms: PacketPerms>:
    'a + OpaqueStore<Opaque<'a> = PacketView<'a, Perms>, ConstHdr = Packet<Perms>>
{
}
// Blanket impl: anything with the right associated types is a packet store.
impl<'a, Perms: PacketPerms, T> PacketStore<'a, Perms> for T where
    T: 'a + OpaqueStore<Opaque<'a> = PacketView<'a, Perms>, ConstHdr = Packet<Perms>>
{
}
// Byte-sized types usable as raw packet storage elements.
trait AnyBytes {}
impl AnyBytes for u8 {}
impl AnyBytes for MaybeUninit<u8> {}
/// Conversion of user buffers into packet stores, with an optional
/// post-I/O copy-back step driven by `SyncHandle`.
pub trait IntoPacket<'a, Perms: PacketPerms> {
    type Target: PacketStore<'a, Perms>;
    type SyncHandle;
    fn into_packet(self) -> (Self::Target, Self::SyncHandle);
    /// Copies results back into the original buffer, if needed (no-op by default).
    fn sync_back(_hdr: &<Self::Target as OpaqueStore>::ConstHdr, _handle: Self::SyncHandle) {}
}
// Anything that is already a packet store converts trivially.
impl<'a, T: PacketStore<'a, Perms>, Perms: PacketPerms> IntoPacket<'a, Perms> for T {
    type Target = Self;
    type SyncHandle = ();
    fn into_packet(self) -> (Self, ()) {
        (self, ())
    }
}
// Shared byte slices are copied into a fresh read packet.
impl<'a, 'b: 'a> IntoPacket<'a, Read> for &'b [u8] {
    type Target = BaseArc<Packet<Read>>;
    type SyncHandle = ();
    fn into_packet(self) -> (Self::Target, ()) {
        (Packet::<Read>::copy_from_slice(self), ())
    }
}
impl<'a, 'b: 'a, T: AnyBytes> IntoPacket<'a, Write> for &'b mut [T] {
    crate::linear_types_switch! {
        Standard => {
            // Without linear types the borrow cannot safely back the I/O, so
            // a temporary buffer is allocated and copied back in `sync_back`.
            type SyncHandle = Self;
            type Target = BaseArc<Packet<Write>>;
            fn into_packet(self) -> (Self::Target, Self) {
                (Packet::<Write>::new_buf(self.len()), self)
            }
            fn sync_back(hdr: &<Self::Target as OpaqueStore>::ConstHdr, handle: Self::SyncHandle) {
                // Copy only the successfully-written prefix (below the error clamp).
                unsafe {
                    core::ptr::copy_nonoverlapping(
                        hdr.simple_data_ptr(),
                        handle.as_mut_ptr().cast(),
                        core::cmp::min(handle.len(), hdr.error_clamp() as usize),
                    );
                }
            }
        }
        Linear => {
            // Linear types guarantee the borrow outlives the I/O — write
            // directly into the caller's buffer, nothing to sync back.
            type SyncHandle = ();
            type Target = RefPacket<'a, Write>;
            fn into_packet(self) -> (Self::Target, Self::SyncHandle) {
                (self.into(), ())
            }
        }
    }
}
impl<'a, 'b: 'a> IntoPacket<'a, ReadWrite> for &'b mut [u8] {
    type Target = BaseArc<Packet<ReadWrite>>;
    type SyncHandle = Self;
    /// Seeds the temporary packet with the slice's current contents (the
    /// backend both reads and writes the buffer).
    fn into_packet(self) -> (Self::Target, Self) {
        (Packet::<ReadWrite>::copy_from_slice(&*self), self)
    }
    fn sync_back(hdr: &<Self::Target as OpaqueStore>::ConstHdr, handle: Self::SyncHandle) {
        // Copy only the successfully-processed prefix back to the caller.
        unsafe {
            core::ptr::copy_nonoverlapping(
                hdr.simple_data_ptr(),
                handle.as_mut_ptr(),
                core::cmp::min(handle.len(), hdr.error_clamp() as usize),
            );
        }
    }
}
use cglue::prelude::v1::*;
/// Attempts to allocate a contiguous buffer for a bound view at the given
/// alignment; on success writes the allocation into `out_alloced` and
/// returns `true` (taking ownership out of the `ManuallyDrop`).
pub type AllocFn<T> = for<'a> unsafe extern "C" fn(
    packet: &'a mut ManuallyDrop<BoundPacketView<T>>,
    alignment: usize,
    out_alloced: &'a mut MaybeUninit<<T as PacketPerms>::Alloced>,
) -> bool;
/// Moves data between a packet view and a caller-provided pointer; the
/// direction depends on the permission type.
pub type TransferDataFn<T> = for<'a> unsafe extern "C" fn(
    packet: &'a mut PacketView<T>,
    input: <T as PacketPerms>::ReverseDataType,
);
/// Total byte length of a (possibly complex) packet.
pub type LenFn<T> = unsafe extern "C" fn(packet: &Packet<T>) -> u64;
/// Allocation result: `Ok` if contiguous memory was obtained, `Err` returns
/// the still-bound view.
pub type MaybeAlloced<T> = Result<<T as PacketPerms>::Alloced, BoundPacketView<T>>;
/// Either an allocation or a completed out-of-band transfer.
pub type AllocedOrTransferred<T> = Result<<T as PacketPerms>::Alloced, TransferredPacket<T>>;
/// A packet permission type (`Read`, `Write` or `ReadWrite`). Doubles as the
/// vtable struct for complex packets; the default methods dispatch to the
/// vtable when present and fall back to the simple inline layout otherwise.
pub trait PacketPerms: 'static + core::fmt::Debug + Clone + Copy {
    /// Raw pointer type of the packet's own data.
    type DataType: Clone + Copy + core::fmt::Debug;
    /// Raw pointer type handed in by the transfer peer.
    type ReverseDataType: Clone + Copy + core::fmt::Debug;
    /// Concrete allocation object produced for this permission.
    type Alloced: AllocatedPacket<Perms = Self>;
    fn len_fn(&self) -> LenFn<Self>;
    /// Byte length: via the vtable for complex packets, or the inline
    /// `PackedLenData::len` for simple ones.
    fn len(packet: &Packet<Self>) -> u64 {
        if let Some(vtbl) = packet.vtbl.vtbl() {
            unsafe { (vtbl.len_fn())(packet) }
        } else {
            unsafe { *Packet::simple_len(packet) as u64 }
        }
    }
    fn alloc_fn(&self) -> AllocFn<Self>;
    /// Allocation fast path for simple packets (pointer into the buffer).
    unsafe fn alloced_simple(packet: BoundPacketView<Self>) -> Self::Alloced;
    /// Tries to obtain contiguous, suitably-aligned memory for the view.
    fn try_alloc(packet: BoundPacketView<Self>, alignment: usize) -> MaybeAlloced<Self> {
        if let Some(vtbl) = packet.view.pkt().vtbl.vtbl() {
            // Complex path: the vtable decides; ManuallyDrop lets the callee
            // take ownership only on success.
            let mut view = ManuallyDrop::new(packet);
            let mut out = MaybeUninit::uninit();
            let ret = unsafe { (vtbl.alloc_fn())(&mut view, alignment, &mut out) };
            if ret {
                Ok(unsafe { out.assume_init() })
            } else {
                Err(ManuallyDrop::into_inner(view))
            }
        } else {
            // Simple path: succeed only if the in-place data meets the
            // requested alignment.
            let data = unsafe {
                packet
                    .view
                    .pkt()
                    .simple_data_ptr()
                    .add(packet.view.start as usize)
            };
            if data.align_offset(alignment) == 0 {
                Ok(unsafe { Self::alloced_simple(packet) })
            } else {
                Err(packet)
            }
        }
    }
    fn transfer_data_fn(&self) -> TransferDataFn<Self>;
    /// Data-transfer fast path for simple packets.
    unsafe fn transfer_data_simple(packet: &mut PacketView<Self>, input: Self::ReverseDataType);
    /// Moves data between the view and `input`, dispatching like `len`.
    unsafe fn transfer_data(packet: &mut PacketView<Self>, input: Self::ReverseDataType) {
        if let Some(vtbl) = packet.pkt().vtbl.vtbl() {
            (vtbl.transfer_data_fn())(packet, input)
        } else {
            Self::transfer_data_simple(packet, input)
        }
    }
}
/// Permission vtable for packets whose contents are exchanged with the
/// backend (data flows both ways).
#[repr(C)]
#[derive(Clone, Copy)]
pub struct ReadWrite {
    pub len: unsafe extern "C" fn(&Packet<Self>) -> u64,
    pub get_mut: for<'a> unsafe extern "C" fn(
        &mut ManuallyDrop<BoundPacketView<Self>>,
        usize,
        &mut MaybeUninit<ReadWritePacketObj>,
    ) -> bool,
    pub transfer_data: for<'a, 'b> unsafe extern "C" fn(&'a mut PacketView<Self>, *mut ()),
}
impl core::fmt::Debug for ReadWrite {
    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
        // Identify the vtable by its `get_mut` function pointer.
        write!(fmt, "{:?}", self.get_mut as *const ())
    }
}
impl PacketPerms for ReadWrite {
    type DataType = *mut ();
    type ReverseDataType = *mut ();
    type Alloced = ReadWritePacketObj;
    fn len_fn(&self) -> LenFn<Self> {
        self.len
    }
    fn alloc_fn(&self) -> AllocFn<Self> {
        self.get_mut
    }
    fn transfer_data_fn(&self) -> TransferDataFn<Self> {
        self.transfer_data
    }
    /// Simple-packet allocation: a pointer into the inline buffer, offset by
    /// the view's start.
    unsafe fn alloced_simple(packet: BoundPacketView<Self>) -> Self::Alloced {
        let data = packet.view.pkt().simple_data_ptr().cast_mut();
        ReadWritePacketObj {
            alloced_packet: unsafe { data.add(packet.view.start as usize) },
            buffer: packet,
        }
    }
    /// Simple-packet transfer: exchanges the view's bytes with the caller's
    /// buffer (read-write means a swap, not a copy).
    unsafe fn transfer_data_simple(view: &mut PacketView<Self>, data: *mut ()) {
        let dst = Packet::simple_data_ptr_mut(view.pkt_mut());
        core::ptr::swap_nonoverlapping(
            data.cast(),
            dst.add(view.start as usize),
            view.len() as usize,
        );
    }
}
/// Permission vtable for packets the backend writes into (e.g. read targets).
#[repr(C)]
#[derive(Clone, Copy)]
pub struct Write {
    pub len: unsafe extern "C" fn(&Packet<Self>) -> u64,
    pub get_mut: for<'a> unsafe extern "C" fn(
        &mut ManuallyDrop<BoundPacketView<Self>>,
        usize,
        &mut MaybeUninit<WritePacketObj>,
    ) -> bool,
    pub transfer_data: for<'a, 'b> unsafe extern "C" fn(&'a mut PacketView<Self>, *const ()),
}
impl core::fmt::Debug for Write {
    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
        // Identify the vtable by its `get_mut` function pointer.
        write!(fmt, "{:?}", self.get_mut as *const ())
    }
}
impl PacketPerms for Write {
    type DataType = *mut ();
    type ReverseDataType = *const ();
    type Alloced = WritePacketObj;
    fn len_fn(&self) -> LenFn<Self> {
        self.len
    }
    fn alloc_fn(&self) -> AllocFn<Self> {
        self.get_mut
    }
    fn transfer_data_fn(&self) -> TransferDataFn<Self> {
        self.transfer_data
    }
    /// Simple-packet allocation: a pointer into the (possibly uninitialized)
    /// inline buffer, offset by the view's start.
    unsafe fn alloced_simple(packet: BoundPacketView<Self>) -> Self::Alloced {
        let data = packet
            .view
            .pkt()
            .simple_data_ptr()
            .cast_mut()
            .cast::<MaybeUninit<u8>>();
        WritePacketObj {
            alloced_packet: unsafe { data.add(packet.view.start as usize) },
            buffer: packet,
        }
    }
    /// Simple-packet transfer: copies the caller's bytes into the view.
    /// NOTE(review): `copy` (memmove) rather than `copy_nonoverlapping` —
    /// presumably source and destination may alias; confirm before changing.
    unsafe fn transfer_data_simple(view: &mut PacketView<Self>, data: *const ()) {
        let dst = Packet::simple_data_ptr_mut(view.pkt_mut());
        core::ptr::copy(
            data.cast(),
            dst.add(view.start as usize),
            view.len() as usize,
        );
    }
}
/// Permission vtable for packets the backend reads from (e.g. write sources).
#[repr(C)]
#[derive(Clone, Copy)]
pub struct Read {
    pub len: unsafe extern "C" fn(&Packet<Self>) -> u64,
    pub get: unsafe extern "C" fn(
        &mut ManuallyDrop<BoundPacketView<Self>>,
        usize,
        &mut MaybeUninit<ReadPacketObj>,
    ) -> bool,
    pub transfer_data: for<'a> unsafe extern "C" fn(&'a mut PacketView<Self>, *mut ()),
}
impl core::fmt::Debug for Read {
    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
        // Identify the vtable by its `get` function pointer.
        write!(fmt, "{:?}", self.get as *const ())
    }
}
impl PacketPerms for Read {
    type DataType = *const ();
    type ReverseDataType = *mut ();
    type Alloced = ReadPacketObj;

    fn len_fn(&self) -> LenFn<Self> {
        self.len
    }

    fn alloc_fn(&self) -> AllocFn<Self> {
        self.get
    }

    fn transfer_data_fn(&self) -> TransferDataFn<Self> {
        self.transfer_data
    }

    /// Simple-packet allocation: a pointer into the inline buffer, offset by
    /// the view's start.
    unsafe fn alloced_simple(packet: BoundPacketView<Self>) -> Self::Alloced {
        let data = packet.view.pkt().simple_data_ptr().cast::<u8>();
        ReadPacketObj {
            alloced_packet: unsafe { data.add(packet.view.start as usize) },
            buffer: packet,
        }
    }

    /// Simple-packet transfer: copies the view's bytes out to the caller's
    /// buffer.
    ///
    /// Bug fix: `view.start` addresses the *packet's* buffer, not the output
    /// pointer. The previous code offset `data` by `start` (writing past the
    /// start of the destination) while reading the packet from offset 0,
    /// inconsistent with `alloced_simple` above and with the `Write`/
    /// `ReadWrite` counterparts, which apply `start` to the packet side.
    unsafe fn transfer_data_simple(view: &mut PacketView<Self>, data: *mut ()) {
        let src = view.pkt().simple_data_ptr();
        core::ptr::copy(
            src.add(view.start as usize),
            data.cast::<u8>(),
            view.len() as usize,
        );
    }
}
/// A buffer-like object that can be split into two adjacent halves.
pub trait Splittable<T: Default + PartialEq>: Sized {
    /// Splits into `[0, len)` and `[len, self.len())` parts.
    fn split_at(self, len: T) -> (Self, Self);
    fn len(&self) -> T;
    /// Whether the length equals the type's default (zero for integers).
    fn is_empty(&self) -> bool {
        self.len() == Default::default()
    }
}
impl<T: Default + PartialEq, A: Splittable<T>, B: Splittable<T>> Splittable<T> for Result<A, B> {
    /// Splits whichever variant is held, keeping the `Ok`/`Err` wrapper on
    /// both halves.
    fn split_at(self, len: T) -> (Self, Self) {
        match self {
            Ok(inner) => {
                let (head, tail) = inner.split_at(len);
                (Ok(head), Ok(tail))
            }
            Err(inner) => {
                let (head, tail) = inner.split_at(len);
                (Err(head), Err(tail))
            }
        }
    }

    /// Length of the contained value, regardless of variant.
    fn len(&self) -> T {
        self.as_ref().map_or_else(|e| e.len(), |v| v.len())
    }
}
/// An object that can be consumed while flagging an error on its packet.
pub trait Errorable: Sized {
    fn error(self, err: Error);
}
// Forwards the error to whichever variant is held.
impl<A: Errorable, B: Errorable> Errorable for Result<A, B> {
    fn error(self, err: Error) {
        match self {
            Ok(v) => v.error(err),
            Err(v) => v.error(err),
        }
    }
}
/// A successfully allocated, contiguous packet buffer.
pub trait AllocatedPacket: Splittable<u64> + Errorable {
    type Perms: PacketPerms;
    /// Raw pointer type exposed to the consumer (const or mut per permission).
    type Pointer: Copy;
    fn as_ptr(&self) -> Self::Pointer;
}
/// Allocated read-write packet: a raw pointer into the buffer plus the bound
/// view that keeps the packet alive.
#[repr(C)]
pub struct ReadWritePacketObj {
    alloced_packet: *mut u8,
    buffer: BoundPacketView<ReadWrite>,
}
impl Splittable<u64> for ReadWritePacketObj {
    /// Splits the view and advances the second half's pointer by `len`.
    fn split_at(self, len: u64) -> (Self, Self) {
        let (b1, b2) = self.buffer.split_at(len);
        (
            Self {
                alloced_packet: self.alloced_packet,
                buffer: b1,
            },
            Self {
                // SAFETY: `len` is within the original view, so the offset
                // pointer stays inside the allocation.
                alloced_packet: unsafe { self.alloced_packet.add(len as usize) },
                buffer: b2,
            },
        )
    }
    fn len(&self) -> u64 {
        self.buffer.view.len()
    }
}
impl Errorable for ReadWritePacketObj {
    fn error(self, err: Error) {
        self.buffer.error(err)
    }
}
impl AllocatedPacket for ReadWritePacketObj {
    type Perms = ReadWrite;
    type Pointer = *mut u8;
    fn as_ptr(&self) -> Self::Pointer {
        self.alloced_packet
    }
}
// SAFETY: NOTE(review) — assumed sound: the pointer refers to the buffer
// exclusively owned by the bound view carried alongside it.
unsafe impl Send for ReadWritePacketObj {}
unsafe impl Sync for ReadWritePacketObj {}
impl core::ops::Deref for ReadWritePacketObj {
    type Target = [u8];
    fn deref(&self) -> &Self::Target {
        // SAFETY: pointer and length come from the same bound view.
        unsafe { core::slice::from_raw_parts(self.alloced_packet, self.buffer.view.len() as usize) }
    }
}
impl core::ops::DerefMut for ReadWritePacketObj {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: see `Deref`; `&mut self` guarantees exclusivity.
        unsafe {
            core::slice::from_raw_parts_mut(self.alloced_packet, self.buffer.view.len() as usize)
        }
    }
}
/// Allocated write packet: possibly-uninitialized destination bytes plus the
/// bound view that keeps the packet alive.
#[repr(C)]
pub struct WritePacketObj {
    alloced_packet: *mut MaybeUninit<u8>,
    buffer: BoundPacketView<Write>,
}
impl Splittable<u64> for WritePacketObj {
    /// Splits the view and advances the second half's pointer by `len`.
    fn split_at(self, len: u64) -> (Self, Self) {
        let (b1, b2) = self.buffer.split_at(len);
        (
            Self {
                alloced_packet: self.alloced_packet,
                buffer: b1,
            },
            Self {
                // SAFETY: `len` is within the original view.
                alloced_packet: unsafe { self.alloced_packet.add(len as usize) },
                buffer: b2,
            },
        )
    }
    fn len(&self) -> u64 {
        self.buffer.view.len()
    }
}
impl Errorable for WritePacketObj {
    fn error(self, err: Error) {
        self.buffer.error(err)
    }
}
impl AllocatedPacket for WritePacketObj {
    type Perms = Write;
    type Pointer = *mut MaybeUninit<u8>;
    fn as_ptr(&self) -> Self::Pointer {
        self.alloced_packet
    }
}
// SAFETY: NOTE(review) — same reasoning as `ReadWritePacketObj`.
unsafe impl Send for WritePacketObj {}
unsafe impl Sync for WritePacketObj {}
impl core::ops::Deref for WritePacketObj {
    type Target = [MaybeUninit<u8>];
    fn deref(&self) -> &Self::Target {
        // SAFETY: pointer and length come from the same bound view.
        unsafe { core::slice::from_raw_parts(self.alloced_packet, self.buffer.view.len() as usize) }
    }
}
impl core::ops::DerefMut for WritePacketObj {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: see `Deref`; `&mut self` guarantees exclusivity.
        unsafe {
            core::slice::from_raw_parts_mut(self.alloced_packet, self.buffer.view.len() as usize)
        }
    }
}
#[repr(C)]
pub struct ReadPacketObj {
alloced_packet: *const u8,
buffer: BoundPacketView<Read>,
}
impl Splittable<u64> for ReadPacketObj {
    /// Divides this packet into two adjacent packets at byte offset `len`.
    fn split_at(self, len: u64) -> (Self, Self) {
        let Self {
            alloced_packet,
            buffer,
        } = self;
        let (head, tail) = buffer.split_at(len);
        // SAFETY: the underlying view accepted the split, so offsetting the
        // pointer by `len` stays within the original allocation.
        let tail_ptr = unsafe { alloced_packet.add(len as usize) };
        let first = Self {
            alloced_packet,
            buffer: head,
        };
        let second = Self {
            alloced_packet: tail_ptr,
            buffer: tail,
        };
        (first, second)
    }

    /// Length of the packet in bytes.
    fn len(&self) -> u64 {
        self.buffer.view.len()
    }
}
impl Errorable for ReadPacketObj {
    /// Consumes the packet and reports `err` back through its bound view.
    fn error(self, err: Error) {
        let Self { buffer, .. } = self;
        buffer.error(err)
    }
}
impl AllocatedPacket for ReadPacketObj {
    type Perms = Read;
    type Pointer = *const u8;

    /// Raw pointer to the first byte of the packet's source memory.
    fn as_ptr(&self) -> Self::Pointer {
        self.alloced_packet
    }
}
// SAFETY: NOTE(review) — read-only variant of the reasoning above: the
// pointer belongs to the bound buffer and is only read through `&self`.
// Confirm against `BoundPacketView<Read>`'s thread-safety invariants.
unsafe impl Send for ReadPacketObj {}
unsafe impl Sync for ReadPacketObj {}
impl core::ops::Deref for ReadPacketObj {
    type Target = [u8];

    /// Borrows the packet's bytes as an immutable slice.
    fn deref(&self) -> &Self::Target {
        let len = self.buffer.view.len() as usize;
        // SAFETY: NOTE(review) — `alloced_packet` is presumed to reference at
        // least `len` readable bytes for `self`'s lifetime; invariant upheld
        // at construction, outside this block.
        unsafe { core::slice::from_raw_parts(self.alloced_packet, len) }
    }
}
/// Wrapper around a [`BoundPacketView`] marking its data as transferred.
///
/// NOTE(review): the name suggests the data transfer already happened; from
/// this block the wrapper only narrows the API to split/len/error until the
/// point of drop, which callers must handle deliberately (hence `must_use`).
#[repr(transparent)]
#[must_use = "please handle point of drop intentionally"]
pub struct TransferredPacket<T: PacketPerms>(BoundPacketView<T>);
impl<T: PacketPerms> Splittable<u64> for TransferredPacket<T> {
    /// Divides this packet into two adjacent transferred packets at `len`.
    fn split_at(self, len: u64) -> (Self, Self) {
        let (head, tail) = self.0.split_at(len);
        (Self(head), Self(tail))
    }

    /// Length of the underlying view, in bytes.
    fn len(&self) -> u64 {
        self.0.view.len()
    }
}
impl<T: PacketPerms> Errorable for TransferredPacket<T> {
fn error(self, err: Error) {
self.0.error(err)
}
}
/// The standard forms a packet with permissions `Perms` can take as it moves
/// through the pipeline.
pub enum StandardPktVariations<Perms: PacketPerms> {
    // A bound view that has not been allocated or transferred.
    BoundPacketView(BoundPacketView<Perms>),
    // A fully allocated packet (the permission's `Alloced` representation).
    Alloced(<Perms as PacketPerms>::Alloced),
    // A packet wrapped as already transferred.
    TransferredPacket(TransferredPacket<Perms>),
}
// Blanket conversion: any allocated packet becomes the `Alloced` variant of
// its own permission's variation enum. The mutually-referencing bounds
// (`P::Perms == PP` and `PP::Alloced == P`) pin the packet type and its
// permission type to each other, keeping the impl unambiguous.
impl<P: AllocatedPacket<Perms = PP>, PP: PacketPerms<Alloced = P>> From<P>
    for StandardPktVariations<P::Perms>
{
    fn from(p: P) -> Self {
        Self::Alloced(p)
    }
}
impl<Perms: PacketPerms> From<BoundPacketView<Perms>> for StandardPktVariations<Perms> {
    /// Wraps an unallocated bound view in its enum variant.
    fn from(view: BoundPacketView<Perms>) -> Self {
        Self::BoundPacketView(view)
    }
}
impl<Perms: PacketPerms> From<MaybeAlloced<Perms>> for StandardPktVariations<Perms> {
    /// Maps the `Ok` (allocated) side to `Alloced` and the `Err` side back
    /// to a plain `BoundPacketView`.
    fn from(p: MaybeAlloced<Perms>) -> Self {
        p.map_or_else(Self::BoundPacketView, Self::Alloced)
    }
}
impl<Perms: PacketPerms> From<AllocedOrTransferred<Perms>> for StandardPktVariations<Perms> {
    /// Maps the `Ok` (allocated) side to `Alloced` and the `Err` side to
    /// `TransferredPacket`.
    fn from(p: AllocedOrTransferred<Perms>) -> Self {
        p.map_or_else(Self::TransferredPacket, Self::Alloced)
    }
}
impl<Perms: PacketPerms> From<TransferredPacket<Perms>> for StandardPktVariations<Perms> {
    /// Wraps an already-transferred packet in its enum variant.
    fn from(pkt: TransferredPacket<Perms>) -> Self {
        Self::TransferredPacket(pkt)
    }
}
impl<Perms: PacketPerms> Errorable for StandardPktVariations<Perms> {
    /// Forwards `err` to whichever packet form is currently held.
    fn error(self, err: Error) {
        match self {
            Self::BoundPacketView(pkt) => pkt.error(err),
            Self::Alloced(pkt) => pkt.error(err),
            Self::TransferredPacket(pkt) => pkt.error(err),
        }
    }
}
// Generates an enum with one variant per permission type, each carrying the
// standard packet variations for that permission, plus the full set of
// `From` conversions so any packet form can be wrapped with `.into()`.
//
// NOTE(review): the where-clause below names `AnyPacket` (not `$name`), so
// this macro only compiles for the single `AnyPacket` invocation at the end.
macro_rules! packet_combos {
    ($name:ident, $($perms:ident),*) => {
        pub enum $name {
            $($perms(StandardPktVariations<$perms>)),*
        }
        // Any allocated packet funnels through its permission's variation
        // enum first, then into the combined enum.
        impl<P: AllocatedPacket<Perms = PP>, PP: PacketPerms<Alloced = P>> From<P> for $name where StandardPktVariations<PP>: Into<AnyPacket> {
            fn from(p: P) -> Self {
                StandardPktVariations::<PP>::from(p).into()
            }
        }
        $(
            impl From<StandardPktVariations<$perms>> for $name {
                fn from(p: StandardPktVariations<$perms>) -> Self {
                    Self::$perms(p)
                }
            }
            impl From<BoundPacketView<$perms>> for $name {
                fn from(p: BoundPacketView<$perms>) -> Self {
                    Self::$perms(p.into())
                }
            }
            impl From<MaybeAlloced<$perms>> for $name {
                fn from(p: MaybeAlloced<$perms>) -> Self {
                    Self::$perms(p.into())
                }
            }
            impl From<AllocedOrTransferred<$perms>> for $name {
                fn from(p: AllocedOrTransferred<$perms>) -> Self {
                    Self::$perms(p.into())
                }
            }
            impl From<TransferredPacket<$perms>> for $name {
                fn from(p: TransferredPacket<$perms>) -> Self {
                    Self::$perms(p.into())
                }
            }
        )*
        // Errors are delegated through both enum layers.
        impl Errorable for $name {
            fn error(self, err: Error) {
                match self {
                    $(Self::$perms(p) => p.error(err),)*
                }
            }
        }
    }
}
// The one combined packet type covering all three permission levels.
packet_combos!(AnyPacket, Read, Write, ReadWrite);
/// A bound packet view whose segments may be handed out and returned
/// piecemeal, keeping the original view alive until all bytes are accounted
/// for.
pub struct ReboundPacket<T: PacketPerms> {
    // Byte ranges (relative to `orig`'s start) that completed successfully
    // and await finalization via `range_result`.
    ranges: RangeSet<u64>,
    // The original view. Manually dropped or forgotten depending on whether
    // it was handed out through `unbound` — see the `Drop` impl.
    orig: ManuallyDrop<BoundPacketView<T>>,
    // Set once `unbound()` is called; selects the drop strategy.
    unbound: AtomicBool,
}
impl<T: PacketPerms> ReboundPacket<T> {
    /// Number of split-off packet segments still being processed elsewhere.
    ///
    /// NOTE(review): computed from the packet's reference count; once any
    /// range has completed, the retained original accounts for one of the
    /// references, hence the conditional subtraction — confirm against the
    /// semantics of `pkt().rc()`.
    pub fn packets_in_flight(&self) -> usize {
        self.orig.view.pkt().rc() - if self.ranges.is_empty() { 0 } else { 1 }
    }
    /// Hands out the original view with a `'static` lifetime.
    ///
    /// Panics (via `assert!`) if called more than once. Setting the flag
    /// switches the `Drop` impl into range-by-range reclamation mode.
    pub fn unbound(&self) -> PacketView<'static, T> {
        assert!(!self.unbound.swap(true, Ordering::Acquire));
        // SAFETY: NOTE(review) — presumed sound because `self.orig` is kept
        // alive (wrapped in `ManuallyDrop`) until every byte of the view has
        // been accounted for; confirm `unbound`'s contract.
        unsafe { self.orig.unbound() }
    }
    /// Records the outcome of one processed segment of the unbound view.
    ///
    /// On error the matching subrange is extracted from the original and the
    /// error is forwarded immediately; on success the segment's byte range
    /// (relative to the original view's start) is marked complete.
    pub fn on_processed(&mut self, pkt: PacketView<'static, T>, err: Option<Error>) {
        match err {
            Some(err) => {
                // SAFETY: NOTE(review) — `pkt` is expected to originate from
                // this packet's `unbound` view, so the offset/length computed
                // here lie within `orig`; confirm callers uphold this.
                let pkt = unsafe {
                    self.orig
                        .extract_packet(pkt.start - self.orig.view.start, pkt.len())
                };
                pkt.error(err);
            }
            None => {
                // Store offsets relative to the original view's start.
                let start = pkt.start - self.orig.view.start;
                let end = pkt.end - self.orig.view.start;
                self.ranges.insert(start..end);
            }
        }
    }
    /// Finalizes `len` bytes at `start` out of the completed-range set.
    ///
    /// The requested span must lie entirely within a single completed range
    /// (checked by the asserts). When the last range drains, the original
    /// view is forgotten rather than dropped. An error, if any, is forwarded
    /// to the freshly extracted packet.
    pub fn range_result(&mut self, start: u64, len: u64, err: Option<Error>) {
        let range = start..(start + len);
        let mut o = self.ranges.overlapping(&range);
        let o = o.next().unwrap();
        assert!(o.contains(&start));
        // `saturating_sub` keeps a zero-length request from underflowing.
        assert!(o.contains(&(start + len.saturating_sub(1))));
        self.ranges.remove(range);
        // SAFETY: the asserts above confirmed the span is fully covered by a
        // previously completed range of `orig`.
        let pkt = unsafe { self.orig.extract_packet(start, len) };
        if self.ranges.is_empty() {
            // SAFETY: NOTE(review) — no completed ranges remain, so the
            // original is released via `forget`; `pkt` (extracted above) is
            // still alive at this point, presumably keeping the underlying
            // refcount nonzero. Confirm `forget`'s contract.
            let orig = unsafe { ManuallyDrop::take(&mut self.orig) };
            unsafe { orig.forget() };
        }
        if let Some(err) = err {
            pkt.error(err)
        }
    }
    /// Read-only view of the completed, not-yet-finalized byte ranges.
    pub fn ranges(&self) -> &RangeSet<u64> {
        &self.ranges
    }
}
impl<T: PacketPerms> From<BoundPacketView<T>> for ReboundPacket<T> {
fn from(orig: BoundPacketView<T>) -> Self {
Self {
ranges: Default::default(),
orig: ManuallyDrop::new(orig),
unbound: false.into(),
}
}
}
impl<T: PacketPerms> Drop for ReboundPacket<T> {
    fn drop(&mut self) {
        if *self.unbound.get_mut() {
            // The view was handed out: re-extract every completed range so
            // the underlying packet's bookkeeping is balanced on drop.
            let mut prev = None;
            for range in self.ranges.iter() {
                // Earlier extractions are dropped as `prev` is overwritten;
                // only the final one outlives the `forget` below.
                prev = Some(unsafe {
                    self.orig
                        .extract_packet(range.start, range.end - range.start)
                });
            }
            if prev.is_some() {
                // SAFETY: NOTE(review) — at least one range existed, so the
                // original must be forgotten (not dropped); holding `prev`
                // until afterwards presumably keeps the refcount from
                // hitting zero prematurely. Confirm `forget`'s contract.
                unsafe { ManuallyDrop::take(&mut self.orig).forget() };
            }
            // Release the last extracted packet only after `forget`.
            core::mem::drop(prev);
        } else {
            // Never unbound: the original still owns everything; drop it.
            // SAFETY: `orig` is initialized and not dropped on any other
            // path when `unbound` is false.
            unsafe { ManuallyDrop::drop(&mut self.orig) };
        }
    }
}