#![doc = include_str!("../../doc/ptr/span.md")]
use core::{
any,
fmt::{
self,
Binary,
Debug,
Display,
Formatter,
Pointer,
},
marker::PhantomData,
mem,
ptr::{
self,
NonNull,
},
};
use tap::Pipe;
use wyz::{
comu::{
Address,
Const,
Mut,
Mutability,
NullPtrError,
Reference,
Referential,
},
fmt::FmtForward,
};
use super::{
BitPtr,
BitPtrError,
BitPtrRange,
MisalignError,
};
use crate::{
index::{
BitEnd,
BitIdx,
},
mem::{
bits_of,
BitRegister,
},
order::{
BitOrder,
Lsb0,
},
slice::BitSlice,
store::BitStore,
};
#[doc = include_str!("../../doc/ptr/BitSpan.md")]
//  Field order is significant: the address word precedes the length word,
//  mirroring the shape of a raw slice pointer.
#[repr(C)]
pub(crate) struct BitSpan<M = Const, T = usize, O = Lsb0>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
/// The base storage-element address, with the high bits of the head
/// bit-index packed into its low (alignment) bits.
ptr: NonNull<()>,
/// The span's bit-count, shifted up by three, with the low three bits of
/// the head bit-index packed into the bottom.
len: usize,
/// Carries the bit-ordering type parameter.
_or: PhantomData<O>,
/// Marks the span as if it held an `Address<M, [T]>`, for variance and
/// mutability tracking.
_ty: PhantomData<Address<M, [T]>>,
}
impl<M, T, O> BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
/// The canonical empty span: a dangling, well-aligned pointer and a zero
/// length.
pub(crate) const EMPTY: Self = Self {
ptr: NonNull::<T>::dangling().cast::<()>(),
len: 0,
_or: PhantomData,
_ty: PhantomData,
};
/// Number of low bits of the head bit-index stored in the `len` field.
pub(crate) const LEN_HEAD_BITS: usize = 3;
/// Mask of the `len` bits that carry the head bit-index.
pub(crate) const LEN_HEAD_MASK: usize = 0b111;
/// Mask of the `ptr` bits that carry the element address.
pub(crate) const PTR_ADDR_MASK: usize = !0 << Self::PTR_HEAD_BITS;
/// Number of low bits of the `ptr` field that carry the upper bits of the
/// head bit-index.
pub(crate) const PTR_HEAD_BITS: usize =
<T::Mem as BitRegister>::INDX as usize - Self::LEN_HEAD_BITS;
/// Mask of the `ptr` bits that carry the head bit-index.
pub(crate) const PTR_HEAD_MASK: usize = !Self::PTR_ADDR_MASK;
/// Maximum number of bits a span can encode, since `len` loses three bits
/// to the head bit-index.
pub(crate) const REGION_MAX_BITS: usize = !0 >> Self::LEN_HEAD_BITS;
/// Maximum number of `T` elements a maximal span can touch: one more than
/// its bit capacity fills, because the span may begin partway through its
/// first element.
pub(crate) const REGION_MAX_ELTS: usize =
crate::mem::elts::<T::Mem>(Self::REGION_MAX_BITS) + 1;
}
impl<M, T, O> BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
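/// Constructs an empty span that still records its base element address.
/// Only the owning containers need this, hence the `alloc` gate.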
#[cfg(feature = "alloc")]
pub(crate) fn uninhabited(addr: Address<M, T>) -> Self {
Self {
ptr: addr.into_inner().cast::<()>(),
..Self::EMPTY
}
}
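/// Checked constructor: fails if the bit-count cannot be encoded, if the
/// span would wrap past the end of the address space, or if the starting
/// bit-pointer itself is invalid (null or misaligned).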
pub(crate) fn new(
addr: Address<M, T>,
head: BitIdx<T::Mem>,
bits: usize,
) -> Result<Self, BitSpanError<T>> {
if bits > Self::REGION_MAX_BITS {
return Err(BitSpanError::TooLong(bits));
}
let base = BitPtr::<M, T, O>::new(addr, head)?;
let last = base.wrapping_add(bits);
if last < base {
return Err(BitSpanError::TooHigh(addr.to_const()));
}
Ok(unsafe { Self::new_unchecked(addr, head, bits) })
}
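/// Packs an address, head bit-index, and bit-count into the two-word
/// encoding without performing any of the checks done by `Self::new`.
///
/// ## Safety
///
/// The arguments must satisfy the checks performed by `Self::new`.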
pub(crate) unsafe fn new_unchecked(
addr: Address<M, T>,
head: BitIdx<T::Mem>,
bits: usize,
) -> Self {
let addr = addr.to_const().cast::<u8>();
let head = head.into_inner() as usize;
let ptr_data = addr as usize & Self::PTR_ADDR_MASK;
let ptr_head = head >> Self::LEN_HEAD_BITS;
let len_head = head & Self::LEN_HEAD_MASK;
let len_bits = bits << Self::LEN_HEAD_BITS;
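// Rebuild the tagged pointer by offsetting from the original `addr`,
// rather than casting the integer back into a pointer, so the result
// keeps the provenance of the incoming address.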
let ptr_raw = ptr_data | ptr_head;
let ptr = addr.wrapping_add(ptr_raw.wrapping_sub(addr as usize));
Self {
ptr: NonNull::new_unchecked(ptr.cast::<()>() as *mut ()),
len: len_bits | len_head,
..Self::EMPTY
}
}
}
impl<M, T, O> BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
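/// Recovers the base element address by masking off the head-index bits
/// stored in the low end of `ptr`.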
pub(crate) fn address(&self) -> Address<M, T> {
Address::new(unsafe {
NonNull::new_unchecked(
(self.ptr.as_ptr() as usize & Self::PTR_ADDR_MASK) as *mut T,
)
})
}
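/// Overwrites the element address, keeping the stored head bit-index
/// intact.
///
/// ## Safety
///
/// The new address must be non-null, well-aligned for `T`, and must point
/// to memory for which the span remains valid.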
#[cfg(feature = "alloc")]
pub(crate) unsafe fn set_address(&mut self, addr: Address<M, T>) {
let mut addr_value = addr.to_const() as usize;
addr_value &= Self::PTR_ADDR_MASK;
addr_value |= self.ptr.as_ptr() as usize & Self::PTR_HEAD_MASK;
self.ptr = NonNull::new_unchecked(addr_value as *mut ())
}
pub(crate) fn head(&self) -> BitIdx<T::Mem> {
let ptr = self.ptr.as_ptr() as usize;
let ptr_head = (ptr & Self::PTR_HEAD_MASK) << Self::LEN_HEAD_BITS;
let len_head = self.len & Self::LEN_HEAD_MASK;
unsafe { BitIdx::new_unchecked((ptr_head | len_head) as u8) }
}
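/// Overwrites the head bit-index, splitting it across the `ptr` and `len`
/// fields.
///
/// ## Safety
///
/// The caller must ensure that the rewritten span still describes the
/// memory region it is intended to govern.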
#[cfg(feature = "alloc")]
pub(crate) unsafe fn set_head(&mut self, head: BitIdx<T::Mem>) {
let head = head.into_inner() as usize;
let mut ptr = self.ptr.as_ptr() as usize;
ptr &= Self::PTR_ADDR_MASK;
ptr |= head >> Self::LEN_HEAD_BITS;
self.ptr = NonNull::new_unchecked(ptr as *mut ());
self.len &= !Self::LEN_HEAD_MASK;
self.len |= head & Self::LEN_HEAD_MASK;
}
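/// The number of live bits in the span.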
pub(crate) fn len(&self) -> usize {
self.len >> Self::LEN_HEAD_BITS
}
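/// Overwrites the bit-count. Debug builds route through the checked
/// constructor; release builds write the new count directly.
///
/// ## Safety
///
/// `new_len` must not exceed `REGION_MAX_BITS`, and the resulting span
/// must stay within its allocated region.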
pub(crate) unsafe fn set_len(&mut self, new_len: usize) {
if cfg!(debug_assertions) {
*self = Self::new(self.address(), self.head(), new_len).unwrap();
}
else {
self.len &= Self::LEN_HEAD_MASK;
self.len |= new_len << Self::LEN_HEAD_BITS;
}
}
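/// Decomposes the span into its address, head bit-index, and bit-count.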
pub(crate) fn raw_parts(&self) -> (Address<M, T>, BitIdx<T::Mem>, usize) {
(self.address(), self.head(), self.len())
}
}
impl<M, T, O> BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
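/// Counts the `T` elements the span touches, including partially-used
/// elements at either edge.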
pub(crate) fn elements(&self) -> usize {
crate::mem::elts::<T>(self.len() + self.head().into_inner() as usize)
}
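/// Computes the one-past-the-end bit index within the final touched
/// element.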
pub(crate) fn tail(&self) -> BitEnd<T::Mem> {
let (head, len) = (self.head(), self.len());
let (_, tail) = head.span(len);
tail
}
}
impl<M, T, O> BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
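/// Reinterprets the span over a different storage type, leaving the
/// encoded bit-pattern untouched. The caller is responsible for ensuring
/// that the new typing is meaningful.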
pub(crate) fn cast<U>(self) -> BitSpan<M, U, O>
where U: BitStore {
let Self { ptr, len, .. } = self;
BitSpan {
ptr,
len,
..BitSpan::EMPTY
}
}
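/// Splits the span into a minimal unaligned head, a maximal middle region
/// re-typed over `U`, and the trailing remainder, in the manner of
/// `slice::align_to`.
///
/// ## Safety
///
/// As with `slice::align_to`: the middle region may only be used when
/// reinterpreting `T` memory as `U` is itself sound.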
pub(crate) unsafe fn align_to<U>(self) -> (Self, BitSpan<M, U, O>, Self)
where U: BitStore {
let this = self.to_bitptr();
let mut rem = self.len();
let align = mem::align_of::<U>();
let step = this.align_offset(align);
if step > rem {
return (self, BitSpan::EMPTY, Self::EMPTY);
}
let left = this.span_unchecked(step);
rem -= step;
let mid_base =
this.add(step).address().cast::<U>().pipe(|addr| {
BitPtr::<M, U, O>::new_unchecked(addr, BitIdx::MIN)
});
let mid_elts = rem >> <U::Mem as BitRegister>::INDX;
let excess = rem & <U::Mem as BitRegister>::MASK as usize;
let step = rem - excess;
let mid = mid_base.span_unchecked(step);
let right_base =
mid_base.address().add(mid_elts).cast::<T>().pipe(|addr| {
BitPtr::<M, T, O>::new_unchecked(addr, BitIdx::MIN)
});
let right = right_base.span_unchecked(excess);
(left, mid, right)
}
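/// Recovers a span from a raw, possibly-null, mutable bit-slice pointer.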
pub(crate) fn from_bitslice_ptr_mut(raw: *mut BitSlice<T, O>) -> Self {
let BitSpan { ptr, len, .. } =
BitSpan::from_bitslice_ptr(raw as *const BitSlice<T, O>);
Self {
ptr,
len,
..Self::EMPTY
}
}
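/// Re-encodes the span as a raw `*const BitSlice` pointer, reusing its two
/// words as the slice pointer's address and length.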
pub(crate) fn into_bitslice_ptr(self) -> *const BitSlice<T, O> {
let Self { ptr, len, .. } = self;
ptr::slice_from_raw_parts(ptr.as_ptr(), len) as *const BitSlice<T, O>
}
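/// Converts the span into a shared bit-slice reference.
///
/// ## Safety
///
/// The span must describe memory that remains valid for shared access
/// throughout `'a`.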
pub(crate) unsafe fn into_bitslice_ref<'a>(self) -> &'a BitSlice<T, O> {
&*self.into_bitslice_ptr()
}
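/// The bit-pointer to the first live bit in the span.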
pub(crate) fn to_bitptr(self) -> BitPtr<M, T, O> {
unsafe { BitPtr::new_unchecked(self.address(), self.head()) }
}
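/// The half-open bit-pointer range covering every live bit in the span.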
pub(crate) fn to_bitptr_range(self) -> BitPtrRange<M, T, O> {
let start = self.to_bitptr();
let end = unsafe { start.add(self.len()) };
BitPtrRange { start, end }
}
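/// The span re-encoded as a non-null bit-slice address.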
pub(crate) fn to_bitslice_addr(self) -> Address<M, BitSlice<T, O>> {
(self.into_bitslice_ptr() as *mut BitSlice<T, O>)
.pipe(|ptr| unsafe { NonNull::new_unchecked(ptr) })
.pipe(Address::new)
}
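/// Produces a bit-slice reference, shared or exclusive according to `M`.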
pub(crate) fn to_bitslice<'a>(self) -> Reference<'a, M, BitSlice<T, O>>
where Address<M, BitSlice<T, O>>: Referential<'a> {
unsafe { self.to_bitslice_addr().to_ref() }
}
}
impl<T, O> BitSpan<Const, T, O>
where
T: BitStore,
O: BitOrder,
{
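/// Recovers a span from a raw shared bit-slice pointer. A null pointer
/// decodes as the empty span.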
pub(crate) fn from_bitslice_ptr(raw: *const BitSlice<T, O>) -> Self {
let slice_nn = match NonNull::new(raw as *const [()] as *mut [()]) {
Some(nn) => nn,
None => return Self::EMPTY,
};
let ptr = slice_nn.cast::<()>();
let len = unsafe { slice_nn.as_ref() }.len();
Self {
ptr,
len,
..Self::EMPTY
}
}
}
impl<T, O> BitSpan<Mut, T, O>
where
T: BitStore,
O: BitOrder,
{
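/// Re-encodes the span as a raw mutable bit-slice pointer.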
pub(crate) fn into_bitslice_ptr_mut(self) -> *mut BitSlice<T, O> {
self.into_bitslice_ptr() as *mut BitSlice<T, O>
}
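/// Converts the span into an exclusive bit-slice reference.
///
/// ## Safety
///
/// The span must describe memory that is valid for reads and writes, and
/// not aliased elsewhere, throughout `'a`.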
pub(crate) unsafe fn into_bitslice_mut<'a>(self) -> &'a mut BitSlice<T, O> {
&mut *self.into_bitslice_ptr_mut()
}
}
impl<M, T, O> BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
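/// Tests whether a bit-count can be stored in the length encoding.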
#[cfg(feature = "alloc")]
pub(crate) fn len_encodable(len: usize) -> bool {
len <= Self::REGION_MAX_BITS
}
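/// Shared `Debug` renderer for the bit-pointer family: writes the type
/// header, then the address, head, and bit-count, then any extra fields
/// supplied by the caller.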
pub(crate) fn render<'a>(
&'a self,
fmt: &'a mut Formatter,
name: &'a str,
fields: impl IntoIterator<Item = &'a (&'a str, &'a dyn Debug)>,
) -> fmt::Result {
write!(
fmt,
"Bit{}<{}, {}>",
name,
any::type_name::<T::Mem>(),
any::type_name::<O>(),
)?;
let mut builder = fmt.debug_struct("");
builder
.field("addr", &self.address().fmt_pointer())
.field("head", &self.head().fmt_binary())
.field("bits", &self.len());
for (name, value) in fields {
builder.field(name, value);
}
builder.finish()
}
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
#[inline]
fn clone(&self) -> Self {
*self
}
}
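/// Spans compare equal when their storage element width, base address,
/// head bit-index, and bit-count all match, regardless of mutability
/// marker.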
impl<M1, M2, O, T1, T2> PartialEq<BitSpan<M2, T2, O>> for BitSpan<M1, T1, O>
where
M1: Mutability,
M2: Mutability,
O: BitOrder,
T1: BitStore,
T2: BitStore,
{
#[inline]
fn eq(&self, other: &BitSpan<M2, T2, O>) -> bool {
let (addr_a, head_a, bits_a) = self.raw_parts();
let (addr_b, head_b, bits_b) = other.raw_parts();
bits_of::<T1::Mem>() == bits_of::<T2::Mem>()
&& addr_a.to_const() as usize == addr_b.to_const() as usize
&& head_a.into_inner() == head_b.into_inner()
&& bits_a == bits_b
}
}
impl<T, O> From<&BitSlice<T, O>> for BitSpan<Const, T, O>
where
T: BitStore,
O: BitOrder,
{
#[inline]
fn from(bits: &BitSlice<T, O>) -> Self {
Self::from_bitslice_ptr(bits)
}
}
impl<T, O> From<&mut BitSlice<T, O>> for BitSpan<Mut, T, O>
where
T: BitStore,
O: BitOrder,
{
#[inline]
fn from(bits: &mut BitSlice<T, O>) -> Self {
Self::from_bitslice_ptr_mut(bits)
}
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
#[inline]
fn default() -> Self {
Self::EMPTY
}
}
impl<M, T, O> Debug for BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
#[inline]
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
self.render(fmt, "Span", None)
}
}
impl<M, T, O> Pointer for BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
#[inline]
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
Pointer::fmt(&self.address(), fmt)?;
fmt.write_str("(")?;
Binary::fmt(&self.head(), fmt)?;
fmt.write_str(")[")?;
Display::fmt(&self.len(), fmt)?;
fmt.write_str("]")
}
}
impl<M, T, O> Copy for BitSpan<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
}
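/// Errors produced when the component parts of a bit-span do not form a
/// valid encoding.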
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum BitSpanError<T>
where T: BitStore
{
/// The base address was the null pointer.
Null(NullPtrError),
/// The base address was not aligned for the storage type.
Misaligned(MisalignError<T>),
/// The requested bit-count does not fit in the length encoding.
TooLong(usize),
/// The requested span would wrap around the end of the address space.
TooHigh(*const T),
}
#[cfg(not(tarpaulin_include))]
impl<T> From<BitPtrError<T>> for BitSpanError<T>
where T: BitStore
{
#[inline]
fn from(err: BitPtrError<T>) -> Self {
match err {
BitPtrError::Null(err) => Self::Null(err),
BitPtrError::Misaligned(err) => Self::Misaligned(err),
}
}
}
#[cfg(not(tarpaulin_include))]
impl<T> From<MisalignError<T>> for BitSpanError<T>
where T: BitStore
{
#[inline]
fn from(err: MisalignError<T>) -> Self {
Self::Misaligned(err)
}
}
#[cfg(not(tarpaulin_include))]
impl<T> Debug for BitSpanError<T>
where T: BitStore
{
#[inline]
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "BitSpanError<{}>::", any::type_name::<T::Mem>())?;
match self {
Self::Null(err) => fmt.debug_tuple("Null").field(&err).finish(),
Self::Misaligned(err) => {
fmt.debug_tuple("Misaligned").field(&err).finish()
},
Self::TooLong(len) => fmt.debug_tuple("TooLong").field(len).finish(),
Self::TooHigh(addr) => {
fmt.debug_tuple("TooHigh").field(addr).finish()
},
}
}
}
#[cfg(not(tarpaulin_include))]
impl<T> Display for BitSpanError<T>
where T: BitStore
{
#[inline]
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
match self {
Self::Null(err) => Display::fmt(err, fmt),
Self::Misaligned(err) => Display::fmt(err, fmt),
Self::TooLong(len) => write!(
fmt,
"Length {} is too long to encode in a bit-slice, which can \
only accept {} bits",
len,
BitSpan::<Const, T, Lsb0>::REGION_MAX_BITS,
),
Self::TooHigh(addr) => write!(
fmt,
"Address {:p} is too high, and produces a span that wraps \
around to the zero address.",
addr,
),
}
}
}
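// The raw pointer carried by `TooHigh` is used only for diagnostic display
// and is never dereferenced, so the error type may be sent and shared
// across threads.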
unsafe impl<T> Send for BitSpanError<T> where T: BitStore {}
unsafe impl<T> Sync for BitSpanError<T> where T: BitStore {}
#[cfg(feature = "std")]
impl<T> std::error::Error for BitSpanError<T> where T: BitStore {}