#[cfg(any(feature = "alloc", test))]
use core::convert::TryInto;
use core::{
any,
convert::Infallible,
fmt::{
self,
Debug,
Display,
Formatter,
Pointer,
},
marker::PhantomData,
ptr::{
self,
NonNull,
},
};
use funty::IsNumber;
use wyz::fmt::FmtForward;
use super::{
Address,
BitPtr,
BitPtrError,
BitPtrRange,
Const,
Mut,
Mutability,
};
use crate::{
domain::Domain,
index::{
BitEnd,
BitIdx,
},
mem::BitMemory,
order::{
BitOrder,
Lsb0,
},
slice::BitSlice,
store::BitStore,
};
/// Packed encoding of a span of bits.
///
/// This couples a base address, a starting bit index ("head"), and a bit
/// count into the same two-word `(pointer, length)` shape as a raw slice
/// pointer, so it can round-trip through `*const BitSlice<O, T>`.
/// `#[repr(C)]` pins the field order so that layout holds.
#[repr(C)]
pub(crate) struct BitSpan<M = Const, O = Lsb0, T = usize>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
	/// Base address, with the high bits of the head index packed into its
	/// low bits (see `PTR_HEAD_MASK` in the inherent impl).
	ptr: NonNull<()>,
	/// Bit count shifted up by `LEN_HEAD_BITS`, with the low bits of the
	/// head index packed into its low bits.
	len: usize,
	/// Carries the bit-ordering type parameter without storing a value.
	_or: PhantomData<O>,
	/// Carries the mutability and storage type parameters; `Address<M, T>`
	/// ties both into one marker.
	_ty: PhantomData<Address<M, T>>,
}
/// Encoding, decoding, and manipulation of the packed span representation.
impl<M, O, T> BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
	/// The canonical empty span: a dangling (but well-aligned for `T`)
	/// address with a zero length word.
	pub(crate) const EMPTY: Self = Self {
		ptr: NonNull::<T>::dangling().cast::<()>(),
		len: 0,
		_or: PhantomData,
		_ty: PhantomData,
	};
	/// Number of low bits of `self.len` that hold the low bits of the
	/// head index rather than the bit count.
	pub(crate) const LEN_HEAD_BITS: usize = 3;
	/// Mask selecting the head-index bits stored in `self.len`.
	pub(crate) const LEN_HEAD_MASK: usize = 0b111;
	/// Mask selecting the true address bits of `self.ptr` (everything
	/// above the packed head bits).
	pub(crate) const PTR_ADDR_MASK: usize = !0 << Self::PTR_HEAD_BITS;
	/// Number of low bits of `self.ptr` that hold the high bits of the
	/// head index. `T::Mem::INDX` is the bit width of a full head index,
	/// so this is zero for `u8` storage and grows with the element size.
	pub(crate) const PTR_HEAD_BITS: usize =
		T::Mem::INDX as usize - Self::LEN_HEAD_BITS;
	/// Mask selecting the head-index bits stored in `self.ptr`.
	pub(crate) const PTR_HEAD_MASK: usize = !Self::PTR_ADDR_MASK;
	/// Greatest bit count encodable in the `len` word once three bits are
	/// given over to the head index.
	pub(crate) const REGION_MAX_BITS: usize = !0 >> Self::LEN_HEAD_BITS;
	/// Greatest number of storage elements a maximal span can touch. The
	/// `+ 1` accounts for a nonzero head offset spilling the region into
	/// one additional element — TODO confirm against `crate::mem::elts`.
	pub(crate) const REGION_MAX_ELTS: usize =
		crate::mem::elts::<T::Mem>(Self::REGION_MAX_BITS) + 1;
	/// Constructs an empty span that nevertheless remembers `addr`.
	/// Used by the allocator-backed types to keep a real allocation
	/// address on zero-length buffers.
	#[inline(always)]
	#[cfg(feature = "alloc")]
	#[cfg(not(tarpaulin_include))]
	pub(crate) fn uninhabited(addr: Address<M, T>) -> Self {
		Self {
			ptr: addr.into_inner().cast::<()>(),
			len: 0,
			_or: PhantomData,
			_ty: PhantomData,
		}
	}
	/// Checked constructor.
	///
	/// # Errors
	///
	/// - `BitSpanError::TooLong` if `bits` exceeds `REGION_MAX_BITS`.
	/// - `BitSpanError::TooHigh` if advancing `bits` past the starting
	///   bit-pointer wraps the address space (detected by the wrapped
	///   pointer comparing below its base).
	#[inline]
	pub(crate) fn new(
		addr: Address<M, T>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Result<Self, BitSpanError<T>> {
		if bits > Self::REGION_MAX_BITS {
			return Err(BitSpanError::TooLong(bits));
		};
		let base = BitPtr::<M, O, T>::new(addr, head);
		let last = base.wrapping_add(bits);
		if last < base {
			return Err(BitSpanError::TooHigh(addr.to_const()));
		};
		Ok(unsafe { Self::new_unchecked(addr, head, bits) })
	}
	/// Unchecked constructor: packs `(addr, head, bits)` into the
	/// two-word representation with no range or wrap validation.
	///
	/// # Safety
	///
	/// Callers must uphold the invariants that `new` checks: `bits` within
	/// `REGION_MAX_BITS` and the span not wrapping the address space.
	#[inline]
	pub(crate) unsafe fn new_unchecked(
		addr: Address<M, T>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Self {
		let head = head.into_inner() as usize;
		// True address bits; assumes `addr` is aligned so its low
		// `PTR_HEAD_BITS` bits are free for reuse.
		let ptr_data = addr.to_const() as usize & Self::PTR_ADDR_MASK;
		// High head bits go into the pointer word, low three into `len`.
		let ptr_head = head >> Self::LEN_HEAD_BITS;
		let len_head = head & Self::LEN_HEAD_MASK;
		let len_bits = bits << Self::LEN_HEAD_BITS;
		Self {
			ptr: NonNull::new_unchecked((ptr_data | ptr_head) as *mut ()),
			len: len_bits | len_head,
			_or: PhantomData,
			_ty: PhantomData,
		}
	}
	/// Reconstructs a span from a raw mutable `BitSlice` pointer by
	/// delegating to the shared-pointer decoder.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub(crate) fn from_bitslice_ptr_mut(raw: *mut BitSlice<O, T>) -> Self {
		let BitSpan { ptr, len, _or, .. } =
			BitSpan::from_bitslice_ptr(raw as *const BitSlice<O, T>);
		Self {
			ptr,
			len,
			_or,
			_ty: PhantomData,
		}
	}
	/// Reinterprets the span as a raw `BitSlice` pointer. The packed
	/// words become the data-pointer and length of a `*const [()]`, which
	/// is then cast to the unsized `BitSlice` pointer type.
	#[inline]
	pub(crate) fn to_bitslice_ptr(self) -> *const BitSlice<O, T> {
		ptr::slice_from_raw_parts(
			self.ptr.as_ptr() as *const u8 as *const (),
			self.len,
		) as *const BitSlice<O, T>
	}
	/// Converts the span into a shared `BitSlice` reference with a
	/// caller-chosen lifetime.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub(crate) fn to_bitslice_ref<'a>(self) -> &'a BitSlice<O, T> {
		unsafe { &*self.to_bitslice_ptr() }
	}
	/// Reinterprets the span over a different storage type without
	/// adjusting the packed words. NOTE(review): this preserves the raw
	/// encoding, so the head/len split is only meaningful if `U` has the
	/// same element width as `T` — callers must ensure that.
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub(crate) fn cast<U>(self) -> BitSpan<M, O, U>
	where U: BitStore {
		let Self { ptr, len, .. } = self;
		BitSpan {
			ptr,
			len,
			..BitSpan::EMPTY
		}
	}
	/// Bit-span analogue of `slice::align_to`: splits the region into a
	/// leading `T` span, a central `U` span, and a trailing `T` span.
	///
	/// Partial head/tail elements reported by the `Domain` view stay in
	/// the edge spans; only the fully-spanned interior is realigned.
	///
	/// # Safety
	///
	/// Inherits the contract of `slice::align_to` on the interior body.
	pub(crate) unsafe fn align_to<U>(self) -> (Self, BitSpan<M, O, U>, Self)
	where U: BitStore {
		match self.to_bitslice_ref().domain() {
			// A span confined to one partial element cannot be realigned.
			Domain::Enclave { .. } => (self, BitSpan::EMPTY, BitSpan::EMPTY),
			Domain::Region { head, body, tail } => {
				let (l, c, r) = body.align_to::<U::Mem>();
				let t_bits = T::Mem::BITS as usize;
				let u_bits = U::Mem::BITS as usize;
				let l_bits = l.len() * t_bits;
				let c_bits = c.len() * u_bits;
				let r_bits = r.len() * t_bits;
				let l_addr = l.as_ptr() as *const T as *mut T;
				let c_addr = c.as_ptr() as *const U as *mut U;
				let r_addr = r.as_ptr() as *const T as *mut T;
				// Left span: a partial head element (if any) plus the
				// unaligned leading `T` elements.
				let l_ptr = match head {
					Some((head, addr)) => BitSpan::new_unchecked(
						Address::new(NonNull::from(addr)),
						head,
						// Live bits in the head element, then the full
						// leading elements.
						t_bits - head.into_inner() as usize + l_bits,
					),
					None => {
						if l_bits == 0 {
							BitSpan::EMPTY
						}
						else {
							BitSpan::new_unchecked(
								Address::new(NonNull::new(l_addr).unwrap()),
								BitIdx::ZERO,
								l_bits,
							)
						}
					},
				};
				// Center span: the realigned `U` interior, starting at
				// bit zero of its first element.
				let c_ptr = if c_bits == 0 {
					BitSpan::EMPTY
				}
				else {
					BitSpan::new_unchecked(
						Address::new(NonNull::new(c_addr).unwrap()),
						BitIdx::ZERO,
						c_bits,
					)
				};
				// Right span: trailing `T` elements plus a partial tail
				// element (if any).
				let r_ptr = match tail {
					Some((addr, tail)) => BitSpan::new_unchecked(
						// With no full trailing elements, the span starts
						// at the partial tail element itself.
						if r.is_empty() {
							Address::new(NonNull::from(addr))
						}
						else {
							Address::new(NonNull::new(r_addr).unwrap())
						},
						BitIdx::ZERO,
						tail.into_inner() as usize + r_bits,
					),
					None => {
						if !r.is_empty() {
							BitSpan::new_unchecked(
								Address::new(NonNull::new(r_addr).unwrap()),
								BitIdx::ZERO,
								r_bits,
							)
						}
						else {
							BitSpan::EMPTY
						}
					},
				};
				(l_ptr, c_ptr, r_ptr)
			},
		}
	}
	/// Extracts the element address by masking off the packed head bits.
	#[inline]
	pub(crate) fn address(&self) -> Address<M, T> {
		unsafe {
			Address::new(NonNull::new_unchecked(
				(self.ptr.as_ptr() as usize & Self::PTR_ADDR_MASK) as *mut T,
			))
		}
	}
	/// Overwrites the address bits of the pointer word, preserving the
	/// packed head bits.
	///
	/// # Safety
	///
	/// The new address must be valid for the span's length and head; the
	/// conversion is `unwrap`ped, so an invalid `addr` panics.
	#[inline]
	#[cfg(any(feature = "alloc", test))]
	pub(crate) unsafe fn set_address<A>(&mut self, addr: A)
	where
		A: TryInto<Address<M, T>>,
		A::Error: Debug,
	{
		let addr = addr.try_into().unwrap();
		let mut addr_value = addr.to_const() as usize;
		addr_value &= Self::PTR_ADDR_MASK;
		addr_value |= self.ptr.as_ptr() as usize & Self::PTR_HEAD_MASK;
		self.ptr = NonNull::new_unchecked(addr_value as *mut ());
	}
	/// Reassembles the head index from its two packed halves: high bits
	/// from the pointer word, low three bits from the length word.
	#[inline]
	pub(crate) fn head(&self) -> BitIdx<T::Mem> {
		let ptr = self.ptr.as_ptr() as usize;
		let ptr_head = (ptr & Self::PTR_HEAD_MASK) << Self::LEN_HEAD_BITS;
		let len_head = self.len & Self::LEN_HEAD_MASK;
		unsafe { BitIdx::new_unchecked((ptr_head | len_head) as u8) }
	}
	/// Writes a new head index into both packed halves, preserving the
	/// address and bit-count fields.
	///
	/// # Safety
	///
	/// The resulting span must still describe a valid region.
	#[inline]
	#[cfg(any(feature = "alloc", test))]
	pub(crate) unsafe fn set_head(&mut self, head: BitIdx<T::Mem>) {
		let head = head.into_inner() as usize;
		let mut ptr = self.ptr.as_ptr() as usize;
		ptr &= Self::PTR_ADDR_MASK;
		ptr |= head >> Self::LEN_HEAD_BITS;
		self.ptr = NonNull::new_unchecked(ptr as *mut ());
		self.len &= !Self::LEN_HEAD_MASK;
		self.len |= head & Self::LEN_HEAD_MASK;
	}
	/// The number of live bits in the span (the length word with the
	/// packed head bits shifted out).
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub(crate) fn len(&self) -> usize {
		self.len >> Self::LEN_HEAD_BITS
	}
	/// Writes a new bit count, preserving the packed head bits.
	///
	/// # Safety
	///
	/// `new_len` must not exceed `REGION_MAX_BITS` (checked only in
	/// debug builds) and must describe a region the span really owns.
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub(crate) unsafe fn set_len(&mut self, new_len: usize) {
		debug_assert!(
			new_len <= Self::REGION_MAX_BITS,
			"Length {} out of range",
			new_len,
		);
		self.len &= Self::LEN_HEAD_MASK;
		self.len |= new_len << Self::LEN_HEAD_BITS;
	}
	/// The span's starting position as a single-bit pointer.
	#[inline]
	pub(crate) fn as_bitptr(self) -> BitPtr<M, O, T> {
		BitPtr::new(self.address(), self.head())
	}
	/// The span as a half-open range of bit-pointers.
	#[inline]
	pub(crate) fn as_bitptr_range(self) -> BitPtrRange<M, O, T> {
		unsafe { self.as_bitptr().range(self.len()) }
	}
	/// Unpacks the span into its three logical components.
	#[inline]
	pub(crate) fn raw_parts(&self) -> (Address<M, T>, BitIdx<T::Mem>, usize) {
		(self.address(), self.head(), self.len())
	}
	/// The number of storage elements the span touches: full elements
	/// covered by `head + len`, plus one if a partial element remains.
	#[inline]
	pub(crate) fn elements(&self) -> usize {
		let total = self.len() + self.head().into_inner() as usize;
		let base = total >> T::Mem::INDX;
		let tail = total as u8 & T::Mem::MASK;
		base + (tail != 0) as usize
	}
	/// The one-past-the-end bit index within the final element.
	///
	/// An empty span at head zero ends at zero; otherwise a wrapped-to-
	/// zero tail is reported as the full element width (`T::Mem::BITS`),
	/// which the `(tail == 0) << INDX` term produces.
	#[inline]
	pub(crate) fn tail(&self) -> BitEnd<T::Mem> {
		let (head, len) = (self.head(), self.len());
		if head.into_inner() == 0 && len == 0 {
			return BitEnd::ZERO;
		}
		let tail = (head.into_inner() as usize + len) & T::Mem::MASK as usize;
		unsafe {
			BitEnd::new_unchecked(
				(((tail == 0) as u8) << T::Mem::INDX) | tail as u8,
			)
		}
	}
	/// Advances the head by one bit. The low three head bits live in
	/// `len`; the remainder, `head >> 3`, is byte-granular and is added
	/// to the masked address word, so overflow past the element width
	/// steps the address to the next element automatically.
	/// NOTE(review): this does not decrement the bit count — callers are
	/// expected to manage `len` separately.
	///
	/// # Safety
	///
	/// The span must have at least one more addressable bit.
	#[inline]
	pub(crate) unsafe fn incr_head(&mut self) {
		let head = self.head().into_inner() as usize + 1;
		self.len &= !Self::LEN_HEAD_MASK;
		self.len |= head & Self::LEN_HEAD_MASK;
		let head = head >> Self::LEN_HEAD_BITS;
		let mut ptr = self.ptr.as_ptr() as usize;
		ptr &= Self::PTR_ADDR_MASK;
		ptr += head;
		self.ptr = NonNull::new_unchecked(ptr as *mut ());
	}
	/// Shared debug-rendering helper: writes `Bit{name}<O, T::Mem>`
	/// followed by a struct body of address/head/bits and any extra
	/// caller-supplied fields.
	pub(crate) fn render<'a>(
		&'a self,
		fmt: &'a mut Formatter,
		name: &'a str,
		fields: impl IntoIterator<Item = &'a (&'a str, &'a dyn Debug)>,
	) -> fmt::Result {
		write!(
			fmt,
			"Bit{}<{}, {}>",
			name,
			any::type_name::<O>(),
			any::type_name::<T::Mem>()
		)?;
		let mut builder = fmt.debug_struct("");
		builder
			.field("addr", &self.address().fmt_pointer())
			.field("head", &self.head().fmt_binary())
			.field("bits", &self.len());
		for (name, value) in fields {
			builder.field(name, value);
		}
		builder.finish()
	}
}
/// Decoding of raw `BitSlice` pointers back into spans.
impl<O, T> BitSpan<Const, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// Reconstructs a span from a raw shared `BitSlice` pointer by
	/// splitting the fat pointer back into its data-pointer and length
	/// words. A null data pointer decodes to the canonical `EMPTY` span.
	#[inline]
	pub(crate) fn from_bitslice_ptr(raw: *const BitSlice<O, T>) -> Self {
		let slice_nn = match NonNull::new(raw as *const [()] as *mut [()]) {
			Some(nn) => nn,
			None => return Self::EMPTY,
		};
		let ptr = slice_nn.cast::<()>();
		// The slice "length" is the packed `len` word, not a bit count.
		let len = unsafe { slice_nn.as_ref() }.len();
		Self {
			ptr,
			len,
			_or: PhantomData,
			_ty: PhantomData,
		}
	}
}
/// Mutable-pointer views, available only on write-capable spans.
impl<O, T> BitSpan<Mut, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// Reinterprets the span as a raw mutable `BitSlice` pointer.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub(crate) fn to_bitslice_ptr_mut(self) -> *mut BitSlice<O, T> {
		let shared = self.to_bitslice_ptr();
		shared as *mut BitSlice<O, T>
	}
	/// Converts the span into an exclusive `BitSlice` reference with a
	/// caller-chosen lifetime.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub(crate) fn to_bitslice_mut<'a>(self) -> &'a mut BitSlice<O, T> {
		let raw = self.to_bitslice_ptr_mut();
		unsafe { &mut *raw }
	}
}
#[cfg(not(tarpaulin_include))]
impl<M, O, T> Clone for BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
	/// Spans are plain bit-copies; clone by dereference.
	#[inline(always)]
	fn clone(&self) -> Self {
		*self
	}
}
// Equality is total (no NaN-like values), so `Eq` is a free marker.
impl<M, O, T> Eq for BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
}
/// Cross-mutability, cross-storage span comparison: two spans are equal
/// when their storage widths match and their decoded address, head, and
/// bit-count components all agree.
impl<M1, M2, O, T1, T2> PartialEq<BitSpan<M2, O, T2>> for BitSpan<M1, O, T1>
where
	M1: Mutability,
	M2: Mutability,
	O: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	#[inline]
	fn eq(&self, other: &BitSpan<M2, O, T2>) -> bool {
		let (lhs_addr, lhs_head, lhs_bits) = self.raw_parts();
		let (rhs_addr, rhs_head, rhs_bits) = other.raw_parts();
		// All operands are pure, so evaluation order is immaterial.
		lhs_bits == rhs_bits
			&& T1::Mem::BITS == T2::Mem::BITS
			&& lhs_addr.to_const() as usize == rhs_addr.to_const() as usize
			&& lhs_head.into_inner() == rhs_head.into_inner()
	}
}
#[cfg(not(tarpaulin_include))]
impl<M, O, T> Default for BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
	/// The default span is the canonical empty span.
	#[inline(always)]
	fn default() -> Self {
		Self::EMPTY
	}
}
#[cfg(not(tarpaulin_include))]
impl<M, O, T> Debug for BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
	/// Debug output delegates to the `Pointer` rendering below.
	#[inline(always)]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Pointer::fmt(self, fmt)
	}
}
#[cfg(not(tarpaulin_include))]
impl<M, O, T> Pointer for BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
	/// Renders as `BitPtr<…>` via the shared `render` helper, with no
	/// extra fields.
	#[inline(always)]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		self.render(fmt, "Ptr", None)
	}
}
// A span is two machine words with no drop glue, so it is freely `Copy`.
impl<M, O, T> Copy for BitSpan<M, O, T>
where
	M: Mutability,
	O: BitOrder,
	T: BitStore,
{
}
/// Failure modes of `BitSpan` construction.
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)]
pub enum BitSpanError<T>
where T: BitStore
{
	/// The underlying single-bit pointer was invalid.
	InvalidBitptr(BitPtrError<T>),
	/// The requested bit count exceeds `REGION_MAX_BITS`; carries the
	/// rejected count.
	TooLong(usize),
	/// The span would wrap the address space; carries the base address.
	TooHigh(*const T),
}
#[cfg(not(tarpaulin_include))]
impl<T> From<BitPtrError<T>> for BitSpanError<T>
where T: BitStore
{
	/// Lifts a bit-pointer error into the span error type, enabling `?`
	/// propagation.
	#[inline(always)]
	fn from(err: BitPtrError<T>) -> Self {
		Self::InvalidBitptr(err)
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> From<Infallible> for BitSpanError<T>
where T: BitStore
{
	/// `Infallible` has no values, so this conversion can never run.
	/// Matching on the never-inhabited value lets the compiler prove the
	/// body unreachable statically, rather than relying on a runtime
	/// `unreachable!` panic that can never fire.
	#[inline(always)]
	fn from(err: Infallible) -> Self {
		match err {}
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Debug for BitSpanError<T>
where T: BitStore
{
	/// Writes `BitSpanError<T>::Variant(payload)` using the tuple
	/// debug builder for each variant.
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		write!(fmt, "BitSpanError<{}>::", any::type_name::<T>())?;
		match self {
			Self::InvalidBitptr(err) => fmt
				.debug_tuple("InvalidBitptr")
				.field(&err)
				.finish(),
			Self::TooLong(len) => fmt
				.debug_tuple("TooLong")
				.field(&len)
				.finish(),
			Self::TooHigh(addr) => fmt
				.debug_tuple("TooHigh")
				.field(&addr)
				.finish(),
		}
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Display for BitSpanError<T>
where T: BitStore
{
	/// Human-readable rendering: pointer errors delegate to their own
	/// `Display`; the length and address variants explain the limit that
	/// was violated.
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		match self {
			Self::InvalidBitptr(err) => Display::fmt(err, fmt),
			Self::TooLong(len) => write!(
				fmt,
				"Length {} is too long to encode in a bit slice, which can \
				 only accept {} bits",
				len,
				// The limit is storage-independent, so any instantiation
				// of the constant reports the same value.
				BitSpan::<Const, Lsb0, usize>::REGION_MAX_BITS
			),
			Self::TooHigh(addr) => {
				write!(fmt, "Address {:p} would wrap the address space", addr)
			},
		}
	}
}
// SAFETY: the `*const T` payload in `TooHigh` is what blocks the auto
// impl; in this module it is only ever read for formatting, never
// dereferenced — NOTE(review): confirm no other code dereferences it.
unsafe impl<T> Send for BitSpanError<T> where T: BitStore
{
}
// SAFETY: as with `Send` — the raw-pointer payload is inert data used
// only for display, so shared references are harmless.
unsafe impl<T> Sync for BitSpanError<T> where T: BitStore
{
}
// `std::error::Error` needs only `Debug + Display`, both provided above;
// gated on `std` because the trait lives outside `core` on this toolchain.
#[cfg(feature = "std")]
impl<T> std::error::Error for BitSpanError<T> where T: BitStore
{
}
#[cfg(test)]
mod tests {
	use core::{
		convert::TryFrom,
		mem,
		ptr,
	};
	use tap::Pipe;
	use super::*;
	use crate::{
		prelude::*,
		ptr::{
			check_alignment,
			MisalignError,
			NullPtrError,
		},
	};
	/// Constructor error paths: null addresses, misaligned addresses,
	/// over-long spans, and spans that would wrap the address space.
	#[test]
	fn ctor() {
		assert!(matches!(
			Address::<Const, u8>::try_from(ptr::null()),
			Err(NullPtrError),
		));
		assert!(matches!(
			Address::<Const, u16>::try_from(3 as *const u16).unwrap().pipe(check_alignment),
			Err(MisalignError { ptr }) if ptr as usize == 3,
		));
		// A null data pointer must decode to the canonical empty span.
		assert_eq!(
			BitSpan::<Const, LocalBits, u8>::from_bitslice_ptr(
				ptr::slice_from_raw_parts(ptr::null::<()>(), 1)
					as *mut BitSlice<LocalBits, u8>
			),
			BitSpan::<Const, LocalBits, u8>::EMPTY,
		);
		let data = 0u16;
		let mut addr = Address::from(&data);
		let head = BitIdx::new(5).unwrap();
		// `!3` bits exceeds `REGION_MAX_BITS`.
		assert!(BitSpan::<_, Lsb0, _>::new(addr, head, !3).is_err());
		// A span starting near the top of memory wraps the address space.
		addr = Address::try_from(!1 as *const u16).unwrap();
		assert!(BitSpan::<_, Lsb0, _>::new(addr, head, 50).is_err());
	}
	/// A span must survive the round-trip through a raw `BitSlice` pointer.
	#[test]
	fn recast() {
		let data = 0u32;
		let bitspan = unsafe { BitPtr::from_ref(&data).span_unchecked(32) };
		let raw_ptr = bitspan.to_bitslice_ptr();
		assert_eq!(
			bitspan,
			BitSpan::<Const, Lsb0, u32>::from_bitslice_ptr(raw_ptr)
		);
	}
	/// `align_to` must conserve the total bit count across the three
	/// returned spans for various head/tail trims.
	#[test]
	fn realign() {
		let data = [0u8; 10];
		let bits = data.view_bits::<LocalBits>();
		let (l, c, r) = unsafe { bits.as_bitspan().align_to::<u16>() };
		assert_eq!(l.len() + c.len() + r.len(), 80);
		let (l, c, r) = unsafe { bits[4 ..].as_bitspan().align_to::<u16>() };
		assert_eq!(l.len() + c.len() + r.len(), 76);
		let (l, c, r) = unsafe { bits[.. 76].as_bitspan().align_to::<u16>() };
		assert_eq!(l.len() + c.len() + r.len(), 76);
		let (l, c, r) = unsafe { bits[8 ..].as_bitspan().align_to::<u16>() };
		assert_eq!(l.len() + c.len() + r.len(), 72);
		let (l, c, r) = unsafe { bits[.. 72].as_bitspan().align_to::<u16>() };
		assert_eq!(l.len() + c.len() + r.len(), 72);
		let (l, c, r) = unsafe { bits[4 .. 76].as_bitspan().align_to::<u16>() };
		assert_eq!(l.len() + c.len() + r.len(), 72);
	}
	/// In-place mutators (`set_address`, `set_head`) must update only
	/// their own packed field, leaving the others intact.
	#[test]
	fn modify() {
		let (a, b) = (0u16, 1u16);
		let mut bitspan = a.view_bits::<LocalBits>().as_bitspan();
		// Expected raw words: the address, and 16 bits shifted past the
		// three packed head bits.
		let mut expected = (&a as *const _ as usize, 16usize << 3);
		assert_eq!(bitspan.address().to_const(), &a as *const _);
		assert_eq!(bitspan.ptr.as_ptr() as usize, expected.0);
		assert_eq!(bitspan.len, expected.1);
		expected.0 = &b as *const _ as usize;
		unsafe {
			bitspan.set_address(&b as *const _);
		}
		assert_eq!(bitspan.address().to_const(), &b as *const _);
		assert_eq!(bitspan.ptr.as_ptr() as usize, expected.0);
		assert_eq!(bitspan.len, expected.1);
		let orig_head = bitspan.head();
		unsafe {
			bitspan.set_head(orig_head.next().0);
		}
		assert_eq!(bitspan.head(), orig_head.next().0);
	}
	/// The packed span must stay exactly the size of a raw slice pointer,
	/// including under `Option` (niche-filled via `NonNull`).
	#[test]
	fn mem_size() {
		assert_eq!(
			mem::size_of::<BitSpan<Const, LocalBits, usize>>(),
			mem::size_of::<*const [usize]>()
		);
		assert_eq!(
			mem::size_of::<Option<BitSpan<Const, LocalBits, usize>>>(),
			mem::size_of::<*const [usize]>()
		);
	}
}