use crate::{
access::BitAccess,
devel as dvl,
index::{
BitIdx,
BitTail,
},
mem::BitMemory,
order::BitOrder,
slice::BitSlice,
store::BitStore,
};
use core::{
any,
fmt::{
self,
Debug,
Formatter,
Pointer,
},
marker::PhantomData,
ptr::{
self,
NonNull,
},
slice,
};
use wyz::fmt::FmtForward;
/// A typed wrapper over a bare numeric memory address.
///
/// This carries only the `usize` value of the address; the `PhantomData`
/// marker ties it to the element type `T` without owning a `T`. Sibling
/// `impl` blocks provide casts to the various pointer views of `T`.
#[doc(hidden)]
#[derive(Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Address<T>
where T: BitStore
{
	// The raw numeric value of the address.
	addr: usize,
	// Marker binding the address to the referent type `T`.
	_ty: PhantomData<T>,
}
#[cfg(not(tarpaulin_include))]
impl<T> Address<T>
where T: BitStore
{
	/// Wraps a bare numeric value as a typed address.
	#[inline(always)]
	pub(crate) fn new(addr: usize) -> Self {
		Self {
			_ty: PhantomData,
			addr,
		}
	}

	/// Views the address as a pointer to the store's `Access` type.
	#[inline(always)]
	pub(crate) fn to_access(self) -> *const T::Access {
		self.to_const() as *const T::Access
	}

	/// Views the address as a pointer to the store's `Alias` type.
	#[inline(always)]
	pub(crate) fn to_alias(self) -> *const T::Alias {
		self.to_const() as *const T::Alias
	}

	/// Views the address as an immutable pointer to `T`.
	#[inline(always)]
	pub(crate) fn to_const(self) -> *const T {
		self.addr as *const T
	}

	/// Views the address as a mutable pointer to `T`.
	//  `to_` naming is deliberate here even though `self` is by-value.
	#[inline(always)]
	#[allow(clippy::wrong_self_convention)]
	pub(crate) fn to_mut(self) -> *mut T {
		self.to_const() as *mut T
	}

	/// Unwraps the address back into its bare numeric value.
	#[inline(always)]
	pub(crate) fn value(self) -> usize {
		self.addr
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Clone for Address<T>
where T: BitStore
{
	/// `Address` is `Copy`, so cloning is a plain bitwise duplication.
	#[inline(always)]
	fn clone(&self) -> Self {
		*self
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> From<&T> for Address<T>
where T: BitStore
{
	/// Captures the address of a shared reference, routing through the
	/// `*const T` conversion.
	#[inline(always)]
	fn from(addr: &T) -> Self {
		Self::from(addr as *const T)
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> From<*const T> for Address<T>
where T: BitStore
{
	/// Captures the numeric value of a const pointer.
	#[inline(always)]
	fn from(addr: *const T) -> Self {
		let value = addr as usize;
		Self::new(value)
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> From<&mut T> for Address<T>
where T: BitStore
{
	/// Captures the address of an exclusive reference, routing through the
	/// raw-pointer conversion.
	#[inline(always)]
	fn from(addr: &mut T) -> Self {
		Self::from(addr as *mut T)
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> From<*mut T> for Address<T>
where T: BitStore
{
	/// Captures the numeric value of a mutable pointer. Mutability is erased;
	/// only the address is retained.
	#[inline(always)]
	fn from(addr: *mut T) -> Self {
		(addr as *const T).into()
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Debug for Address<T>
where T: BitStore
{
	/// Debug output defers entirely to the `Pointer` rendering.
	#[inline(always)]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Pointer::fmt(self, fmt)
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Pointer for Address<T>
where T: BitStore
{
	/// Renders the address as an ordinary const pointer, forwarding the
	/// caller's `Formatter` so flags such as `{:#p}` are honored.
	#[inline(always)]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		let raw = self.to_const();
		Pointer::fmt(&raw, fmt)
	}
}
/// `Address` is only a `usize` plus a marker, so it is freely copyable.
impl<T> Copy for Address<T> where T: BitStore
{
}
/// Packed pointer/length descriptor for a region of bits.
///
/// Layout (established by `new_unchecked`/`head` below): the low
/// `PTR_HEAD_BITS` bits of `ptr` hold the high bits of the head bit-index,
/// and the low `LEN_HEAD_BITS` (3) bits of `len` hold its low bits; the
/// remaining high bits of `len` hold the region's bit count. `repr(C)` keeps
/// the two words in declaration order, mirroring a slice fat pointer.
#[repr(C)]
#[derive(Eq, Hash)]
pub struct BitPtr<T>
where T: BitStore
{
	// Base element address, with head bits packed into the low bits.
	// `NonNull<u8>` so that `Option<BitPtr<T>>`-style niches stay available
	// and byte-granularity address arithmetic is direct.
	ptr: NonNull<u8>,
	// Bit count shifted left by three, with the low three head bits packed
	// into the bottom.
	len: usize,
	// Marker for the governed element type; `*mut T` keeps the type invariant
	// without implying ownership of a `T`.
	_ty: PhantomData<*mut T>,
}
impl<T> BitPtr<T>
where T: BitStore
{
	/// The canonical empty region: a well-aligned dangling address with a
	/// zeroed length/head word.
	pub(crate) const EMPTY: Self = Self {
		ptr: unsafe {
			NonNull::new_unchecked(NonNull::<T>::dangling().as_ptr() as *mut u8)
		},
		len: 0,
		_ty: PhantomData,
	};
	/// Number of head-index bits stored in the low bits of `self.len`.
	pub(crate) const LEN_HEAD_BITS: usize = 3;
	/// Mask selecting those low three bits of `self.len`.
	pub(crate) const LEN_HEAD_MASK: usize = 0b0111;
	/// Mask selecting the element-address portion of `self.ptr`.
	pub(crate) const PTR_ADDR_MASK: usize = !0 << Self::PTR_HEAD_BITS;
	/// Number of head-index bits stored in the low bits of `self.ptr`:
	/// whatever remains of the index width after three bits go into `len`.
	pub(crate) const PTR_HEAD_BITS: usize =
		T::Mem::INDX as usize - Self::LEN_HEAD_BITS;
	/// Mask selecting the head-bit portion of `self.ptr`.
	pub(crate) const PTR_HEAD_MASK: usize = !Self::PTR_ADDR_MASK;
	/// Maximum bit count a region may describe, after `len` cedes three bits
	/// to head storage.
	pub(crate) const REGION_MAX_BITS: usize = !0 >> Self::LEN_HEAD_BITS;
	/// Maximum number of `T::Mem` elements a maximal region may touch, plus
	/// one for a possible trailing partial element.
	pub(crate) const REGION_MAX_ELTS: usize =
		crate::mem::elts::<T::Mem>(Self::REGION_MAX_BITS) + 1;
	/// Constructs a zero-length region at `addr`.
	///
	/// Panics when `addr` lacks the `PTR_HEAD_BITS` trailing zeros needed to
	/// host the packed head field; a null `addr` degrades to `EMPTY` instead.
	#[cfg(feature = "alloc")]
	pub(crate) fn uninhabited(addr: impl Into<Address<T>>) -> Self {
		let addr = addr.into();
		assert!(
			addr.value().trailing_zeros() as usize >= Self::PTR_HEAD_BITS,
			"Pointer {:p} does not satisfy minimum alignment requirements {}",
			addr.to_const(),
			Self::PTR_HEAD_BITS
		);
		Self {
			ptr: match NonNull::new(addr.to_mut() as *mut u8) {
				Some(nn) => nn,
				None => return Self::EMPTY,
			},
			len: 0,
			_ty: PhantomData,
		}
	}
	/// Checked constructor: packs `(addr, head, bits)` into the two-word
	/// representation.
	///
	/// Returns `None` when `addr` is null or under-aligned, when `bits`
	/// exceeds `REGION_MAX_BITS`, or when the spanned elements would wrap
	/// past the end of the address space.
	pub(crate) fn new(
		addr: impl Into<Address<T>>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Option<Self> {
		let addr = addr.into();
		if addr.to_const().is_null()
			|| (addr.value().trailing_zeros() as usize) < Self::PTR_HEAD_BITS
			|| bits > Self::REGION_MAX_BITS
		{
			return None;
		}
		// `span(bits).0` is the element count the region occupies
		// (see `index::BitIdx::span` — presumed; confirm against that module).
		let elts = head.span(bits).0;
		// Wrapping add, then compare: detects address-space overflow without
		// tripping UB on out-of-bounds pointer arithmetic.
		let last = addr.to_const().wrapping_add(elts);
		if last < addr.to_const() {
			return None;
		}
		Some(unsafe { Self::new_unchecked(addr, head, bits) })
	}
	/// Unchecked constructor: performs the packing with no validation.
	///
	/// # Safety
	/// Caller must uphold everything `new` checks: non-null, aligned address;
	/// `bits <= REGION_MAX_BITS`; no address-space wrap.
	#[inline]
	pub(crate) unsafe fn new_unchecked(
		addr: impl Into<Address<T>>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Self {
		let (addr, head) = (addr.into(), head.value() as usize);
		// Split the head index: high bits ride in the pointer's low bits,
		// low three bits ride in the length counter's low bits.
		let ptr_data = addr.value() & Self::PTR_ADDR_MASK;
		let ptr_head = head >> Self::LEN_HEAD_BITS;
		let len_head = head & Self::LEN_HEAD_MASK;
		let len_bits = bits << Self::LEN_HEAD_BITS;
		let ptr = Address::new(ptr_data | ptr_head);
		Self {
			ptr: NonNull::new_unchecked(ptr.to_mut()),
			len: len_bits | len_head,
			_ty: PhantomData,
		}
	}
	/// Extracts the element address, stripping the packed head bits.
	#[inline]
	pub(crate) fn pointer(&self) -> Address<T> {
		Address::new(self.ptr.as_ptr() as usize & Self::PTR_ADDR_MASK)
	}
	/// Replaces the element address, preserving the packed head bits.
	/// A null `addr` collapses the whole pointer to `EMPTY`.
	///
	/// # Safety
	/// Caller must ensure `addr` upholds the alignment/validity invariants
	/// of the representation.
	#[inline]
	#[cfg(feature = "alloc")]
	pub(crate) unsafe fn set_pointer(&mut self, addr: impl Into<Address<T>>) {
		let mut addr = addr.into();
		if addr.to_const().is_null() {
			*self = Self::EMPTY;
			return;
		}
		addr.addr &= Self::PTR_ADDR_MASK;
		addr.addr |= self.ptr.as_ptr() as usize & Self::PTR_HEAD_MASK;
		self.ptr = NonNull::new_unchecked(addr.to_mut() as *mut u8);
	}
	/// Reassembles the head bit-index from its two storage halves.
	pub(crate) fn head(&self) -> BitIdx<T::Mem> {
		let ptr = self.ptr.as_ptr() as usize;
		let ptr_head = (ptr & Self::PTR_HEAD_MASK) << Self::LEN_HEAD_BITS;
		let len_head = self.len & Self::LEN_HEAD_MASK;
		unsafe { BitIdx::new_unchecked((ptr_head | len_head) as u8) }
	}
	/// Writes a new head bit-index into both storage halves.
	///
	/// # Safety
	/// Caller must keep the resulting region in bounds of its allocation.
	#[cfg(feature = "alloc")]
	pub(crate) unsafe fn set_head(&mut self, head: BitIdx<T::Mem>) {
		let head = head.value() as usize;
		let mut ptr = self.ptr.as_ptr() as usize;
		ptr &= Self::PTR_ADDR_MASK;
		ptr |= head >> Self::LEN_HEAD_BITS;
		self.ptr = NonNull::new_unchecked(ptr as *mut u8);
		self.len &= !Self::LEN_HEAD_MASK;
		self.len |= head & Self::LEN_HEAD_MASK;
	}
	/// The number of live bits in the region.
	#[inline]
	pub(crate) fn len(&self) -> usize {
		self.len >> Self::LEN_HEAD_BITS
	}
	/// Overwrites the bit count, leaving the head bits in `len` untouched.
	///
	/// # Safety
	/// `new_len` must not exceed `REGION_MAX_BITS` (checked only in debug
	/// builds) nor overrun the underlying allocation.
	#[inline]
	pub(crate) unsafe fn set_len(&mut self, new_len: usize) {
		debug_assert!(
			new_len <= Self::REGION_MAX_BITS,
			"Length {} out of range",
			new_len,
		);
		self.len &= Self::LEN_HEAD_MASK;
		self.len |= new_len << Self::LEN_HEAD_BITS;
	}
	/// Unpacks the pointer into `(address, head index, bit count)`.
	#[inline]
	pub(crate) fn raw_parts(&self) -> (Address<T>, BitIdx<T::Mem>, usize) {
		(self.pointer(), self.head(), self.len())
	}
	/// Counts the `T` elements the region touches: full elements spanned by
	/// `head + len`, plus one if a partial trailing element remains.
	pub(crate) fn elements(&self) -> usize {
		let total = self.len() + self.head().value() as usize;
		let base = total >> T::Mem::INDX;
		let tail = total as u8 & T::Mem::MASK;
		base + (tail != 0) as usize
	}
	/// Computes the one-past-the-end bit index within the final element.
	///
	/// Returns `BitTail::ZERO` only for the canonical empty region; a region
	/// ending exactly on an element boundary reports `T::Mem::BITS` (the
	/// `(tail == 0) << INDX` term supplies that value).
	#[inline]
	pub(crate) fn tail(&self) -> BitTail<T::Mem> {
		let (head, len) = (self.head(), self.len());
		if head.value() == 0 && len == 0 {
			return BitTail::ZERO;
		}
		let tail = (head.value() as usize + len) & T::Mem::MASK as usize;
		unsafe {
			BitTail::new_unchecked(
				(((tail == 0) as u8) << T::Mem::INDX) | tail as u8,
			)
		}
	}
	/// Advances the head by one bit, carrying into the element address when
	/// the head wraps past the end of an element.
	///
	/// # Safety
	/// Caller must have already shrunk `len`/ensured a live bit exists so the
	/// advance stays inside the allocation.
	#[inline]
	pub(crate) unsafe fn incr_head(&mut self) {
		// All head bits, then the increment.
		let head = self.head().value() as usize + 1;
		self.len &= !Self::LEN_HEAD_MASK;
		self.len |= head & Self::LEN_HEAD_MASK;
		// The high bits are re-deposited into the pointer's low bits; when
		// `head` reaches `T::Mem::BITS`, the add carries out of the packed
		// field and steps the byte address to the next element.
		let head = head >> Self::LEN_HEAD_BITS;
		let mut ptr = self.ptr.as_ptr() as usize;
		ptr &= Self::PTR_ADDR_MASK;
		ptr += head;
		self.ptr = NonNull::new_unchecked(ptr as *mut u8);
	}
	/// Views every element the region touches as an aliased slice.
	///
	/// NOTE(review): the unsafety relies on the region describing live,
	/// correctly-sized memory — guaranteed by the constructors' invariants.
	#[inline]
	pub(crate) fn as_aliased_slice<'a>(&self) -> &'a [T::Alias] {
		unsafe {
			slice::from_raw_parts(self.pointer().to_alias(), self.elements())
		}
	}
	/// Reads the bit `index` positions past the head.
	///
	/// # Safety
	/// `index` must lie within the region described by this pointer.
	#[inline]
	pub(crate) unsafe fn read<O>(&self, index: usize) -> bool
	where O: BitOrder {
		let (elt, bit) = self.head().offset(index as isize);
		let base = self.pointer().to_const();
		(&*base.offset(elt)).get_bit::<O>(bit)
	}
	/// Writes `value` into the bit `index` positions past the head, through
	/// the store's `Access` type.
	///
	/// # Safety
	/// `index` must lie within the region, and the caller must hold write
	/// permission for the touched element.
	#[inline]
	pub(crate) unsafe fn write<O>(&self, index: usize, value: bool)
	where O: BitOrder {
		let (elt, bit) = self.head().offset(index as isize);
		let base = self.pointer().to_access();
		(&*base.offset(elt)).write_bit::<O>(bit, value);
	}
	/// Measures the distance from `self` to `other` as
	/// `(element offset, bit offset)`, each signed.
	///
	/// # Safety
	/// NOTE(review): uses wrapping arithmetic on the raw addresses, so both
	/// pointers are presumably expected to describe the same allocation —
	/// confirm against callers.
	pub(crate) unsafe fn ptr_diff(self, other: Self) -> (isize, i8) {
		let self_ptr = self.pointer();
		let other_ptr = other.pointer();
		let elts = other_ptr
			.value()
			.wrapping_sub(self_ptr.value())
			.wrapping_div(core::mem::size_of::<T>()) as isize;
		let bits = other.head().value() as i8 - self.head().value() as i8;
		(elts, bits)
	}
	/// Decodes a `*const BitSlice` fat pointer back into the packed
	/// representation: the slice's base pointer and length field are the
	/// `ptr`/`len` words verbatim. A null input yields `EMPTY`.
	#[inline]
	pub(crate) fn from_bitslice_ptr<O>(raw: *const BitSlice<O, T>) -> Self
	where O: BitOrder {
		let slice_nn = match NonNull::new(raw as *const [()] as *mut [()]) {
			Some(r) => r,
			None => return Self::EMPTY,
		};
		let ptr = dvl::nonnull_slice_to_base(slice_nn).cast::<u8>();
		// `[()]` elements are zero-sized, so `.len()` reads the fat-pointer
		// length word without touching memory.
		let len = unsafe { slice_nn.as_ref() }.len();
		Self {
			ptr,
			len,
			_ty: PhantomData,
		}
	}
	/// Mutable-pointer variant of `from_bitslice_ptr`; mutability is erased.
	#[inline(always)]
	#[cfg(feature = "alloc")]
	pub(crate) fn from_bitslice_ptr_mut<O>(raw: *mut BitSlice<O, T>) -> Self
	where O: BitOrder {
		Self::from_bitslice_ptr(raw as *const BitSlice<O, T>)
	}
	/// Encodes the packed words as a `*const BitSlice` fat pointer (the
	/// inverse of `from_bitslice_ptr`).
	#[inline]
	pub(crate) fn to_bitslice_ptr<O>(self) -> *const BitSlice<O, T>
	where O: BitOrder {
		ptr::slice_from_raw_parts(
			self.ptr.as_ptr() as *const u8 as *const (),
			self.len,
		) as *const BitSlice<O, T>
	}
	/// Mutable variant of `to_bitslice_ptr`.
	#[inline(always)]
	pub(crate) fn to_bitslice_ptr_mut<O>(self) -> *mut BitSlice<O, T>
	where O: BitOrder {
		self.to_bitslice_ptr::<O>() as *mut BitSlice<O, T>
	}
	/// Produces a shared `BitSlice` reference with caller-chosen lifetime.
	#[inline(always)]
	pub(crate) fn to_bitslice_ref<'a, O>(self) -> &'a BitSlice<O, T>
	where O: BitOrder {
		unsafe { &*self.to_bitslice_ptr::<O>() }
	}
	/// Produces an exclusive `BitSlice` reference with caller-chosen lifetime.
	#[inline(always)]
	pub(crate) fn to_bitslice_mut<'a, O>(self) -> &'a mut BitSlice<O, T>
	where O: BitOrder {
		unsafe { &mut *self.to_bitslice_ptr_mut::<O>() }
	}
	/// Produces a `NonNull<BitSlice>`; sound because `self.ptr` is `NonNull`.
	#[inline]
	#[cfg(feature = "alloc")]
	pub(crate) fn to_nonnull<O>(self) -> NonNull<BitSlice<O, T>>
	where
		O: BitOrder,
		T: BitStore,
	{
		unsafe { NonNull::new_unchecked(self.to_bitslice_ptr_mut()) }
	}
	/// Shared debug renderer: writes
	/// `Bit{name}<[ord, ]Mem> { addr, head, bits[, extra fields…] }`.
	/// Used by the `Debug`/`Pointer` impls of this and related types.
	#[inline]
	pub(crate) fn render<'a>(
		&'a self,
		fmt: &'a mut Formatter,
		name: &'a str,
		ord: Option<&'a str>,
		fields: impl IntoIterator<Item = &'a (&'a str, &'a dyn Debug)>,
	) -> fmt::Result {
		write!(fmt, "Bit{}<", name)?;
		if let Some(ord) = ord {
			write!(fmt, "{}, ", ord)?;
		}
		write!(fmt, "{}>", any::type_name::<T::Mem>())?;
		let mut builder = fmt.debug_struct("");
		builder
			.field("addr", &self.pointer().fmt_pointer())
			.field("head", &self.head().fmt_binary())
			.field("bits", &self.len());
		for (name, value) in fields {
			builder.field(name, value);
		}
		builder.finish()
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Clone for BitPtr<T>
where T: BitStore
{
	/// `BitPtr` is `Copy`, so cloning is a plain bitwise duplication.
	fn clone(&self) -> Self {
		*self
	}
}
impl<T, U> PartialEq<BitPtr<U>> for BitPtr<T>
where
	T: BitStore,
	U: BitStore,
{
	/// Two bit-pointers over possibly-different stores are equal when their
	/// element widths, base addresses, head indices, and bit counts all agree.
	fn eq(&self, other: &BitPtr<U>) -> bool {
		if T::Mem::BITS != U::Mem::BITS {
			return false;
		}
		self.pointer().value() == other.pointer().value()
			&& self.head().value() == other.head().value()
			&& self.len() == other.len()
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Default for BitPtr<T>
where T: BitStore
{
#[inline(always)]
fn default() -> Self {
Self::EMPTY
}
}
#[cfg(not(tarpaulin_include))]
impl<T> Debug for BitPtr<T>
where T: BitStore
{
	/// Debug output defers entirely to the `Pointer` rendering.
	#[inline(always)]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		<Self as Pointer>::fmt(self, fmt)
	}
}
#[cfg(not(tarpaulin_include))]
impl<T> Pointer for BitPtr<T>
where T: BitStore
{
	/// Renders as `BitPtr<Mem> { addr, head, bits }` via the shared
	/// `render` helper, with no ordering parameter and no extra fields.
	#[inline(always)]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Self::render(self, fmt, "Ptr", None, None)
	}
}
/// `BitPtr` is two plain machine words plus a marker, so it is freely
/// copyable.
impl<T> Copy for BitPtr<T> where T: BitStore
{
}
#[cfg(test)]
mod tests {
	use crate::{
		bits,
		order::Msb0,
	};
	// Checks the textual shape produced by the shared `render` helper, as
	// seen through both `BitPtr`'s and `BitSlice`'s formatting impls.
	#[test]
	#[cfg(feature = "alloc")]
	fn render() {
		let bits = bits![Msb0, u8; 0, 1, 0, 0];
		// `Debug` for `BitPtr` routes through `render` with name "Ptr".
		let render = format!("{:?}", bits.bitptr());
		assert!(render.starts_with("BitPtr<u8> { addr: 0x"));
		assert!(render.ends_with(", head: 000, bits: 4 }"));
		// Alternate-mode slice formatting appends the element dump.
		let render = format!("{:#?}", bits);
		assert!(render.starts_with("BitSlice<bitvec::order::Msb0, u8> {"));
		assert!(render.ends_with("} [\n 0b0100,\n]"), "{}", render);
	}
	// Exercises `ptr_diff` across all four sign combinations of
	// (element offset, bit offset) within a two-byte region.
	#[test]
	fn ptr_diff() {
		let bits = bits![Msb0, u8; 0; 16];
		// Forward one element, forward two bits.
		let a = bits[2 .. 3].bitptr();
		let b = bits[12 .. 13].bitptr();
		assert_eq!(unsafe { a.ptr_diff(b) }, (1, 2));
		// Forward one element, backward three bits.
		let a = bits[5 .. 6].bitptr();
		let b = bits[10 .. 11].bitptr();
		assert_eq!(unsafe { a.ptr_diff(b) }, (1, -3));
		// Backward one element, forward four bits.
		let a = bits[8 .. 9].bitptr();
		let b = bits[4 .. 5].bitptr();
		assert_eq!(unsafe { a.ptr_diff(b) }, (-1, 4));
		// Backward one element, backward five bits.
		let a = bits[14 .. 15].bitptr();
		let b = bits[1 .. 2].bitptr();
		assert_eq!(unsafe { a.ptr_diff(b) }, (-1, -5));
	}
}