use crate::{
cursor::Cursor,
domain::*,
indices::{
BitIdx,
Indexable,
},
slice::BitSlice,
store::BitStore,
};
use core::{
fmt::{
self,
Debug,
Formatter,
},
marker::PhantomData,
mem::size_of,
ptr::NonNull,
slice,
};
#[cfg(any(test, feature = "alloc"))]
use crate::indices::BitTail;
/// Width, in bits, of a pointer on the host machine. Used by the `Debug`
/// implementation below to size the zero-padded hexadecimal address render.
const PTR_BITS: usize = size_of::<*const u8>() * 8;
/// Type-punning union over the four address representations this module
/// needs, so that conversions between them are centralized here instead of
/// scattered as `as` casts. Every field is a thin pointer or `usize`, so all
/// variants share one pointer-width representation.
#[derive(Clone, Copy)]
#[doc(hidden)]
pub(crate) union Pointer<T>
where T: BitStore {
    /// The address as a pointer to the store's access (aliasing-aware) type.
    a: *const <T as BitStore>::Access,
    /// The address as an immutable element pointer.
    r: *const T,
    /// The address as a mutable element pointer.
    w: *mut T,
    /// The address as a bare integer, for mask arithmetic.
    u: usize,
}
impl<T> Pointer<T>
where T: BitStore {
    /// Views the address as a pointer to the access type.
    #[inline]
    pub(crate) fn a(self) -> *const <T as BitStore>::Access {
        // SAFETY: all fields are pointer-width; reading any variant of the
        // union reinterprets the same bits.
        unsafe { self.a }
    }

    /// Views the address as an immutable element pointer.
    #[inline]
    pub(crate) fn r(self) -> *const T {
        // SAFETY: as above — pointer-width bit reinterpretation.
        unsafe { self.r }
    }

    /// Views the address as a mutable element pointer.
    #[inline]
    pub(crate) fn w(self) -> *mut T {
        // SAFETY: as above — pointer-width bit reinterpretation.
        unsafe { self.w }
    }

    /// Views the address as a bare integer.
    #[inline]
    pub(crate) fn u(self) -> usize {
        // SAFETY: as above — pointer-width bit reinterpretation.
        unsafe { self.u }
    }
}
impl<T> From<&T> for Pointer<T>
where T: BitStore {
fn from(r: &T) -> Self {
Self { r }
}
}
impl<T> From<*const T> for Pointer<T>
where T: BitStore {
fn from(r: *const T) -> Self {
Self { r }
}
}
impl<T> From<&mut T> for Pointer<T>
where T: BitStore {
fn from(w: &mut T) -> Self {
Self { w }
}
}
impl<T> From<*mut T> for Pointer<T>
where T: BitStore {
fn from(w: *mut T) -> Self {
Self { w }
}
}
impl<T> From<usize> for Pointer<T>
where T: BitStore {
fn from(u: usize) -> Self {
Self { u }
}
}
/// Packed descriptor of a region of bits inside a buffer of `T` elements.
///
/// Layout-compatible (`repr(C)`, pointer + `usize`) with a slice fat pointer:
/// `from_bitslice`/`into_bitslice` below round-trip this struct through
/// `&BitSlice` references by reinterpreting the `(ptr, len)` pair.
///
/// The head bit-index is split across the two words (see the associated
/// constants on the `impl` block): its high bits occupy the low alignment
/// bits of `ptr`, its low three bits occupy the low bits of `len`, and the
/// remaining high bits of `len` hold the bit count.
#[repr(C)]
#[derive(Clone, Copy, Eq, Hash, PartialEq, PartialOrd, Ord)]
pub struct BitPtr<T = u8>
where T: BitStore {
    /// Marks the governed element type without storing it.
    _ty: PhantomData<T>,
    /// Element address, with head-index bits packed into its low bits.
    ptr: NonNull<u8>,
    /// Bit count (high bits) and the low bits of the head index (low bits).
    len: usize,
}
impl<T> BitPtr<T>
where T: BitStore {
    /// Mask of the pointer-field bits that carry the element address.
    pub const PTR_DATA_MASK: usize = !Self::PTR_HEAD_MASK;
    /// Number of high bits of the head index packed into the pointer field.
    ///
    /// Well-aligned `T` addresses have this many trailing zero bits free
    /// (enforced by the alignment asserts in `new`/`uninhabited`), so they
    /// are reused for head-index storage.
    pub const PTR_HEAD_BITS: usize = T::INDX as usize - Self::LEN_HEAD_BITS;
    /// Mask of the pointer-field bits that carry head-index data.
    pub const PTR_HEAD_MASK: usize = T::MASK as usize >> Self::LEN_HEAD_BITS;
    /// Number of low bits of the head index packed into the length field.
    pub const LEN_HEAD_BITS: usize = 3;
    /// Mask of the length-field bits that carry head-index data.
    pub const LEN_HEAD_MASK: usize = 0b0111;
    /// Maximum number of elements a region may span.
    pub const MAX_ELTS: usize = (Self::MAX_BITS >> 3) + 1;
    /// Maximum number of bits a `BitPtr` can govern: the length field loses
    /// `LEN_HEAD_BITS` bits of `usize` to head-index storage.
    pub const MAX_BITS: usize = !0 >> Self::LEN_HEAD_BITS;

    /// Produces the canonical empty value: a dangling (non-null, aligned)
    /// pointer, zero head, zero bit count.
    pub fn empty() -> Self {
        Self {
            _ty: PhantomData,
            ptr: NonNull::dangling(),
            len: 0,
        }
    }

    /// Produces a `BitPtr` that retains `ptr` as its data address but governs
    /// zero bits (for owning types that allocate before populating).
    ///
    /// # Panics
    ///
    /// Panics if `ptr` is not aligned to `2 ** PTR_HEAD_BITS` bytes, since
    /// its low bits are repurposed for head-index storage.
    #[cfg(feature = "alloc")]
    pub(crate) fn uninhabited(ptr: impl Into<Pointer<T>>) -> Self {
        let ptr = ptr.into();
        assert!(
            (ptr.u()).trailing_zeros() as usize >= Self::PTR_HEAD_BITS,
            "Pointer {:p} does not satisfy minimum alignment requirements {}",
            ptr.r(),
            Self::PTR_HEAD_BITS,
        );
        Self {
            _ty: PhantomData,
            // A null allocation address collapses to the dangling sentinel.
            ptr: NonNull::new(ptr.w() as *mut u8)
                .unwrap_or_else(NonNull::dangling),
            len: 0,
        }
    }

    /// Checked constructor: packs `(data, head, bits)` into a `BitPtr`.
    ///
    /// A null `data` pointer yields the canonical empty value.
    ///
    /// # Panics
    ///
    /// Panics if `data` is under-aligned, if `bits` exceeds `MAX_BITS`, or if
    /// the described region would wrap around the address space.
    pub(crate) fn new(
        data: impl Into<Pointer<T>>,
        head: BitIdx<T>,
        bits: usize,
    ) -> Self {
        let data = data.into();
        if data.r().is_null() {
            return Self::empty();
        }
        assert!(
            data.u().trailing_zeros() as usize >= Self::PTR_HEAD_BITS,
            "BitPtr domain pointer ({:p}) to {} must be aligned to at least {}",
            data.r(),
            T::TYPENAME,
            Self::PTR_HEAD_BITS,
        );
        assert!(
            bits <= Self::MAX_BITS,
            "BitPtr cannot address {} bits; the maximum is {}",
            bits,
            Self::MAX_BITS,
        );
        // Ensure the last element of the region is still at a higher address
        // than the first: `wrapping_add` makes the overflow observable.
        let elts = head.span(bits).0;
        let tail = data.r().wrapping_add(elts);
        assert!(
            tail >= data.r(),
            "BitPtr region cannot wrap the address space: {:p} + {:02X} = {:p}",
            data.r(),
            elts,
            tail,
        );
        unsafe { Self::new_unchecked(data, head, bits) }
    }

    /// Unchecked constructor: performs the bit-packing with no validation.
    ///
    /// # Safety
    ///
    /// Callers must uphold the invariants that `new` asserts (alignment,
    /// `bits <= MAX_BITS`, no address-space wrap).
    pub(crate) unsafe fn new_unchecked(
        data: impl Into<Pointer<T>>,
        head: BitIdx<T>,
        bits: usize,
    ) -> Self {
        let (data, head) = (data.into(), *head as usize);
        // Split the head index: its high bits ride in the pointer's low
        // alignment bits, its low three bits ride in the length field.
        let ptr_data = data.u() & Self::PTR_DATA_MASK;
        let ptr_head = head >> Self::LEN_HEAD_BITS;
        let len_head = head & Self::LEN_HEAD_MASK;
        let len_bits = bits << Self::LEN_HEAD_BITS;
        let ptr: Pointer<u8> = (ptr_data | ptr_head).into();
        Self {
            _ty: PhantomData,
            ptr: NonNull::new_unchecked(ptr.w()),
            len: len_bits | len_head,
        }
    }

    /// Extracts the element address, clearing the packed head-index bits.
    #[inline]
    pub(crate) fn pointer(&self) -> Pointer<T> {
        (self.ptr.as_ptr() as usize & Self::PTR_DATA_MASK).into()
    }

    /// Replaces the data address, preserving the head-index bits packed in
    /// the pointer field. A null input resets `self` to the empty value.
    ///
    /// # Safety
    ///
    /// The new address must satisfy the same alignment and span invariants
    /// as the one it replaces.
    #[inline]
    #[cfg(feature = "alloc")]
    pub(crate) unsafe fn set_pointer(&mut self, ptr: impl Into<Pointer<T>>) {
        let mut data = ptr.into();
        if data.r().is_null() {
            *self = Self::empty();
            return;
        }
        // Keep the old head bits; take only the address bits from `data`.
        data.u &= Self::PTR_DATA_MASK;
        data.u |= self.ptr.as_ptr() as usize & Self::PTR_HEAD_MASK;
        self.ptr = NonNull::new_unchecked(data.w() as *mut u8);
    }

    /// Reassembles the head bit-index from its two storage locations.
    #[inline]
    pub fn head(&self) -> BitIdx<T> {
        let ptr = self.ptr.as_ptr() as usize;
        let ptr_head = (ptr & Self::PTR_HEAD_MASK) << Self::LEN_HEAD_BITS;
        let len_head = self.len & Self::LEN_HEAD_MASK;
        ((ptr_head | len_head) as u8).idx()
    }

    /// Counts the live bits governed by this pointer.
    #[inline]
    pub fn len(&self) -> usize {
        self.len >> Self::LEN_HEAD_BITS
    }

    /// Overwrites the bit count, preserving the packed head bits.
    ///
    /// # Safety
    ///
    /// `len` must not exceed `MAX_BITS` (the shift would otherwise silently
    /// discard high bits), and the resulting region must stay in bounds of
    /// the underlying buffer.
    #[cfg(feature = "alloc")]
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        let n = (len << Self::LEN_HEAD_BITS) | (self.len & Self::LEN_HEAD_MASK);
        self.len = n;
    }

    /// Decomposes the pointer into `(address, head index, bit count)`.
    #[inline]
    pub(crate) fn raw_parts(&self) -> (Pointer<T>, BitIdx<T>, usize) {
        (self.pointer(), self.head(), self.len())
    }

    /// Counts the `T` elements the governed region touches.
    pub fn elements(&self) -> usize {
        self.head().span(self.len()).0
    }

    /// Computes the tail index: one past the last live bit, counted within
    /// the final element, in `1 ..= T::BITS` for non-empty regions.
    #[cfg(any(test, feature = "alloc"))]
    #[inline]
    pub(crate) fn tail(&self) -> BitTail<T> {
        let (head, len) = (self.head(), self.len());
        // The canonical empty value reports tail 0; without this special
        // case the modular arithmetic below would map it to `T::BITS`.
        if *head == 0 && len == 0 {
            return 0u8.tail();
        }
        // A modular result of 0 means the region ends exactly on an element
        // boundary, which reports as `T::BITS` rather than 0.
        let tail = (*self.head() as usize + len) & T::MASK as usize;
        ((((tail == 0) as u8) << T::INDX) | tail as u8).tail()
    }

    /// Tests whether the bit count is zero, ignoring the packed head bits.
    #[cfg(any(test, feature = "alloc"))]
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len & !Self::LEN_HEAD_MASK == 0
    }

    /// Views the governed elements as an immutable slice.
    ///
    /// The unconstrained lifetime `'a` is chosen by the caller, which must
    /// ensure it does not outlive the underlying buffer.
    pub fn as_slice<'a>(&self) -> &'a [T] {
        unsafe { slice::from_raw_parts(self.pointer().r, self.elements()) }
    }

    /// Views the governed elements as a mutable slice.
    ///
    /// The unconstrained lifetime `'a` is chosen by the caller, which must
    /// also guarantee exclusivity for its duration.
    pub fn as_mut_slice<'a>(&self) -> &'a mut [T] {
        unsafe { slice::from_raw_parts_mut(self.pointer().w, self.elements()) }
    }

    /// Views the governed elements as a slice of the store's access type.
    ///
    /// The unconstrained lifetime `'a` is chosen by the caller, which must
    /// ensure it does not outlive the underlying buffer.
    pub fn as_access_slice<'a>(&self) -> &'a [T::Access] {
        unsafe { slice::from_raw_parts(self.pointer().a, self.elements()) }
    }

    /// Splits the region into its domain descriptor, via the `From`
    /// conversion defined in the `domain` module.
    pub(crate) fn domain<'a>(self) -> BitDomain<'a, T> {
        self.into()
    }

    /// Moves the head cursor up one bit, stepping the element pointer
    /// forward when the index wraps, and shrinks the region by one bit.
    /// No-op on zero-length regions.
    ///
    /// # Safety
    ///
    /// The adjusted region must remain inside the underlying buffer.
    #[cfg(feature = "alloc")]
    pub unsafe fn incr_head(&mut self) {
        let (data, head, bits) = self.raw_parts();
        if bits == 0 {
            return;
        }
        // `wrap` is true when the head crossed an element boundary.
        let (head, wrap) = head.incr();
        *self = Self::new_unchecked(
            data.r().offset(wrap as isize),
            head,
            bits - 1,
        );
    }

    /// Moves the head cursor down one bit, stepping the element pointer
    /// backward when the index wraps, and grows the region by one bit.
    /// No-op on zero-length regions.
    ///
    /// # Safety
    ///
    /// The adjusted region must remain inside the underlying buffer.
    #[cfg(feature = "alloc")]
    pub unsafe fn decr_head(&mut self) {
        let (data, head, bits) = self.raw_parts();
        if bits == 0 {
            return;
        }
        // `wrap` is true when the head crossed an element boundary.
        let (head, wrap) = head.decr();
        *self = Self::new_unchecked(
            data.r().offset(-(wrap as isize)),
            head,
            bits + 1,
        );
    }

    /// Grows the region by one bit at the tail, saturating at `MAX_BITS`.
    ///
    /// # Safety
    ///
    /// The extended region must remain inside the underlying buffer.
    #[cfg(feature = "alloc")]
    pub unsafe fn incr_tail(&mut self) {
        let len = self.len();
        if len == Self::MAX_BITS {
            return;
        }
        self.set_len(len + 1);
    }

    /// Shrinks the region by one bit at the tail, saturating at zero.
    ///
    /// # Safety
    ///
    /// Marked `unsafe` for symmetry with the other cursor adjusters.
    #[cfg(feature = "alloc")]
    pub unsafe fn decr_tail(&mut self) {
        let len = self.len();
        if len == 0 {
            return;
        }
        self.set_len(len - 1);
    }

    /// Recovers a `BitPtr` from a `BitSlice` reference: the reference's
    /// fat-pointer `(addr, len)` pair is exactly the packed representation.
    pub(crate) fn from_bitslice<C>(bs: &BitSlice<C, T>) -> Self
    where C: Cursor {
        // View the fat reference as a `[()]` slice to read its raw parts.
        let src = unsafe { &*(bs as *const BitSlice<C, T> as *const [()]) };
        let ptr = Pointer::from(src.as_ptr() as *const u8);
        let (ptr, len) = match (ptr.w(), src.len()) {
            // A zero-length slice collapses to the dangling sentinel.
            (_, 0) => (NonNull::dangling(), 0),
            (p, _) if p.is_null() => unreachable!("Rust forbids null refs"),
            (p, l) => (unsafe { NonNull::new_unchecked(p) }, l),
        };
        Self { ptr, len, _ty: PhantomData }
    }

    /// Reinterprets the packed `(ptr, len)` pair as a shared `BitSlice`
    /// reference with a caller-chosen lifetime.
    pub(crate) fn into_bitslice<'a, C>(self) -> &'a BitSlice<C, T>
    where C: Cursor {
        unsafe {
            &*(slice::from_raw_parts(
                Pointer::from(self.ptr.as_ptr()).r() as *const (),
                self.len,
            ) as *const [()] as *const BitSlice<C, T>)
        }
    }

    /// Reinterprets the packed `(ptr, len)` pair as an exclusive `BitSlice`
    /// reference with a caller-chosen lifetime.
    pub(crate) fn into_bitslice_mut<'a, C>(self) -> &'a mut BitSlice<C, T>
    where C: Cursor {
        unsafe {
            &mut *(slice::from_raw_parts_mut(
                Pointer::from(self.ptr.as_ptr()).w() as *mut (),
                self.len,
            ) as *mut [()] as *mut BitSlice<C, T>)
        }
    }
}
impl<T> AsMut<[T]> for BitPtr<T>
where T: BitStore {
fn as_mut(&mut self) -> &mut [T] {
self.as_mut_slice()
}
}
impl<T> AsRef<[T]> for BitPtr<T>
where T: BitStore {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
impl<'a, C, T> From<&'a BitSlice<C, T>> for BitPtr<T>
where C: Cursor, T: 'a + BitStore {
fn from(src: &'a BitSlice<C, T>) -> Self {
Self::from_bitslice(src)
}
}
impl<'a, C, T> From<&'a mut BitSlice<C, T>> for BitPtr<T>
where C: Cursor, T: 'a + BitStore {
fn from(src: &'a mut BitSlice<C, T>) -> Self {
Self::from_bitslice(src)
}
}
impl<T> Default for BitPtr<T>
where T: BitStore {
fn default() -> Self {
Self::empty()
}
}
impl<T> Debug for BitPtr<T>
where T: BitStore {
    /// Renders the pointer as `BitPtr<T>` followed by a pseudo-struct of its
    /// decoded `(data, head, bits)` components.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        /// Zero-padded hexadecimal render of the element address.
        struct Addr<T: BitStore>(*const T);
        impl<T: BitStore> Debug for Addr<T> {
            fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
                // Pad to the full nibble width of a machine pointer.
                write!(fmt, "0x{:0>1$X}", self.0 as usize, PTR_BITS >> 2)
            }
        }

        /// Zero-padded binary render of the head bit-index.
        struct Head<T: BitStore>(BitIdx<T>);
        impl<T: BitStore> Debug for Head<T> {
            fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
                // Pad to the index width of the element type.
                write!(fmt, "0b{:0>1$b}", *self.0, T::INDX as usize)
            }
        }

        write!(f, "BitPtr<{}>", T::TYPENAME)?;
        f.debug_struct("")
            .field("data", &Addr::<T>(self.pointer().r()))
            .field("head", &Head::<T>(self.head()))
            .field("bits", &self.len())
            .finish()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // The number of alignment bits available for head-index storage in the
    // pointer field grows with element width: 0 for u8 up to 3 for u64.
    #[test]
    fn associated_consts_u8() {
        assert_eq!(BitPtr::<u8>::PTR_HEAD_BITS, 0);
        assert_eq!(BitPtr::<u8>::PTR_DATA_MASK, !0);
        assert_eq!(BitPtr::<u8>::PTR_HEAD_MASK, 0);
    }

    #[test]
    fn associated_consts_u16() {
        assert_eq!(BitPtr::<u16>::PTR_HEAD_BITS, 1);
        assert_eq!(BitPtr::<u16>::PTR_DATA_MASK, !0 << 1);
        assert_eq!(BitPtr::<u16>::PTR_HEAD_MASK, 1);
    }

    #[test]
    fn associated_consts_u32() {
        assert_eq!(BitPtr::<u32>::PTR_HEAD_BITS, 2);
        assert_eq!(BitPtr::<u32>::PTR_DATA_MASK, !0 << 2);
        assert_eq!(BitPtr::<u32>::PTR_HEAD_MASK, 3);
    }

    #[test]
    fn associated_consts_u64() {
        assert_eq!(BitPtr::<u64>::PTR_HEAD_BITS, 3);
        assert_eq!(BitPtr::<u64>::PTR_DATA_MASK, !0 << 3);
        assert_eq!(BitPtr::<u64>::PTR_HEAD_MASK, 7);
    }

    // A full-region constructor round-trips its address, element span, and
    // cursor endpoints; a full final element reports tail == T::BITS.
    #[test]
    fn ctors() {
        let data: [u32; 4] = [0; 4];
        let bp = BitPtr::<u32>::new(&data as *const u32, 0u8.idx(), 32 * 4);
        assert_eq!(bp.pointer().r(), &data as *const u32);
        assert_eq!(bp.elements(), 4);
        assert_eq!(*bp.head(), 0);
        assert_eq!(*bp.tail(), 32);
    }

    // A zero-length region is empty but still remembers its head position,
    // which doubles as the tail.
    #[test]
    fn empty() {
        let data = [0u8; 4];
        let bp = BitPtr::<u8>::new(&data as *const u8, 2u8.idx(), 0);
        assert!(bp.is_empty());
        assert_eq!(*bp.head(), 2);
        assert_eq!(*bp.tail(), 2);
    }

    // Requesting more than MAX_BITS must panic in the checked constructor.
    #[cfg(not(miri))]
    #[test]
    #[should_panic]
    fn overfull() {
        BitPtr::<u32>::new(8 as *const u32, 1u8.idx(), BitPtr::<u32>::MAX_BITS + 1);
    }
}