#![doc = include_str!("../../doc/ptr/range.md")]
use core::{
fmt::{
self,
Debug,
Formatter,
},
hash::{
Hash,
Hasher,
},
iter::FusedIterator,
ops::{
Bound,
Range,
RangeBounds,
},
};
use wyz::comu::{
Const,
Mutability,
};
use super::{
BitPtr,
BitSpan,
};
use crate::{
devel as dvl,
order::{
BitOrder,
Lsb0,
},
store::BitStore,
};
#[repr(C)]
#[doc = include_str!("../../doc/ptr/BitPtrRange.md")]
pub struct BitPtrRange<M = Const, T = usize, O = Lsb0>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
/// The inclusive low endpoint of the range.
pub start: BitPtr<M, T, O>,
/// The exclusive high endpoint of the range.
pub end: BitPtr<M, T, O>,
}
impl<M, T, O> BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    /// The canonical empty range: both endpoints dangling.
    pub const EMPTY: Self = Self {
        start: BitPtr::DANGLING,
        end: BitPtr::DANGLING,
    };

    /// Wraps a standard `Range` of bit-pointers in this type.
    #[inline]
    pub fn from_range(range: Range<BitPtr<M, T, O>>) -> Self {
        Self {
            start: range.start,
            end: range.end,
        }
    }

    /// Unwraps back into a standard `Range` of bit-pointers.
    #[inline]
    pub fn into_range(self) -> Range<BitPtr<M, T, O>> {
        self.start .. self.end
    }

    /// Tests whether the range contains no bits (endpoints are equal).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.start == self.end
    }

    /// Tests whether a bit-pointer falls in `start ..= end - 1`.
    ///
    /// The candidate may have any mutability, and any storage type with the
    /// same underlying memory element as `T`.
    pub fn contains<M2, T2>(&self, pointer: &BitPtr<M2, T2, O>) -> bool
    where
        M2: Mutability,
        T2: BitStore,
    {
        //  Pointers over different memory elements can never be "inside"
        //  each other's ranges, regardless of address.
        if !dvl::match_store::<T::Mem, T2::Mem>() {
            return false;
        }
        self.start <= *pointer && *pointer < self.end
    }

    /// Repackages the range as a span descriptor.
    ///
    /// ## Safety
    ///
    /// The caller must ensure the range describes a valid region, as the
    /// span construction is unchecked.
    pub(crate) unsafe fn into_bitspan(self) -> BitSpan<M, T, O> {
        let bits = self.len();
        self.start.span_unchecked(bits)
    }

    /// Yields the current front pointer, advancing `start` by one bit.
    #[inline]
    fn take_front(&mut self) -> BitPtr<M, T, O> {
        let advanced = self.start.wrapping_add(1);
        core::mem::replace(&mut self.start, advanced)
    }

    /// Retreats `end` by one bit and yields the newly-exposed back pointer.
    #[inline]
    fn take_back(&mut self) -> BitPtr<M, T, O> {
        let retreated = self.end.wrapping_sub(1);
        self.end = retreated;
        retreated
    }
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    #[inline]
    fn clone(&self) -> Self {
        //  The endpoints are plain copyable pointers, so cloning is a
        //  field-by-field copy.
        Self {
            start: self.start,
            end: self.end,
        }
    }
}
//  Bit-pointer comparison is a total equivalence, so `Eq` is a valid marker
//  atop the reflexive `PartialEq` implementation below.
impl<M, T, O> Eq for BitPtrRange<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
}
impl<M1, M2, O, T1, T2> PartialEq<BitPtrRange<M2, T2, O>>
for BitPtrRange<M1, T1, O>
where
    M1: Mutability,
    M2: Mutability,
    O: BitOrder,
    T1: BitStore,
    T2: BitStore,
{
    /// Two ranges are equal when their underlying memory elements match and
    /// both endpoints coincide. Mutability does not participate.
    #[inline]
    fn eq(&self, other: &BitPtrRange<M2, T2, O>) -> bool {
        //  Ranges over different memory elements are never equal.
        if !dvl::match_store::<T1::Mem, T2::Mem>() {
            return false;
        }
        self.start == other.start && self.end == other.end
    }
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitPtrRange<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
#[inline]
fn default() -> Self {
Self::EMPTY
}
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<Range<BitPtr<M, T, O>>> for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    /// Converts a standard `Range` of bit-pointers into this structure.
    #[inline]
    fn from(range: Range<BitPtr<M, T, O>>) -> Self {
        let Range { start, end } = range;
        Self { start, end }
    }
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<BitPtrRange<M, T, O>> for Range<BitPtr<M, T, O>>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    /// Converts this structure back into a standard `Range` of bit-pointers.
    #[inline]
    fn from(range: BitPtrRange<M, T, O>) -> Self {
        range.start .. range.end
    }
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Debug for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    /// Renders as `start..end`, padding the dots with spaces (`start .. end`)
    /// in alternate (`{:#?}`) mode.
    #[inline]
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        let pad = if fmt.alternate() { " " } else { "" };
        Debug::fmt(&self.start, fmt)?;
        write!(fmt, "{0}..{0}", pad)?;
        Debug::fmt(&self.end, fmt)
    }
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Hash for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    /// Feeds both endpoints, front first, into the hasher.
    #[inline]
    fn hash<H>(&self, state: &mut H)
    where H: Hasher {
        let Self { start, end } = self;
        start.hash(state);
        end.hash(state);
    }
}
impl<M, T, O> Iterator for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    type Item = BitPtr<M, T, O>;

    easy_iter!();

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        //  Yield the front pointer and advance, unless the range is spent.
        if self.start == self.end {
            None
        }
        else {
            Some(self.take_front())
        }
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        if n < self.len() {
            //  SAFETY: `n` is strictly less than the remaining length, so
            //  the jump lands inside the live region.
            self.start = unsafe { self.start.add(n) };
            Some(self.take_front())
        }
        else {
            //  An overshooting skip exhausts the range.
            self.start = self.end;
            None
        }
    }
}
impl<M, T, O> DoubleEndedIterator for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        //  Retreat the back pointer and yield it, unless the range is spent.
        if self.start == self.end {
            None
        }
        else {
            Some(self.take_back())
        }
    }

    #[inline]
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        if n < self.len() {
            //  SAFETY: `n + 1` does not exceed the remaining length here,
            //  so the retreat stays within the live region. The wrapping add
            //  cannot actually wrap once `n < len` holds.
            let stepped = unsafe { self.end.sub(n.wrapping_add(1)) };
            self.end = stepped;
            Some(stepped)
        }
        else {
            //  An overshooting skip exhausts the range.
            self.end = self.start;
            None
        }
    }
}
impl<M, T, O> ExactSizeIterator for BitPtrRange<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
/// Counts the bits remaining between the two endpoints.
#[inline]
fn len(&self) -> usize {
//  NOTE(review): relies on `end >= start` so that the signed distance is
//  non-negative before the `as usize` cast — presumably an invariant of
//  every constructed range; confirm at the construction sites.
(unsafe { self.end.offset_from(self.start) }) as usize
}
}
//  Once `start` meets `end`, `next` returns `None` forever, so the iterator
//  is fused by construction.
impl<M, T, O> FusedIterator for BitPtrRange<M, T, O>
where
M: Mutability,
T: BitStore,
O: BitOrder,
{
}
#[cfg(not(tarpaulin_include))]
impl<M, T, O> RangeBounds<BitPtr<M, T, O>> for BitPtrRange<M, T, O>
where
    M: Mutability,
    T: BitStore,
    O: BitOrder,
{
    /// The high endpoint is excluded, matching `Range` semantics.
    #[inline]
    fn end_bound(&self) -> Bound<&BitPtr<M, T, O>> {
        Bound::Excluded(&self.end)
    }

    /// The low endpoint is included, matching `Range` semantics.
    #[inline]
    fn start_bound(&self) -> Bound<&BitPtr<M, T, O>> {
        Bound::Included(&self.start)
    }
}