use core::{
cmp,
ops::{
Range,
RangeBounds,
RangeFrom,
RangeFull,
RangeInclusive,
RangeTo,
RangeToInclusive,
},
};
use funty::IsNumber;
use super::{
iter::{
Chunks,
ChunksExact,
ChunksExactMut,
ChunksMut,
Iter,
IterMut,
RChunks,
RChunksExact,
RChunksExactMut,
RChunksMut,
RSplit,
RSplitMut,
RSplitN,
RSplitNMut,
Split,
SplitMut,
SplitN,
SplitNMut,
Windows,
},
BitRef,
BitSlice,
};
#[cfg(feature = "alloc")]
use crate::vec::BitVec;
use crate::{
array::BitArray,
devel as dvl,
order::BitOrder,
ptr::{
BitPtr,
BitPtrRange,
BitSpan,
BitSpanError,
Const,
Mut,
},
store::BitStore,
};
impl<O, T> BitSlice<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// Returns the number of bits in the slice, as recorded by its span
	/// descriptor.
	#[inline]
	pub fn len(&self) -> usize {
		self.as_bitspan().len()
	}

	/// Tests whether the slice contains zero bits.
	#[inline]
	pub fn is_empty(&self) -> bool {
		self.as_bitspan().len() == 0
	}

	/// Gets a proxy reference to the first bit, or `None` if the slice is
	/// empty.
	#[inline]
	pub fn first(&self) -> Option<BitRef<Const, O, T>> {
		self.get(0)
	}

	/// Gets a mutable proxy reference to the first bit, or `None` if the
	/// slice is empty.
	#[inline]
	pub fn first_mut(&mut self) -> Option<BitRef<Mut, O, T>> {
		self.get_mut(0)
	}

	/// Splits off the first bit, returning it together with the rest of the
	/// slice, or `None` if the slice is empty.
	#[inline]
	pub fn split_first(&self) -> Option<(BitRef<Const, O, T>, &Self)> {
		match self.len() {
			0 => None,
			//  SAFETY: the slice is non-empty, so the split point `1` does
			//  not exceed its length and index `0` exists in the head.
			_ => unsafe {
				let (head, rest) = self.split_at_unchecked(1);
				Some((head.get_unchecked(0), rest))
			},
		}
	}

	/// Splits off the first bit, returning it together with the rest of the
	/// slice, or `None` if the slice is empty. Both returned views are
	/// alias-tainted, since the proxy and the remainder may touch the same
	/// memory element.
	#[inline]
	#[allow(clippy::type_complexity)]
	pub fn split_first_mut(
		&mut self,
	) -> Option<(BitRef<Mut, O, T::Alias>, &mut BitSlice<O, T::Alias>)> {
		match self.len() {
			0 => None,
			//  SAFETY: the slice is non-empty, so splitting at `1` is in
			//  bounds and the one-bit head has an index `0`.
			_ => unsafe {
				let (head, rest) = self.split_at_unchecked_mut(1);
				Some((head.get_unchecked_mut(0), rest))
			},
		}
	}

	/// Splits off the last bit, returning it together with the rest of the
	/// slice, or `None` if the slice is empty.
	#[inline]
	pub fn split_last(&self) -> Option<(BitRef<Const, O, T>, &Self)> {
		match self.len() {
			0 => None,
			//  SAFETY: `len >= 1` here, so `len.wrapping_sub(1)` cannot
			//  actually wrap and the one-bit tail has an index `0`.
			len => unsafe {
				let (rest, tail) = self.split_at_unchecked(len.wrapping_sub(1));
				Some((tail.get_unchecked(0), rest))
			},
		}
	}

	/// Splits off the last bit, returning it together with the rest of the
	/// slice, or `None` if the slice is empty. Both returned views are
	/// alias-tainted.
	#[inline]
	#[allow(clippy::type_complexity)]
	pub fn split_last_mut(
		&mut self,
	) -> Option<(BitRef<Mut, O, T::Alias>, &mut BitSlice<O, T::Alias>)> {
		match self.len() {
			0 => None,
			//  SAFETY: `len >= 1` here, so `len - 1` cannot underflow.
			len => unsafe {
				let (rest, tail) = self.split_at_unchecked_mut(len - 1);
				Some((tail.get_unchecked_mut(0), rest))
			},
		}
	}

	/// Gets a proxy reference to the last bit, or `None` if the slice is
	/// empty.
	#[inline]
	pub fn last(&self) -> Option<BitRef<Const, O, T>> {
		match self.len() {
			0 => None,
			//  SAFETY: `len >= 1`, so `len - 1` is a valid index.
			len => Some(unsafe { self.get_unchecked(len - 1) }),
		}
	}

	/// Gets a mutable proxy reference to the last bit, or `None` if the
	/// slice is empty.
	#[inline]
	pub fn last_mut(&mut self) -> Option<BitRef<Mut, O, T>> {
		match self.len() {
			0 => None,
			//  SAFETY: `len >= 1`, so `len - 1` is a valid index.
			len => Some(unsafe { self.get_unchecked_mut(len - 1) }),
		}
	}

	/// Checked lookup of a bit or subslice, depending on the index type.
	/// Returns `None` when `index` is out of bounds.
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub fn get<'a, I>(&'a self, index: I) -> Option<I::Immut>
	where I: BitSliceIndex<'a, O, T> {
		index.get(self)
	}

	/// Checked mutable lookup of a bit or subslice, depending on the index
	/// type. Returns `None` when `index` is out of bounds.
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub fn get_mut<'a, I>(&'a mut self, index: I) -> Option<I::Mut>
	where I: BitSliceIndex<'a, O, T> {
		index.get_mut(self)
	}

	/// Unchecked lookup of a bit or subslice.
	///
	/// # Safety
	///
	/// `index` must be in bounds for `self`; see [`BitSliceIndex`].
	#[allow(clippy::missing_safety_doc)]
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub unsafe fn get_unchecked<'a, I>(&'a self, index: I) -> I::Immut
	where I: BitSliceIndex<'a, O, T> {
		index.get_unchecked(self)
	}

	/// Unchecked mutable lookup of a bit or subslice.
	///
	/// # Safety
	///
	/// `index` must be in bounds for `self`; see [`BitSliceIndex`].
	#[allow(clippy::missing_safety_doc)]
	#[cfg_attr(not(tarpaulin_include), inline(always))]
	pub unsafe fn get_unchecked_mut<'a, I>(&'a mut self, index: I) -> I::Mut
	where I: BitSliceIndex<'a, O, T> {
		index.get_unchecked_mut(self)
	}

	/// Deprecated alias for `as_bitptr`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_bitptr` to access the region pointer"]
	pub fn as_ptr(&self) -> BitPtr<Const, O, T> {
		self.as_bitptr()
	}

	/// Deprecated alias for `as_bitptr_range`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_bitptr_range` to access the region pointers"]
	pub fn as_ptr_range(&self) -> BitPtrRange<Const, O, T> {
		self.as_bitptr_range()
	}

	/// Deprecated alias for `as_mut_bitptr`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_mut_bitptr` to access the region pointer"]
	pub fn as_mut_ptr(&mut self) -> BitPtr<Mut, O, T> {
		self.as_mut_bitptr()
	}

	/// Deprecated alias for `as_mut_bitptr_range`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_mut_bitptr_range` to access the region pointers"]
	pub fn as_mut_ptr_range(&mut self) -> BitPtrRange<Mut, O, T> {
		self.as_mut_bitptr_range()
	}

	/// Swaps the bits at indices `a` and `b`.
	///
	/// # Panics
	///
	/// Panics if either index is out of bounds.
	#[inline]
	pub fn swap(&mut self, a: usize, b: usize) {
		self.assert_in_bounds(a, 0 .. self.len());
		self.assert_in_bounds(b, 0 .. self.len());
		//  SAFETY: both indices were just bounds-checked above.
		unsafe {
			self.swap_unchecked(a, b);
		}
	}

	/// Reverses the order of bits in the slice, in place.
	#[inline]
	pub fn reverse(&mut self) {
		let mut bitspan = self.as_mut_bitspan();
		let mut len = bitspan.len();
		//  Swap the outermost remaining pair, then narrow the working view
		//  from both ends, until fewer than two bits remain.
		while len > 1 {
			unsafe {
				//  Turn the remaining count into the back index, and swap
				//  the back bit with the front bit.
				len -= 1;
				bitspan.to_bitslice_mut().swap_unchecked(0, len);
				//  Advance the span past the already-placed front bit …
				bitspan.incr_head();
				//  … and drop the already-placed back bit from the count.
				len -= 1;
			}
		}
	}

	/// Produces an iterator over each bit of the slice.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn iter(&self) -> Iter<O, T> {
		Iter::new(self)
	}

	/// Produces a mutable iterator over each bit of the slice.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn iter_mut(&mut self) -> IterMut<O, T> {
		IterMut::new(self)
	}

	/// Iterates over all overlapping windows of width `size`.
	///
	/// # Panics
	///
	/// Panics if `size` is zero.
	#[inline]
	pub fn windows(&self, size: usize) -> Windows<O, T> {
		assert_ne!(size, 0, "Window width cannot be 0");
		Windows::new(self, size)
	}

	/// Iterates over non-overlapping chunks of width `chunk_size`; the last
	/// chunk may be shorter.
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn chunks(&self, chunk_size: usize) -> Chunks<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		Chunks::new(self, chunk_size)
	}

	/// Mutable counterpart of [`Self::chunks`].
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		ChunksMut::new(self, chunk_size)
	}

	/// Iterates over non-overlapping chunks that are all exactly
	/// `chunk_size` wide, ignoring any shorter remainder.
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		ChunksExact::new(self, chunk_size)
	}

	/// Mutable counterpart of [`Self::chunks_exact`].
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn chunks_exact_mut(
		&mut self,
		chunk_size: usize,
	) -> ChunksExactMut<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		ChunksExactMut::new(self, chunk_size)
	}

	/// Like [`Self::chunks`], but starting from the back of the slice.
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn rchunks(&self, chunk_size: usize) -> RChunks<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		RChunks::new(self, chunk_size)
	}

	/// Mutable counterpart of [`Self::rchunks`].
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		RChunksMut::new(self, chunk_size)
	}

	/// Like [`Self::chunks_exact`], but starting from the back of the slice.
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		RChunksExact::new(self, chunk_size)
	}

	/// Mutable counterpart of [`Self::rchunks_exact`].
	///
	/// # Panics
	///
	/// Panics if `chunk_size` is zero.
	#[inline]
	pub fn rchunks_exact_mut(
		&mut self,
		chunk_size: usize,
	) -> RChunksExactMut<O, T> {
		assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
		RChunksExactMut::new(self, chunk_size)
	}

	/// Splits the slice into the halves `[.. mid]` and `[mid ..]`.
	///
	/// # Panics
	///
	/// Panics if `mid` exceeds `self.len()`.
	#[inline]
	pub fn split_at(&self, mid: usize) -> (&Self, &Self) {
		let len = self.len();
		assert!(mid <= len, "Index {} out of bounds: {}", mid, len);
		//  SAFETY: `mid <= len` was just asserted.
		unsafe { self.split_at_unchecked(mid) }
	}

	/// Splits the slice into the mutable halves `[.. mid]` and `[mid ..]`.
	/// Both halves are alias-tainted, since they may share a memory element
	/// at the split point.
	///
	/// # Panics
	///
	/// Panics if `mid` exceeds `self.len()`.
	#[inline]
	#[allow(clippy::type_complexity)]
	pub fn split_at_mut(
		&mut self,
		mid: usize,
	) -> (&mut BitSlice<O, T::Alias>, &mut BitSlice<O, T::Alias>) {
		self.assert_in_bounds(mid, 0 ..= self.len());
		//  SAFETY: `mid` was just checked against `0 ..= len`.
		unsafe { self.split_at_unchecked_mut(mid) }
	}

	/// Iterates over subslices separated by bits that match the predicate.
	/// The predicate receives each bit's index and value.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn split<F>(&self, pred: F) -> Split<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		Split::new(self, pred)
	}

	/// Mutable counterpart of [`Self::split`]. The source is alias-tainted,
	/// as the produced subslices may share memory elements.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		SplitMut::new(self.alias_mut(), pred)
	}

	/// Like [`Self::split`], but iterating from the back of the slice.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn rsplit<F>(&self, pred: F) -> RSplit<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		RSplit::new(self, pred)
	}

	/// Mutable counterpart of [`Self::rsplit`]; the source is alias-tainted.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		RSplitMut::new(self.alias_mut(), pred)
	}

	/// Like [`Self::split`], limited to at most `n` produced subslices.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		SplitN::new(self, pred, n)
	}

	/// Mutable counterpart of [`Self::splitn`]; the source is alias-tainted.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		SplitNMut::new(self.alias_mut(), pred, n)
	}

	/// Like [`Self::splitn`], but iterating from the back of the slice.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		RSplitN::new(self, pred, n)
	}

	/// Mutable counterpart of [`Self::rsplitn`]; the source is
	/// alias-tainted.
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<O, T, F>
	where F: FnMut(usize, &bool) -> bool {
		RSplitNMut::new(self.alias_mut(), pred, n)
	}

	/// Tests whether the slice contains `x` as a contiguous bit sequence,
	/// by scanning every window of matching width.
	#[inline]
	pub fn contains<O2, T2>(&self, x: &BitSlice<O2, T2>) -> bool
	where
		O2: BitOrder,
		T2: BitStore,
	{
		let len = x.len();
		//  A needle longer than the haystack can never match.
		if len > self.len() {
			return false;
		};
		self.windows(len).any(|s| s == x)
	}

	/// Tests whether the slice begins with the bit sequence `needle`.
	#[inline]
	pub fn starts_with<O2, T2>(&self, needle: &BitSlice<O2, T2>) -> bool
	where
		O2: BitOrder,
		T2: BitStore,
	{
		let len = needle.len();
		//  SAFETY: `.. len` is only taken when `self.len() >= len`.
		self.len() >= len && needle == unsafe { self.get_unchecked(.. len) }
	}

	/// Tests whether the slice ends with the bit sequence `needle`.
	#[inline]
	pub fn ends_with<O2, T2>(&self, needle: &BitSlice<O2, T2>) -> bool
	where
		O2: BitOrder,
		T2: BitStore,
	{
		let nlen = needle.len();
		let len = self.len();
		//  SAFETY: `len - nlen ..` is only taken when `len >= nlen`.
		len >= nlen && needle == unsafe { self.get_unchecked(len - nlen ..) }
	}

	/// Rotates the slice contents `by` bits towards index 0, in place.
	///
	/// # Panics
	///
	/// Panics if `by` exceeds `self.len()`.
	#[inline]
	pub fn rotate_left(&mut self, mut by: usize) {
		let len = self.len();
		assert!(
			by <= len,
			"Slices cannot be rotated by more than their length"
		);
		//  Rotating by zero or by the full length is the identity.
		if by == 0 || by == len {
			return;
		}
		//  Use one processor word as a staging buffer, moving at most
		//  `usize::BITS` bits per pass until the rotation is consumed.
		let mut tmp = BitArray::<O, usize>::zeroed();
		while by > 0 {
			let shamt = cmp::min(<usize as IsNumber>::BITS as usize, by);
			unsafe {
				//  Stash the front `shamt` bits,
				let tmp_bits = tmp.get_unchecked_mut(.. shamt);
				tmp_bits.clone_from_bitslice(self.get_unchecked(.. shamt));
				//  slide the rest of the slice down to the front,
				self.copy_within_unchecked(shamt .., 0);
				//  and write the stashed bits into the vacated back.
				self.get_unchecked_mut(len - shamt ..)
					.clone_from_bitslice(tmp_bits);
			}
			by -= shamt;
		}
	}

	/// Rotates the slice contents `by` bits away from index 0, in place.
	///
	/// # Panics
	///
	/// Panics if `by` exceeds `self.len()`.
	#[inline]
	pub fn rotate_right(&mut self, mut by: usize) {
		let len = self.len();
		assert!(
			by <= len,
			"Slices cannot be rotated by more than their length"
		);
		//  Rotating by zero or by the full length is the identity.
		if by == 0 || by == len {
			return;
		}
		//  Use one processor word as a staging buffer, moving at most
		//  `usize::BITS` bits per pass until the rotation is consumed.
		let mut tmp = BitArray::<O, usize>::zeroed();
		while by > 0 {
			let shamt = cmp::min(<usize as IsNumber>::BITS as usize, by);
			let mid = len - shamt;
			unsafe {
				//  Stash the back `shamt` bits,
				let tmp_bits = tmp.get_unchecked_mut(.. shamt);
				tmp_bits.clone_from_bitslice(self.get_unchecked(mid ..));
				//  slide the rest of the slice up towards the back,
				self.copy_within_unchecked(.. mid, shamt);
				//  and write the stashed bits into the vacated front.
				self.get_unchecked_mut(.. shamt)
					.clone_from_bitslice(tmp_bits);
			}
			by -= shamt;
		}
	}

	/// Deprecated alias for `clone_from_bitslice`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `clone_from_bitslice` to copy between bitslices"]
	pub fn clone_from_slice<O2, T2>(&mut self, src: &BitSlice<O2, T2>)
	where
		O2: BitOrder,
		T2: BitStore,
	{
		self.clone_from_bitslice(src)
	}

	/// Deprecated alias for `copy_from_bitslice`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `copy_from_bitslice` to copy between bitslices"]
	pub fn copy_from_slice(&mut self, src: &Self) {
		self.copy_from_bitslice(src)
	}

	/// Copies the bits in `src` to the region of the same width beginning
	/// at `dest`, within the same slice.
	///
	/// # Panics
	///
	/// Panics if either the source range or the destination range falls
	/// outside the slice.
	#[inline]
	pub fn copy_within<R>(&mut self, src: R, dest: usize)
	where R: RangeBounds<usize> {
		let len = self.len();
		//  Resolve the range bounds into a concrete `Range<usize>`.
		let src = dvl::normalize_range(src, len);
		dvl::assert_range(src.clone(), len);
		//  The destination region has the same width as the source.
		dvl::assert_range(dest .. dest + (src.end - src.start), len);
		//  SAFETY: both regions were just asserted to be in bounds.
		unsafe {
			self.copy_within_unchecked(src, dest);
		}
	}

	/// Deprecated alias for `swap_with_bitslice`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `swap_with_bitslice` to swap between bitslices"]
	pub fn swap_with_slice<O2, T2>(&mut self, other: &mut BitSlice<O2, T2>)
	where
		O2: BitOrder,
		T2: BitStore,
	{
		self.swap_with_bitslice(other);
	}

	/// Views the slice as a `U`-typed middle region with `T`-typed
	/// leading and trailing remainders.
	///
	/// # Safety
	///
	/// NOTE(review): delegates to `BitSpan::align_to`; presumably this
	/// mirrors the `slice::align_to` contract, where the split points are
	/// implementation-chosen and must not be relied upon — confirm against
	/// the `BitSpan` implementation.
	#[inline]
	pub unsafe fn align_to<U>(&self) -> (&Self, &BitSlice<O, U>, &Self)
	where U: BitStore {
		let (l, c, r) = self.as_bitspan().align_to::<U>();
		(
			l.to_bitslice_ref(),
			c.to_bitslice_ref(),
			r.to_bitslice_ref(),
		)
	}

	/// Mutable counterpart of [`Self::align_to`].
	///
	/// # Safety
	///
	/// Same considerations as [`Self::align_to`].
	#[inline]
	pub unsafe fn align_to_mut<U>(
		&mut self,
	) -> (&mut Self, &mut BitSlice<O, U>, &mut Self)
	where U: BitStore {
		let (l, c, r) = self.as_mut_bitspan().align_to::<U>();
		(
			l.to_bitslice_mut(),
			c.to_bitslice_mut(),
			r.to_bitslice_mut(),
		)
	}
}
#[cfg(feature = "alloc")]
impl<O, T> BitSlice<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// Deprecated alias for `to_bitvec`.
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Prefer `to_bitvec`"]
	pub fn to_vec(&self) -> BitVec<O, T::Unalias> {
		self.to_bitvec()
	}

	/// Creates a vector containing the slice repeated `n` times, matching
	/// the behavior of `[T]::repeat`.
	///
	/// # Panics
	///
	/// Panics if the total bit count `self.len() * n` overflows `usize`.
	#[inline]
	pub fn repeat(&self, n: usize) -> BitVec<O, T::Unalias> {
		let len = self.len();
		let total = len.checked_mul(n).expect("capacity overflow");
		let mut out = BitVec::repeat(false, total);
		//  An empty slice repeats into an empty vector. Returning early
		//  also avoids calling `chunks_exact_mut(0)`, which panics on a
		//  zero chunk width.
		if len == 0 {
			return out;
		}
		//  `total` is an exact multiple of `len`, so the exact-chunk
		//  iterator covers every bit of `out` with no remainder. The
		//  chunks are disjoint regions, so removing the alias marker does
		//  not create overlapping mutable views.
		for chunk in unsafe { out.chunks_exact_mut(len).remove_alias() } {
			chunk.clone_from_bitslice(self);
		}
		out
	}
}
/// Views a single element as an immutable [`BitSlice`] over all of its bits.
#[inline(always)]
#[cfg(not(tarpaulin_include))]
pub fn from_ref<O, T>(elem: &T) -> &BitSlice<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	//  Forward directly to the inherent single-element constructor.
	BitSlice::<O, T>::from_element(elem)
}
/// Views a single element as a mutable [`BitSlice`] over all of its bits.
#[inline(always)]
#[cfg(not(tarpaulin_include))]
pub fn from_mut<O, T>(elem: &mut T) -> &mut BitSlice<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	//  Forward directly to the inherent single-element constructor.
	BitSlice::<O, T>::from_element_mut(elem)
}
/// Constructs an immutable [`BitSlice`] reference from a bit-pointer and a
/// length, failing if the described span is invalid.
///
/// # Safety
///
/// `data` must address a live region of at least `len` bits for the
/// lifetime `'a`, as with `slice::from_raw_parts`.
#[inline]
pub unsafe fn from_raw_parts<'a, O, T>(
	data: BitPtr<Const, O, T>,
	len: usize,
) -> Result<&'a BitSlice<O, T>, BitSpanError<T>>
where
	O: BitOrder,
	T: BitStore,
{
	//  Encode the (pointer, length) pair as a span, then reinterpret it
	//  as a slice reference; span-construction errors propagate out.
	let span = data.span(len)?;
	Ok(span.to_bitslice_ref())
}
/// Constructs a mutable [`BitSlice`] reference from a bit-pointer and a
/// length, failing if the described span is invalid.
///
/// # Safety
///
/// `data` must address a live, uniquely-borrowed region of at least `len`
/// bits for the lifetime `'a`, as with `slice::from_raw_parts_mut`.
#[inline]
pub unsafe fn from_raw_parts_mut<'a, O, T>(
	data: BitPtr<Mut, O, T>,
	len: usize,
) -> Result<&'a mut BitSlice<O, T>, BitSpanError<T>>
where
	O: BitOrder,
	T: BitStore,
{
	//  Encode the (pointer, length) pair as a span, then reinterpret it
	//  as a slice reference; span-construction errors propagate out.
	let span = data.span(len)?;
	Ok(span.to_bitslice_mut())
}
/// Performs indexing operations on a [`BitSlice`], analogously to
/// `core::slice::SliceIndex`: implemented for `usize` (single bit) and for
/// the range types (subslices).
pub trait BitSliceIndex<'a, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// The output of an immutable lookup.
	type Immut;
	/// The output of a mutable lookup.
	type Mut;
	/// Checked immutable lookup; `None` when `self` is out of bounds.
	fn get(self, slice: &'a BitSlice<O, T>) -> Option<Self::Immut>;
	/// Checked mutable lookup; `None` when `self` is out of bounds.
	fn get_mut(self, slice: &'a mut BitSlice<O, T>) -> Option<Self::Mut>;
	/// Unchecked immutable lookup.
	///
	/// # Safety
	///
	/// The caller must ensure `self` is in bounds for `slice`.
	unsafe fn get_unchecked(self, slice: &'a BitSlice<O, T>) -> Self::Immut;
	/// Unchecked mutable lookup.
	///
	/// # Safety
	///
	/// The caller must ensure `self` is in bounds for `slice`.
	unsafe fn get_unchecked_mut(
		self,
		slice: &'a mut BitSlice<O, T>,
	) -> Self::Mut;
	/// Panicking immutable lookup (`Index`-style).
	fn index(self, slice: &'a BitSlice<O, T>) -> Self::Immut;
	/// Panicking mutable lookup (`IndexMut`-style).
	fn index_mut(self, slice: &'a mut BitSlice<O, T>) -> Self::Mut;
}
/// Indexing with a single `usize` yields a proxy reference to one bit.
impl<'a, O, T> BitSliceIndex<'a, O, T> for usize
where
	O: BitOrder,
	T: BitStore,
{
	type Immut = BitRef<'a, Const, O, T>;
	type Mut = BitRef<'a, Mut, O, T>;

	#[inline]
	fn get(self, slice: &'a BitSlice<O, T>) -> Option<Self::Immut> {
		if self >= slice.len() {
			return None;
		}
		//  SAFETY: the guard above proves `self` is within the slice.
		Some(unsafe { self.get_unchecked(slice) })
	}

	#[inline]
	fn get_mut(self, slice: &'a mut BitSlice<O, T>) -> Option<Self::Mut> {
		if self >= slice.len() {
			return None;
		}
		//  SAFETY: the guard above proves `self` is within the slice.
		Some(unsafe { self.get_unchecked_mut(slice) })
	}

	#[inline]
	unsafe fn get_unchecked(self, slice: &'a BitSlice<O, T>) -> Self::Immut {
		//  Offset the region's base bit-pointer and wrap it in a proxy.
		BitRef::from_bitptr(slice.as_bitptr().add(self))
	}

	#[inline]
	unsafe fn get_unchecked_mut(
		self,
		slice: &'a mut BitSlice<O, T>,
	) -> Self::Mut {
		//  Offset the region's base bit-pointer and wrap it in a proxy.
		BitRef::from_bitptr(slice.as_mut_bitptr().add(self))
	}

	#[inline]
	fn index(self, slice: &'a BitSlice<O, T>) -> Self::Immut {
		let len = slice.len();
		match self.get(slice) {
			Some(bit) => bit,
			None => panic!("Index {} out of bounds: {}", self, len),
		}
	}

	#[inline]
	fn index_mut(self, slice: &'a mut BitSlice<O, T>) -> Self::Mut {
		let len = slice.len();
		match self.get_mut(slice) {
			Some(bit) => bit,
			None => panic!("Index {} out of bounds: {}", self, len),
		}
	}
}
//  Implements `BitSliceIndex` for a range type. Each invocation supplies:
//  - `check`: a closure deciding whether the range is in bounds for a span;
//  - `select`: a closure producing the subspan for an (unchecked) range.
macro_rules! range_impl {
	($r:ty { check $check:expr; select $select:expr; }) => {
		impl<'a, O, T> BitSliceIndex<'a, O, T> for $r
		where
			O: BitOrder,
			T: BitStore,
		{
			//  Range indexing yields subslices, not single-bit proxies.
			type Immut = &'a BitSlice<O, T>;
			type Mut = &'a mut BitSlice<O, T>;

			#[inline]
			#[allow(
				clippy::blocks_in_if_conditions,
				clippy::redundant_closure_call
			)]
			fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
				//  Only descend into the unchecked path after `check`
				//  validates the range against the span.
				if ($check)(self.clone(), slice.as_bitspan()) {
					Some(unsafe { self.get_unchecked(slice) })
				}
				else {
					None
				}
			}

			#[inline]
			#[allow(
				clippy::blocks_in_if_conditions,
				clippy::redundant_closure_call
			)]
			fn get_mut(self, slice: Self::Mut) -> Option<Self::Mut> {
				//  Only descend into the unchecked path after `check`
				//  validates the range against the span.
				if ($check)(self.clone(), slice.as_bitspan()) {
					Some(unsafe { self.get_unchecked_mut(slice) })
				}
				else {
					None
				}
			}

			#[inline]
			#[allow(clippy::redundant_closure_call)]
			unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
				//  `select` narrows the span; reinterpret it as a slice.
				($select)(self, slice.as_bitspan()).to_bitslice_ref()
			}

			#[inline]
			#[allow(clippy::redundant_closure_call)]
			unsafe fn get_unchecked_mut(self, slice: Self::Mut) -> Self::Mut {
				//  `select` narrows the span; reinterpret it as a slice.
				($select)(self, slice.as_mut_bitspan()).to_bitslice_mut()
			}

			#[inline]
			fn index(self, slice: Self::Immut) -> Self::Immut {
				//  Capture the range and length before `get` consumes them,
				//  for use in the panic message.
				let r = self.clone();
				let l = slice.len();
				self.get(slice).unwrap_or_else(|| {
					panic!("Range {:?} out of bounds: {}", r, l)
				})
			}

			#[inline]
			fn index_mut(self, slice: Self::Mut) -> Self::Mut {
				//  Capture the range and length before `get_mut` consumes
				//  them, for use in the panic message.
				let r = self.clone();
				let l = slice.len();
				self.get_mut(slice).unwrap_or_else(|| {
					panic!("Range {:?} out of bounds: {}", r, l)
				})
			}
		}
	};
}
//  `a .. b`: both endpoints must lie within the span and be ordered.
range_impl!(Range<usize> {
	check |range: Self, span: BitSpan<_, _, _>| {
		let len = span.len();
		range.start <= len && range.end <= len && range.start <= range.end
	};
	select |range: Self, span: BitSpan<_, _, _>| {
		span.as_bitptr().add(range.start).span_unchecked(range.len())
	};
});

//  `a ..`: only the start needs checking; the tail runs to the span's end.
range_impl!(RangeFrom<usize> {
	check |range: Self, span: BitSpan<_, _, _>| {
		let len = span.len();
		range.start <= len
	};
	select |range: Self, span: BitSpan<_, _, _>| {
		span.as_bitptr().add(range.start).span_unchecked(span.len() - range.start)
	};
});

//  `.. b`: keep the span's origin and truncate its length.
range_impl!(RangeTo<usize> {
	check |range: Self, span: BitSpan<_, _, _>| {
		range.end <= span.len()
	};
	select |range: Self, mut span: BitSpan<_, _, _>| {
		span.set_len(range.end);
		span
	};
});

//  `a ..= b`: the inclusive end must be *strictly* below the length, both
//  so that the selected width `end + 1 - start` stays in bounds and so
//  that `end + 1` cannot overflow.
range_impl!(RangeInclusive<usize> {
	check |range: Self, span: BitSpan<_, _, _>| {
		let len = span.len();
		let start = *range.start();
		let end = *range.end();
		start < len && end < len && start <= end
	};
	select |range: Self, span: BitSpan<_, _, _>| {
		let start = *range.start();
		let end = *range.end();
		span.as_bitptr().add(start).span_unchecked(end + 1 - start)
	};
});

//  `..= b`: strict check for the same overflow reason as `RangeInclusive`.
range_impl!(RangeToInclusive<usize> {
	check |range: Self, span: BitSpan<_, _, _>| {
		range.end < span.len()
	};
	select |range: Self, mut span: BitSpan<_, _, _>| {
		span.set_len(range.end + 1);
		span
	};
});
//  Indexing with `..` is the identity: every method returns the input
//  slice unchanged and can never fail.
#[cfg(not(tarpaulin_include))]
impl<'a, O, T> BitSliceIndex<'a, O, T> for RangeFull
where
	O: BitOrder,
	T: BitStore,
{
	type Immut = &'a BitSlice<O, T>;
	type Mut = &'a mut BitSlice<O, T>;

	#[inline(always)]
	fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
		Some(slice)
	}

	#[inline(always)]
	fn get_mut(self, slice: Self::Mut) -> Option<Self::Mut> {
		Some(slice)
	}

	#[inline(always)]
	unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
		slice
	}

	#[inline(always)]
	unsafe fn get_unchecked_mut(self, slice: Self::Mut) -> Self::Mut {
		slice
	}

	#[inline(always)]
	fn index(self, slice: Self::Immut) -> Self::Immut {
		slice
	}

	#[inline(always)]
	fn index_mut(self, slice: Self::Mut) -> Self::Mut {
		slice
	}
}