use crate::{
access::BitAccess,
array::BitArray,
devel as dvl,
domain::{
Domain,
DomainMut,
},
index::BitRegister,
mem::BitMemory,
order::BitOrder,
pointer::BitPtr,
slice::{
iter::{
Chunks,
ChunksExact,
ChunksExactMut,
ChunksMut,
Iter,
IterMut,
RChunks,
RChunksExact,
RChunksExactMut,
RChunksMut,
RSplit,
RSplitMut,
RSplitN,
RSplitNMut,
Split,
SplitMut,
SplitN,
SplitNMut,
Windows,
},
BitMut,
BitSlice,
},
store::BitStore,
};
use core::{
cmp,
ops::{
Range,
RangeBounds,
RangeFrom,
RangeFull,
RangeInclusive,
RangeTo,
RangeToInclusive,
},
};
use tap::{
pipe::Pipe,
tap::Tap,
};
#[cfg(feature = "alloc")]
use crate::vec::BitVec;
impl<O, T> BitSlice<O, T>
where
O: BitOrder,
T: BitStore,
{
#[inline]
/// Returns the number of bits in the slice.
pub fn len(&self) -> usize {
self.bitptr().len()
}
#[inline]
/// Tests whether the slice contains zero bits.
pub fn is_empty(&self) -> bool {
self.bitptr().len() == 0
}
#[inline]
/// Gets a reference to the first bit, or `None` if the slice is empty.
pub fn first(&self) -> Option<&bool> {
self.get(0)
}
#[inline]
/// Gets a write proxy to the first bit, or `None` if the slice is empty.
pub fn first_mut(&mut self) -> Option<BitMut<O, T>> {
self.get_mut(0)
}
#[inline]
/// Splits the slice into its first bit and the remainder, or `None` if
/// the slice is empty.
pub fn split_first(&self) -> Option<(&bool, &Self)> {
match self.len() {
0 => None,
_ => unsafe {
// SAFETY: the slice is non-empty, so splitting at 1 is in bounds
// and the head subslice holds exactly one bit.
let (head, rest) = self.split_at_unchecked(1);
Some((head.get_unchecked(0), rest))
},
}
}
#[inline]
#[allow(clippy::type_complexity)]
/// Mutable variant of `split_first`; both returned handles carry the
/// `T::Alias` marker.
pub fn split_first_mut(
&mut self,
) -> Option<(BitMut<O, T::Alias>, &mut BitSlice<O, T::Alias>)> {
match self.len() {
0 => None,
_ => unsafe {
// SAFETY: the slice is non-empty, so splitting at 1 is in bounds.
let (head, rest) = self.split_at_unchecked_mut(1);
Some((head.get_unchecked_mut(0), rest))
},
}
}
#[inline]
/// Splits the slice into its last bit and the preceding remainder, or
/// `None` if the slice is empty.
pub fn split_last(&self) -> Option<(&bool, &Self)> {
match self.len() {
0 => None,
len => unsafe {
// SAFETY: `len` is at least 1, so `len - 1` is a valid split point
// and the tail subslice holds exactly one bit.
let (rest, tail) = self.split_at_unchecked(len.wrapping_sub(1));
Some((tail.get_unchecked(0), rest))
},
}
}
#[inline]
#[allow(clippy::type_complexity)]
/// Mutable variant of `split_last`; both returned handles carry the
/// `T::Alias` marker.
pub fn split_last_mut(
&mut self,
) -> Option<(BitMut<O, T::Alias>, &mut BitSlice<O, T::Alias>)> {
match self.len() {
0 => None,
len => unsafe {
// SAFETY: `len` is at least 1, so `len - 1` is a valid split point.
let (rest, tail) = self.split_at_unchecked_mut(len - 1);
Some((tail.get_unchecked_mut(0), rest))
},
}
}
#[inline]
/// Gets a reference to the last bit, or `None` if the slice is empty.
pub fn last(&self) -> Option<&bool> {
match self.len() {
0 => None,
len => Some(unsafe { self.get_unchecked(len - 1) }),
}
}
#[inline]
/// Gets a write proxy to the last bit, or `None` if the slice is empty.
pub fn last_mut(&mut self) -> Option<BitMut<O, T>> {
match self.len() {
0 => None,
len => Some(unsafe { self.get_unchecked_mut(len - 1) }),
}
}
#[inline]
/// Looks up a single bit (by `usize`) or a subslice (by range),
/// returning `None` when the index is out of bounds.
pub fn get<'a, I>(&'a self, index: I) -> Option<I::Immut>
where I: BitSliceIndex<'a, O, T> {
index.get(self)
}
#[inline]
/// Mutable variant of `get`.
pub fn get_mut<'a, I>(&'a mut self, index: I) -> Option<I::Mut>
where I: BitSliceIndex<'a, O, T> {
index.get_mut(self)
}
#[inline]
#[allow(clippy::missing_safety_doc)]
/// Unchecked variant of `get`.
///
/// ## Safety
///
/// `index` must be in bounds for `self`; no bounds check is performed.
pub unsafe fn get_unchecked<'a, I>(&'a self, index: I) -> I::Immut
where I: BitSliceIndex<'a, O, T> {
index.get_unchecked(self)
}
#[inline]
#[allow(clippy::missing_safety_doc)]
/// Unchecked variant of `get_mut`.
///
/// ## Safety
///
/// `index` must be in bounds for `self`; no bounds check is performed.
pub unsafe fn get_unchecked_mut<'a, I>(&'a mut self, index: I) -> I::Mut
where I: BitSliceIndex<'a, O, T> {
index.get_unchecked_mut(self)
}
#[inline(always)]
#[cfg(not(tarpaulin_include))]
/// Produces a raw pointer to the slice handle itself.
///
/// NOTE(review): this is the encoded `*const BitSlice` pointer, not a
/// pointer to the underlying storage elements — confirm callers expect
/// that distinction.
pub fn as_ptr(&self) -> *const Self {
self as *const Self
}
#[inline(always)]
#[cfg(not(tarpaulin_include))]
/// Mutable variant of `as_ptr`.
pub fn as_mut_ptr(&mut self) -> *mut Self {
self as *mut Self
}
#[inline]
/// Exchanges the bits at positions `a` and `b`.
///
/// # Panics
///
/// Panics if either index is not less than the slice length.
pub fn swap(&mut self, a: usize, b: usize) {
    let len = self.len();
    // Validate both indices (in order) before the unchecked swap.
    for &idx in &[a, b] {
        assert!(idx < len, "Index {} out of bounds: {}", idx, len);
    }
    // SAFETY: `a` and `b` were both just checked against `len`.
    unsafe { self.swap_unchecked(a, b) };
}
#[inline]
/// Reverses the order of bits in the slice, in place.
pub fn reverse(&mut self) {
let mut bitptr = self.bitptr();
let mut len = bitptr.len();
// Swap the outermost pair of bits, then step the working region inward
// from both ends until at most one bit remains.
while len > 1 {
unsafe {
len -= 1;
bitptr.to_bitslice_mut::<O>().swap_unchecked(0, len);
// Advance the front of the working region past the settled bit…
bitptr.incr_head();
// …and exclude the settled back bit from the working length.
len -= 1;
}
}
}
#[inline]
/// Produces an iterator over each bit as `&bool`.
pub fn iter(&self) -> Iter<O, T> {
self.into_iter()
}
#[inline]
/// Produces an iterator over each bit as a write proxy.
pub fn iter_mut(&mut self) -> IterMut<O, T> {
self.into_iter()
}
#[inline]
/// Produces an iterator over every contiguous window of width `size`.
///
/// # Panics
///
/// Panics if `size` is zero.
pub fn windows(&self, size: usize) -> Windows<O, T> {
assert_ne!(size, 0, "Window width cannot be 0");
Windows::new(self, size)
}
#[inline]
/// Produces an iterator over non-overlapping chunks of width
/// `chunk_size`, front to back; the final chunk may be shorter.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn chunks(&self, chunk_size: usize) -> Chunks<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
Chunks::new(self, chunk_size)
}
#[inline]
/// Mutable variant of `chunks`.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
ChunksMut::new(self, chunk_size)
}
#[inline]
/// Like `chunks`, but yields only full-width chunks; any short remainder
/// is accessible from the iterator rather than being yielded.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
ChunksExact::new(self, chunk_size)
}
#[inline]
/// Mutable variant of `chunks_exact`.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn chunks_exact_mut(
&mut self,
chunk_size: usize,
) -> ChunksExactMut<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
ChunksExactMut::new(self, chunk_size)
}
#[inline]
/// Like `chunks`, but walking from the back of the slice toward the
/// front.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn rchunks(&self, chunk_size: usize) -> RChunks<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
RChunks::new(self, chunk_size)
}
#[inline]
/// Mutable variant of `rchunks`.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
RChunksMut::new(self, chunk_size)
}
#[inline]
/// Back-to-front variant of `chunks_exact`.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
RChunksExact::new(self, chunk_size)
}
#[inline]
/// Mutable variant of `rchunks_exact`.
///
/// # Panics
///
/// Panics if `chunk_size` is zero.
pub fn rchunks_exact_mut(
&mut self,
chunk_size: usize,
) -> RChunksExactMut<O, T> {
assert_ne!(chunk_size, 0, "Chunk width cannot be 0");
RChunksExactMut::new(self, chunk_size)
}
#[inline]
/// Splits the slice into two subslices at `mid`: `[.. mid]` and
/// `[mid ..]`.
///
/// # Panics
///
/// Panics if `mid` exceeds the slice length.
pub fn split_at(&self, mid: usize) -> (&Self, &Self) {
let len = self.len();
assert!(mid <= len, "Index {} out of bounds: {}", mid, len);
unsafe { self.split_at_unchecked(mid) }
}
#[inline]
#[allow(clippy::type_complexity)]
/// Mutable variant of `split_at`; both halves carry the `T::Alias`
/// marker because they may share a storage element.
///
/// # Panics
///
/// Panics if `mid` exceeds the slice length.
pub fn split_at_mut(
&mut self,
mid: usize,
) -> (&mut BitSlice<O, T::Alias>, &mut BitSlice<O, T::Alias>) {
let len = self.len();
assert!(mid <= len, "Index {} out of bounds: {}", mid, len);
unsafe { self.split_at_unchecked_mut(mid) }
}
#[inline]
/// Produces an iterator over subslices separated by bits that satisfy
/// `pred` (which receives each bit's index and value); separator bits
/// are not contained in the yielded subslices.
pub fn split<F>(&self, pred: F) -> Split<O, T, F>
where F: FnMut(usize, &bool) -> bool {
Split::new(self, pred)
}
#[inline]
/// Mutable variant of `split`.
pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<O, T, F>
where F: FnMut(usize, &bool) -> bool {
SplitMut::new(self.alias_mut(), pred)
}
#[inline]
/// Like `split`, but walking from the back of the slice.
pub fn rsplit<F>(&self, pred: F) -> RSplit<O, T, F>
where F: FnMut(usize, &bool) -> bool {
RSplit::new(self, pred)
}
#[inline]
/// Mutable variant of `rsplit`.
pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<O, T, F>
where F: FnMut(usize, &bool) -> bool {
RSplitMut::new(self.alias_mut(), pred)
}
#[inline]
/// Like `split`, but yielding at most `n` subslices; the final subslice
/// receives the unsplit remainder.
pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<O, T, F>
where F: FnMut(usize, &bool) -> bool {
SplitN::new(self, pred, n)
}
#[inline]
/// Mutable variant of `splitn`.
pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<O, T, F>
where F: FnMut(usize, &bool) -> bool {
SplitNMut::new(self.alias_mut(), pred, n)
}
#[inline]
/// Back-to-front variant of `splitn`.
pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<O, T, F>
where F: FnMut(usize, &bool) -> bool {
RSplitN::new(self, pred, n)
}
#[inline]
/// Mutable variant of `rsplitn`.
pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<O, T, F>
where F: FnMut(usize, &bool) -> bool {
RSplitNMut::new(self.alias_mut(), pred, n)
}
#[inline]
/// Tests whether `self` contains `x` as a contiguous subsequence of
/// bits, regardless of the needle's ordering and storage parameters.
pub fn contains<O2, T2>(&self, x: &BitSlice<O2, T2>) -> bool
where
    O2: BitOrder,
    T2: BitStore,
{
    let len = x.len();
    // A needle longer than the haystack can never match; otherwise,
    // scan every window of matching width for bitwise equality.
    len <= self.len() && self.windows(len).any(|s| s == x)
}
#[inline]
/// Tests whether the slice begins with the bits of `needle`.
pub fn starts_with<O2, T2>(&self, needle: &BitSlice<O2, T2>) -> bool
where
O2: BitOrder,
T2: BitStore,
{
let len = needle.len();
// The unchecked subslice is guarded by the length comparison on the
// left of `&&`.
self.len() >= len && needle == unsafe { self.get_unchecked(.. len) }
}
#[inline]
/// Tests whether the slice ends with the bits of `needle`.
pub fn ends_with<O2, T2>(&self, needle: &BitSlice<O2, T2>) -> bool
where
O2: BitOrder,
T2: BitStore,
{
let nlen = needle.len();
let len = self.len();
// The unchecked subslice is guarded by the length comparison on the
// left of `&&`.
len >= nlen && needle == unsafe { self.get_unchecked(len - nlen ..) }
}
#[inline]
/// Rotates the slice left (toward index 0) by `by` bits, in place.
///
/// # Panics
///
/// Panics if `by` exceeds the slice length.
pub fn rotate_left(&mut self, mut by: usize) {
let len = self.len();
assert!(
by <= len,
"Slices cannot be rotated by more than their length"
);
if by == 0 || by == len {
return;
}
// Rotate in chunks of at most one `usize` worth of bits, using a
// stack-local bit-array as the staging buffer.
let mut tmp = BitArray::<O, usize>::new(0);
while by > 0 {
let shamt = cmp::min(<usize as BitMemory>::BITS as usize, by);
unsafe {
// Save the leading `shamt` bits…
let tmp_bits = tmp.get_unchecked_mut(.. shamt);
tmp_bits.clone_from_bitslice(self.get_unchecked(.. shamt));
// …slide the rest of the slice toward the front…
self.copy_within_unchecked(shamt .., 0);
// …and reinsert the saved bits at the back.
self.get_unchecked_mut(len - shamt ..)
.clone_from_bitslice(tmp_bits);
}
by -= shamt;
}
}
#[inline]
/// Rotates the slice right (away from index 0) by `by` bits, in place.
///
/// # Panics
///
/// Panics if `by` exceeds the slice length.
pub fn rotate_right(&mut self, mut by: usize) {
let len = self.len();
assert!(
by <= len,
"Slices cannot be rotated by more than their length"
);
if by == 0 || by == len {
return;
}
// Mirror image of `rotate_left`: stage the trailing bits, slide the
// rest toward the back, and reinsert the staged bits at the front.
let mut tmp = BitArray::<O, usize>::new(0);
while by > 0 {
let shamt = cmp::min(<usize as BitMemory>::BITS as usize, by);
let mid = len - shamt;
unsafe {
// Save the trailing `shamt` bits…
let tmp_bits = tmp.get_unchecked_mut(.. shamt);
tmp_bits.clone_from_bitslice(self.get_unchecked(mid ..));
// …slide the rest of the slice toward the back…
self.copy_within_unchecked(.. mid, shamt);
// …and reinsert the saved bits at the front.
self.get_unchecked_mut(.. shamt)
.clone_from_bitslice(tmp_bits);
}
by -= shamt;
}
}
#[inline]
/// Copies every bit of `src` into the corresponding position of `self`,
/// one bit at a time. The source may use any ordering and storage
/// parameters.
///
/// # Panics
///
/// Panics when the two slices have different lengths.
pub fn clone_from_bitslice<O2, T2>(&mut self, src: &BitSlice<O2, T2>)
where
    O2: BitOrder,
    T2: BitStore,
{
    let len = self.len();
    assert_eq!(
        len,
        src.len(),
        "Cloning between slices requires equal lengths"
    );
    // Walk both slices in lockstep, transferring one bit per step.
    for idx in 0 .. len {
        // SAFETY: `idx` is below the (equal) length of both slices.
        let bit = unsafe { *src.get_unchecked(idx) };
        unsafe { self.set_unchecked(idx, bit) };
    }
}
#[doc(hidden)]
#[inline(always)]
#[cfg(not(tarpaulin_include))]
#[deprecated(note = "Use `.clone_from_bitslice` to copy between bitslices")]
/// Deprecated forwarding alias of `clone_from_bitslice`, kept so that
/// code written against the `[bool]` API name continues to compile.
pub fn clone_from_slice<O2, T2>(&mut self, src: &BitSlice<O2, T2>)
where
O2: BitOrder,
T2: BitStore,
{
self.clone_from_bitslice(src)
}
#[inline]
/// Copies the bits of `src` into `self`.
///
/// When both slices begin at the same bit index within their first
/// storage element, the copy runs element-at-a-time through the memory
/// domains; otherwise it falls back to a bit-by-bit clone.
///
/// # Panics
///
/// Panics when the two slices have different lengths.
pub fn copy_from_bitslice(&mut self, src: &Self) {
    let len = self.len();
    assert_eq!(
        len,
        src.len(),
        "Copying between slices requires equal lengths"
    );
    let (d_head, s_head) = (self.bitptr().head(), src.bitptr().head());
    if d_head == s_head {
        // Fast path: identical head indices mean the two regions share
        // an element-wise layout, so whole elements can be transferred.
        match (self.domain_mut(), src.domain()) {
            (
                DomainMut::Enclave {
                    elem: d_elem, tail, ..
                },
                Domain::Enclave { elem: s_elem, .. },
            ) => {
                // Only the live bits are rewritten; destination bits
                // outside the mask are preserved.
                let mask = O::mask(d_head, tail);
                d_elem.clear_bits(mask);
                d_elem.set_bits(mask & s_elem.load_value());
            },
            (
                DomainMut::Region {
                    head: d_head,
                    body: d_body,
                    tail: d_tail,
                },
                Domain::Region {
                    head: s_head,
                    body: s_body,
                    tail: s_tail,
                },
            ) => {
                // Partially-live leading element: merge under mask.
                if let (Some((h_idx, dh_elem)), Some((_, sh_elem))) =
                    (d_head, s_head)
                {
                    let mask = O::mask(h_idx, None);
                    dh_elem.clear_bits(mask);
                    dh_elem.set_bits(mask & sh_elem.load_value());
                }
                // Fully-live interior elements: plain element copy.
                d_body.copy_from_slice(s_body);
                // Partially-live trailing element: merge under mask.
                if let (Some((dt_elem, t_idx)), Some((st_elem, _))) =
                    (d_tail, s_tail)
                {
                    let mask = O::mask(None, t_idx);
                    dt_elem.clear_bits(mask);
                    dt_elem.set_bits(mask & st_elem.load_value());
                }
            },
            _ => unreachable!(
                "Slices with equal type parameters, lengths, and heads \
                 will always have equal domains"
            ),
        }
    }
    else {
        // Misaligned heads: copy one bit at a time. Previously this ran
        // unconditionally, repeating the work of the fast path above.
        self.clone_from_bitslice(src);
    }
}
#[doc(hidden)]
#[inline(always)]
#[cfg(not(tarpaulin_include))]
#[deprecated(note = "Use `.copy_from_bitslice` to copy between bitslices")]
/// Deprecated forwarding alias of `copy_from_bitslice`, kept so that
/// code written against the `[bool]` API name continues to compile.
pub fn copy_from_slice(&mut self, src: &Self) {
self.copy_from_bitslice(src)
}
#[inline]
/// Copies the bits in the `src` region of the slice to the region of
/// the same width beginning at `dest`, as by `slice::copy_within`.
///
/// # Panics
///
/// Panics when `src` or the destination span does not fit in the slice.
pub fn copy_within<R>(&mut self, src: R, dest: usize)
where R: RangeBounds<usize> {
let len = self.len();
// Resolve the generic bounds into a concrete `Range<usize>`.
let src = dvl::normalize_range(src, len);
dvl::assert_range(src.clone(), len);
// NOTE(review): `dest + (src.end - src.start)` can overflow `usize`
// before the range assertion fires — confirm callers cannot reach
// that case.
dvl::assert_range(dest .. dest + (src.end - src.start), len);
unsafe {
self.copy_within_unchecked(src, dest);
}
}
#[inline]
/// Exchanges the contents of `self` and `other`, bit for bit. The two
/// slices may use different ordering and storage parameters.
///
/// # Panics
///
/// Panics when the two slices have different lengths.
pub fn swap_with_bitslice<O2, T2>(&mut self, other: &mut BitSlice<O2, T2>)
where
    O2: BitOrder,
    T2: BitStore,
{
    let len = self.len();
    assert_eq!(len, other.len());
    for idx in 0 .. len {
        // SAFETY: `idx` is below the (equal) length of both slices.
        unsafe {
            let left = *self.get_unchecked(idx);
            let right = *other.get_unchecked(idx);
            self.set_unchecked(idx, right);
            other.set_unchecked(idx, left);
        }
    }
}
#[doc(hidden)]
#[inline(always)]
#[cfg(not(tarpaulin_include))]
#[deprecated(note = "Use `.swap_with_bitslice` to swap between bitslices")]
/// Deprecated forwarding alias of `swap_with_bitslice`, kept so that
/// code written against the `[bool]` API name continues to compile.
pub fn swap_with_slice<O2, T2>(&mut self, other: &mut BitSlice<O2, T2>)
where
O2: BitOrder,
T2: BitStore,
{
self.swap_with_bitslice(other);
}
#[inline]
/// Partitions the slice into a head, a center reinterpreted over `U`
/// storage elements, and a tail, mirroring `slice::align_to`.
///
/// ## Safety
///
/// As with `slice::align_to`, whether the reinterpretation of the
/// underlying memory as `U` elements is meaningful is up to the caller.
pub unsafe fn align_to<U>(&self) -> (&Self, &BitSlice<O, U>, &Self)
where U: BitStore {
let bitptr = self.bitptr();
let bp_len = bitptr.len();
// Align the underlying element slice, then rebuild bit-slices over
// each of the three element segments.
let (l, c, r) = bitptr.as_aliased_slice().align_to::<U::Alias>();
// Bits of the left segment before this slice's head index are not
// part of the slice and must be trimmed away.
let l_start = bitptr.head().value() as usize;
let mut l = BitSlice::<O, T>::from_aliased_slice_unchecked(l);
if l.len() > l_start {
l = l.get_unchecked(l_start ..);
}
let mut c = BitSlice::<O, U>::from_aliased_slice_unchecked(c);
// The center keeps at most the bits remaining after the left part.
let c_len = cmp::min(c.len(), bp_len - l.len());
c = c.get_unchecked(.. c_len);
let mut r = BitSlice::<O, T>::from_aliased_slice_unchecked(r);
// Whatever live bits remain belong to the right part.
let r_len = bp_len - l.len() - c.len();
if r.len() > r_len {
r = r.get_unchecked(.. r_len);
}
// Strip the aliasing marker introduced by the raw reconstruction
// before handing the three parts back to the caller.
(
l.bitptr()
.pipe(dvl::remove_bitptr_alias::<T>)
.to_bitslice_ref(),
c.bitptr()
.pipe(dvl::remove_bitptr_alias::<U>)
.to_bitslice_ref(),
r.bitptr()
.pipe(dvl::remove_bitptr_alias::<T>)
.to_bitslice_ref(),
)
}
#[inline]
/// Mutable variant of `align_to`: performs the same partition, then
/// re-asserts write permissions on each of the three parts.
///
/// ## Safety
///
/// Same contract as `align_to`.
pub unsafe fn align_to_mut<U>(
&mut self,
) -> (&mut Self, &mut BitSlice<O, U>, &mut Self)
where U: BitStore {
let (l, c, r) = self.align_to::<U>();
(
l.bitptr().to_bitslice_mut(),
c.bitptr().to_bitslice_mut(),
r.bitptr().to_bitslice_mut(),
)
}
}
#[cfg(feature = "alloc")]
impl<O, T> BitSlice<O, T>
where
O: BitOrder,
T: BitStore,
{
#[inline(always)]
/// Copies the slice into a new heap-allocated `BitVec` with the same
/// ordering and storage parameters.
pub fn to_bitvec(&self) -> BitVec<O, T> {
BitVec::from_bitslice(self)
}
#[doc(hidden)]
#[inline(always)]
#[cfg(not(tarpaulin_include))]
#[deprecated(note = "Use `.to_bitvec` to convert a bit slice into a vector")]
/// Deprecated forwarding alias of `to_bitvec`, kept so that code
/// written against the `[bool]` API name continues to compile.
pub fn to_vec(&self) -> BitVec<O, T> {
self.to_bitvec()
}
#[inline]
pub fn repeat(&self, n: usize) -> BitVec<O, T>
where
O: BitOrder,
T: BitStore,
{
let len = self.len();
let total = len.checked_mul(n).expect("capacity overflow");
let mut out = BitVec::repeat(false, total);
for span in (0 .. n).map(|rep| rep * len .. (rep + 1) * len) {
unsafe { out.get_unchecked_mut(span) }.copy_from_bitslice(self);
}
unsafe {
out.set_len(total);
}
out
}
}
#[inline(always)]
#[cfg(not(tarpaulin_include))]
/// Views a single storage element as an immutable bit-slice over its
/// full width.
pub fn from_ref<O, T>(elem: &T) -> &BitSlice<O, T>
where
O: BitOrder,
T: BitStore + BitRegister,
{
BitSlice::from_element(elem)
}
#[inline(always)]
#[cfg(not(tarpaulin_include))]
/// Views a single storage element as a mutable bit-slice over its full
/// width.
pub fn from_mut<O, T>(elem: &mut T) -> &mut BitSlice<O, T>
where
O: BitOrder,
T: BitStore + BitRegister,
{
BitSlice::from_element_mut(elem)
}
#[inline]
#[cfg(not(tarpaulin_include))]
/// Constructs an immutable bit-slice over `len` storage elements
/// beginning at `data`, spanning every bit of every element.
///
/// # Panics
///
/// Panics when the pointer/length pair cannot be encoded as a valid
/// bit-slice region.
///
/// ## Safety
///
/// `data` must be non-null, well aligned, and valid for reads of `len`
/// elements for the duration of `'a`.
pub unsafe fn from_raw_parts<'a, O, T>(
data: *const T,
len: usize,
) -> &'a BitSlice<O, T>
where
O: BitOrder,
T: 'a + BitStore + BitMemory,
{
super::bits_from_raw_parts(data, 0, len * T::Mem::BITS as usize)
.unwrap_or_else(|| {
panic!(
"Failed to construct `&{}BitSlice` from invalid pointer {:p} \
or element count {}",
"", data, len
)
})
}
#[inline]
#[cfg(not(tarpaulin_include))]
/// Mutable variant of `from_raw_parts`.
///
/// # Panics
///
/// Panics when the pointer/length pair cannot be encoded as a valid
/// bit-slice region.
///
/// ## Safety
///
/// `data` must be non-null, well aligned, valid for reads and writes of
/// `len` elements for the duration of `'a`, and not aliased elsewhere.
pub unsafe fn from_raw_parts_mut<'a, O, T>(
data: *mut T,
len: usize,
) -> &'a mut BitSlice<O, T>
where
O: BitOrder,
T: 'a + BitStore + BitMemory,
{
super::bits_from_raw_parts_mut(data, 0, len * T::Mem::BITS as usize)
.unwrap_or_else(|| {
panic!(
"Failed to construct `&{}BitSlice` from invalid pointer {:p} \
or element count {}",
"mut ", data, len
)
})
}
/// Mirrors `core::slice::SliceIndex`, allowing `usize` and the range
/// types to index into a `BitSlice`.
pub trait BitSliceIndex<'a, O, T>
where
O: 'a + BitOrder,
T: 'a + BitStore,
{
/// The output of an immutable indexing operation.
type Immut;
/// The output of a mutable indexing operation.
type Mut;
/// Checked immutable lookup; `None` when out of bounds.
fn get(self, slice: &'a BitSlice<O, T>) -> Option<Self::Immut>;
/// Checked mutable lookup; `None` when out of bounds.
fn get_mut(self, slice: &'a mut BitSlice<O, T>) -> Option<Self::Mut>;
/// Unchecked immutable lookup.
///
/// ## Safety
///
/// The index must be in bounds for `slice`.
unsafe fn get_unchecked(self, slice: &'a BitSlice<O, T>) -> Self::Immut;
/// Unchecked mutable lookup.
///
/// ## Safety
///
/// The index must be in bounds for `slice`.
unsafe fn get_unchecked_mut(
self,
slice: &'a mut BitSlice<O, T>,
) -> Self::Mut;
/// Immutable lookup that panics when out of bounds.
fn index(self, slice: &'a BitSlice<O, T>) -> Self::Immut;
/// Mutable lookup that panics when out of bounds.
fn index_mut(self, slice: &'a mut BitSlice<O, T>) -> Self::Mut;
}
/// Indexing a `BitSlice` with a `usize` yields a single bit: a `&bool`
/// immutably, or a `BitMut` write proxy mutably.
impl<'a, O, T> BitSliceIndex<'a, O, T> for usize
where
O: 'a + BitOrder,
T: 'a + BitStore,
{
type Immut = &'a bool;
type Mut = BitMut<'a, O, T>;
#[inline]
fn get(self, slice: &'a BitSlice<O, T>) -> Option<Self::Immut> {
if self < slice.len() {
// SAFETY: the bounds check just passed.
Some(unsafe { self.get_unchecked(slice) })
}
else {
None
}
}
#[inline]
fn get_mut(self, slice: &'a mut BitSlice<O, T>) -> Option<Self::Mut> {
if self < slice.len() {
// SAFETY: the bounds check just passed.
Some(unsafe { self.get_unchecked_mut(slice) })
}
else {
None
}
}
#[inline]
unsafe fn get_unchecked(self, slice: &'a BitSlice<O, T>) -> Self::Immut {
// Bits are not addressable, so the references are to the two
// `'static` canonical `bool` values instead.
if slice.bitptr().read::<O>(self) {
&true
}
else {
&false
}
}
#[inline]
unsafe fn get_unchecked_mut(
self,
slice: &'a mut BitSlice<O, T>,
) -> Self::Mut {
// Resolve the bit index into an element offset and an in-element
// bit position, then build a write proxy over that location.
let bitptr = slice.bitptr();
let (elt, bit) = bitptr.head().offset(self as isize);
let addr = bitptr.pointer().to_access().offset(elt);
BitMut::new_unchecked(addr, bit)
}
#[inline]
fn index(self, slice: &'a BitSlice<O, T>) -> Self::Immut {
self.get(slice).unwrap_or_else(|| {
panic!("Index {} out of bounds: {}", self, slice.len())
})
}
#[inline]
fn index_mut(self, slice: &'a mut BitSlice<O, T>) -> Self::Mut {
// The length is captured first because `get_mut` borrows the slice.
let len = slice.len();
self.get_mut(slice)
.unwrap_or_else(|| panic!("Index {} out of bounds: {}", self, len))
}
}
// Generates `BitSliceIndex` implementations for the range types.
//
// The first arm receives hand-written `get` and `get_unchecked` items
// and derives the mutable and panicking forms from them; the second arm
// maps a range type onto an equivalent, already-implemented range.
macro_rules! range_impl {
( $r:ty { $get:item $unchecked:item } ) => {
impl<'a, O, T> BitSliceIndex<'a, O, T> for $r
where O: 'a + BitOrder, T: 'a + BitStore {
type Immut = &'a BitSlice<O, T>;
type Mut = &'a mut BitSlice<O, T>;
#[inline]
$get
#[inline]
// Mutable lookup reuses the immutable lookup, then re-asserts
// write permissions on the produced subslice.
fn get_mut(self, slice: Self::Mut) -> Option<Self::Mut> {
self.get(slice).map(|s| s.bitptr().to_bitslice_mut())
}
#[inline]
$unchecked
#[inline]
unsafe fn get_unchecked_mut(self, slice: Self::Mut) -> Self::Mut {
self.get_unchecked(slice).bitptr().to_bitslice_mut()
}
fn index(self, slice: Self::Immut) -> Self::Immut {
// The range and length are captured before `get` consumes them,
// so the panic message can report both.
let r = self.clone();
let l = slice.len();
self.get(slice)
.unwrap_or_else(|| {
panic!("Range {:?} out of bounds: {}", r, l)
})
}
#[inline]
fn index_mut(self, slice: Self::Mut) -> Self::Mut {
self.index(slice).bitptr().to_bitslice_mut()
}
}
};
( $( $r:ty => map $func:expr; )* ) => { $(
impl<'a, O, T> BitSliceIndex<'a, O, T> for $r
where O: 'a + BitOrder, T: 'a + BitStore {
type Immut = &'a BitSlice<O, T>;
type Mut = &'a mut BitSlice<O, T>;
#[inline]
fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
$func(self).get(slice)
}
#[inline]
fn get_mut(self, slice: Self::Mut) -> Option<Self::Mut> {
$func(self).get_mut(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
$func(self).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: Self::Mut) -> Self::Mut {
$func(self).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: Self::Immut) -> Self::Immut {
$func(self).index(slice)
}
#[inline]
fn index_mut(self, slice: Self::Mut) -> Self::Mut {
$func(self).index_mut(slice)
}
}
)* };
}
// `Range<usize>` is the primitive implementation: every other range
// type below is expressed in terms of it (or of `RangeFrom`/`RangeTo`).
range_impl!(Range<usize> {
fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
let len = slice.len();
// Reject empty-inverted or out-of-bounds ranges before descending
// into the unchecked constructor.
if self.start > len || self.end > len || self.start > self.end {
return None;
}
Some(unsafe { (self.start .. self.end).get_unchecked(slice) })
}
unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
// Advance the head by `start` bits (spilling into whole-element
// offsets as needed), then narrow the length to the range width.
let (addr, head, _) = slice.bitptr().raw_parts();
let (skip, new_head) = head.offset(self.start as isize);
BitPtr::new_unchecked(
addr.to_const().offset(skip),
new_head,
self.end - self.start,
).to_bitslice_ref()
}
});
range_impl!(RangeFrom<usize> {
fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
let len = slice.len();
if self.start <= len {
Some(unsafe { (self.start ..).get_unchecked(slice) })
}
else {
None
}
}
unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
// Same as `Range`, but the subslice runs to the original end.
let (addr, head, bits) = slice.bitptr().raw_parts();
let (skip, new_head) = head.offset(self.start as isize);
BitPtr::new_unchecked(
addr.to_const().offset(skip),
new_head,
bits - self.start,
).to_bitslice_ref()
}
});
range_impl!(RangeTo<usize> {
fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
let len = slice.len();
if self.end <= len {
Some(unsafe { (.. self.end).get_unchecked(slice) })
}
else {
None
}
}
unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
// A head-anchored range only needs its length truncated.
slice.bitptr().tap_mut(|bp| bp.set_len(self.end)).to_bitslice_ref()
}
});
// Inclusive ranges are rewritten into their half-open equivalents.
range_impl! {
RangeInclusive<usize> => map |this: Self| {
#[allow(clippy::range_plus_one)]
(*this.start() .. *this.end() + 1)
};
RangeToInclusive<usize> => map |RangeToInclusive { end }| {
#[allow(clippy::range_plus_one)]
(.. end + 1)
};
}
// The full range is the identity index: every operation returns the
// slice unchanged, and none can fail.
#[cfg(not(tarpaulin_include))]
impl<'a, O, T> BitSliceIndex<'a, O, T> for RangeFull
where
O: 'a + BitOrder,
T: 'a + BitStore,
{
type Immut = &'a BitSlice<O, T>;
type Mut = &'a mut BitSlice<O, T>;
#[inline(always)]
fn get(self, slice: Self::Immut) -> Option<Self::Immut> {
Some(slice)
}
#[inline(always)]
fn get_mut(self, slice: Self::Mut) -> Option<Self::Mut> {
Some(slice)
}
#[inline(always)]
unsafe fn get_unchecked(self, slice: Self::Immut) -> Self::Immut {
slice
}
#[inline(always)]
unsafe fn get_unchecked_mut(self, slice: Self::Mut) -> Self::Mut {
slice
}
#[inline(always)]
fn index(self, slice: Self::Immut) -> Self::Immut {
slice
}
#[inline(always)]
fn index_mut(self, slice: Self::Mut) -> Self::Mut {
slice
}
}