use crate::{
devel as dvl,
order::BitOrder,
slice::{
BitSlice,
Iter,
},
store::BitStore,
vec::BitVec,
};
use core::{
fmt::{
self,
Debug,
Formatter,
},
iter::{
FromIterator,
FusedIterator,
},
mem,
ops::{
Range,
RangeBounds,
},
ptr::NonNull,
};
use tap::{
pipe::Pipe,
tap::TapOptional,
};
/// Appends bits from an iterator of `bool` to the back of the vector.
impl<O, T> Extend<bool> for BitVec<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    #[inline]
    fn extend<I>(&mut self, iter: I)
    where I: IntoIterator<Item = bool> {
        let mut iter = iter.into_iter();
        // Use the iterator's size hint to bulk-reserve and write directly
        // into the reserved region, then fall back to `push` for any bits
        // the hint did not cover.
        match iter.size_hint() {
            // Prefer the upper bound when one exists; otherwise use the
            // lower bound. Either way this is only a capacity hint.
            (n, None) | (_, Some(n)) => {
                self.reserve(n);
                let len = self.len();
                let new_len = len + n;
                // SAFETY: `reserve(n)` guarantees the buffer holds at least
                // `len + n` bits, so this range is inside the allocation
                // even though it lies past the current length.
                let new = unsafe { self.get_unchecked_mut(len .. new_len) };
                let mut pulled = 0;
                // `zip` stops at the shorter side, so at most `n` bits are
                // pulled here even if the hint over-reported.
                for (slot, bit) in new.iter_mut().zip(iter.by_ref()) {
                    slot.set(bit);
                    pulled += 1;
                }
                // SAFETY: exactly `pulled` bits past `len` were initialized
                // by the loop above.
                unsafe {
                    self.set_len(len + pulled);
                }
            },
        }
        // Drain any remaining bits the size hint did not account for.
        iter.for_each(|bit| self.push(bit));
    }
}
/// Appends bits from an iterator of `&bool`, copying each referenced bit.
impl<'a, O, T> Extend<&'a bool> for BitVec<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    #[inline]
    fn extend<I>(&mut self, iter: I)
    where I: IntoIterator<Item = &'a bool> {
        // Dereference each item, then defer to the by-value implementation.
        let bits = iter.into_iter().map(|bit| *bit);
        self.extend(bits);
    }
}
/// Collects an iterator of `bool` into a new bit-vector.
impl<O, T> FromIterator<bool> for BitVec<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    #[inline]
    fn from_iter<I>(iter: I) -> Self
    where I: IntoIterator<Item = bool> {
        let iter = iter.into_iter();
        // Seed the capacity from the size hint — the upper bound when one is
        // known, the lower bound otherwise — then let `Extend` do the fill.
        let (low, high) = iter.size_hint();
        let mut out = Self::with_capacity(high.unwrap_or(low));
        out.extend(iter);
        out
    }
}
/// Collects an iterator of `&bool` into a new bit-vector by copying.
impl<'a, O, T> FromIterator<&'a bool> for BitVec<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    #[inline]
    fn from_iter<I>(iter: I) -> Self
    where I: IntoIterator<Item = &'a bool> {
        // Copy out of the references and reuse the by-value constructor.
        Self::from_iter(iter.into_iter().copied())
    }
}
/// Converts the vector into a by-value iterator over its bits.
impl<O, T> IntoIterator for BitVec<O, T>
where
    O: 'static + BitOrder,
    T: 'static + BitStore,
{
    type IntoIter = IntoIter<O, T>;
    type Item = bool;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            // Launder the slice view to the `'static` lifetime so it can sit
            // in the same struct as the vector that owns it. This is sound
            // only because `IntoIter` keeps `_bv` alive for as long as
            // `iter` exists and never moves the underlying buffer.
            iter: self.as_bitslice().bitptr().to_bitslice_ref().iter(),
            _bv: self,
        }
    }
}
#[cfg(not(tarpaulin_include))]
/// Iterates a borrowed bit-vector exactly as its bit-slice view would.
impl<'a, O, T> IntoIterator for &'a BitVec<O, T>
where
    O: 'a + BitOrder,
    T: 'a + BitStore,
{
    type IntoIter = <&'a BitSlice<O, T> as IntoIterator>::IntoIter;
    type Item = <&'a BitSlice<O, T> as IntoIterator>::Item;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Borrowing iteration is just slice iteration.
        let slice: &'a BitSlice<O, T> = self.as_bitslice();
        slice.into_iter()
    }
}
#[cfg(not(tarpaulin_include))]
/// Iterates an exclusively borrowed bit-vector as its mutable slice view.
impl<'a, O, T> IntoIterator for &'a mut BitVec<O, T>
where
    O: 'a + BitOrder,
    T: 'a + BitStore,
{
    type IntoIter = <&'a mut BitSlice<O, T> as IntoIterator>::IntoIter;
    type Item = <&'a mut BitSlice<O, T> as IntoIterator>::Item;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Mutable iteration is just mutable slice iteration.
        let slice: &'a mut BitSlice<O, T> = self.as_mut_bitslice();
        slice.into_iter()
    }
}
/// A by-value iterator over the bits of a `BitVec`.
///
/// NOTE(review): the derived `Clone` duplicates `_bv` but copies `iter`
/// bitwise, so a clone's `iter` still points into the *original* vector's
/// buffer. If the original is dropped before the clone finishes iterating,
/// the clone's view dangles — confirm whether `Clone` should be hand-written
/// to re-anchor `iter` into the cloned vector.
#[derive(Clone, Debug)]
pub struct IntoIter<O, T>
where
    O: 'static + BitOrder,
    T: 'static + BitStore,
{
    /// The vector that owns the iterated bits. Held only to keep the
    /// allocation alive; never accessed directly during iteration.
    _bv: BitVec<O, T>,
    /// A slice iterator over `_bv`'s bits. The `'static` lifetime is a lie
    /// told to the type system; it is actually bounded by `_bv` above.
    iter: Iter<'static, O, T>,
}
impl<O, T> IntoIter<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Views the bits not yet yielded by the iterator.
    #[inline]
    #[cfg(not(tarpaulin_include))]
    pub fn as_bitslice(&self) -> &BitSlice<O, T> {
        self.iter.as_bitslice()
    }

    /// Deprecated alias of [`as_bitslice`], kept for `Vec::IntoIter` parity.
    #[doc(hidden)]
    #[inline(always)]
    #[cfg(not(tarpaulin_include))]
    #[deprecated(
        note = "Use `.as_bitslice()` on iterators to view the remaining data."
    )]
    pub fn as_slice(&self) -> &BitSlice<O, T> {
        self.as_bitslice()
    }

    /// Mutably views the bits not yet yielded by the iterator.
    #[inline]
    #[cfg(not(tarpaulin_include))]
    pub fn as_mut_bitslice(&mut self) -> &mut BitSlice<O, T> {
        // The shared view is re-cast to exclusive; `&mut self` guarantees no
        // other live view of the owned buffer exists.
        self.iter.as_bitslice().bitptr().to_bitslice_mut()
    }

    /// Deprecated alias of [`as_mut_bitslice`], kept for `Vec::IntoIter`
    /// parity.
    #[cfg_attr(not(tarpaulin_include), inline(always))]
    #[doc(hidden)]
    #[deprecated(note = "Use `.as_mut_bitslice()` on iterators to view the \
                         remaining data.")]
    #[cfg(not(tarpaulin_include))]
    pub fn as_mut_slice(&mut self) -> &mut BitSlice<O, T> {
        self.as_mut_bitslice()
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> Iterator for IntoIter<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    type Item = bool;

    /// Yields the next bit from the front, dereferencing out of the
    /// underlying slice iterator.
    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn next(&mut self) -> Option<bool> {
        self.iter.next().map(|bit| *bit)
    }

    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    /// The remaining count is exactly the remaining length.
    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn count(self) -> usize {
        self.len()
    }

    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn nth(&mut self, n: usize) -> Option<bool> {
        self.iter.nth(n).map(|bit| *bit)
    }

    /// The last bit is the first one from the back.
    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn last(mut self) -> Option<bool> {
        self.next_back()
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> DoubleEndedIterator for IntoIter<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Yields the next bit from the back.
    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn next_back(&mut self) -> Option<bool> {
        self.iter.next_back().map(|bit| *bit)
    }

    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn nth_back(&mut self, n: usize) -> Option<bool> {
        self.iter.nth_back(n).map(|bit| *bit)
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> ExactSizeIterator for IntoIter<O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Exactly as many bits remain as in the underlying slice iterator.
    #[cfg_attr(not(tarpaulin_include), inline(always))]
    fn len(&self) -> usize {
        ExactSizeIterator::len(&self.iter)
    }
}
/// Marker: once `next` returns `None`, it keeps returning `None` (inherited
/// from the underlying slice iterator).
impl<O, T> FusedIterator for IntoIter<O, T>
where
    O: BitOrder,
    T: BitStore,
{
}
/// The iterator produced by `BitVec::drain`: removes and yields a range of
/// bits, closing the gap against the retained tail when dropped.
pub struct Drain<'a, O, T>
where
    O: BitOrder,
    T: 'a + BitStore,
{
    /// Pointer back to the drained vector. `NonNull` rather than `&'a mut`
    /// because `drain` below also refers into the same buffer.
    source: NonNull<BitVec<O, T>>,
    /// Iterator over the bits being removed.
    drain: Iter<'a, O, T>,
    /// Bit indices (in the original buffer) of the region past the drained
    /// range that must be preserved and moved back down on drop.
    tail: Range<usize>,
}
impl<'a, O, T> Drain<'a, O, T>
where
    O: BitOrder,
    T: 'a + BitStore,
{
    /// Sets up a draining iterator over `range` within `source`.
    ///
    /// The vector's length is truncated to the start of the drained range so
    /// that leaking the `Drain` (skipping its `Drop`) can never expose the
    /// drained or tail regions; `Drop` restores the tail.
    #[inline]
    pub(super) fn new<R>(source: &'a mut BitVec<O, T>, range: R) -> Self
    where R: RangeBounds<usize> {
        let len = source.len();
        // Resolve the caller's range bounds against the current length, and
        // fail loudly if the resolved range is out of bounds.
        let drain = dvl::normalize_range(range, len);
        dvl::assert_range(drain.clone(), len);
        // Everything after the drained range is the tail to restore later.
        let tail = drain.end .. len;
        let drain = unsafe {
            // SAFETY: shrinking the length never exposes uninitialized bits,
            // and detaching the drained region from the vector's length
            // prevents double-use if the `Drain` is forgotten.
            source.set_len(drain.start);
            // SAFETY: `drain` was asserted in-bounds above. The produced
            // iterator borrows the region just detached from the length.
            source
                .as_bitslice()
                .get_unchecked(drain)
                .bitptr()
                .to_bitslice_ref()
                .iter()
        };
        let source = source.into();
        Self {
            source,
            drain,
            tail,
        }
    }

    /// Views the bits not yet yielded by the drain.
    ///
    /// NOTE(review): the return lifetime is `'a`, outliving `&self` — confirm
    /// this cannot be used to observe the buffer after `Drop` repositions it.
    #[inline(always)]
    #[cfg(not(tarpaulin_include))]
    pub fn as_bitslice(&self) -> &'a BitSlice<O, T> {
        self.drain.as_bitslice()
    }

    /// Refills the gap between the vector's live bits and the parked tail
    /// from `iter`, extending the vector's length over the written bits.
    ///
    /// Returns `FullSpan` when the whole gap was written, or `EmptyInput`
    /// when the iterator ran dry first.
    #[inline]
    fn fill<I>(&mut self, iter: &mut I) -> FillStatus
    where I: Iterator<Item = bool> {
        // SAFETY: `source` came from a live `&'a mut BitVec` whose exclusive
        // borrow this `Drain` still holds.
        let bitvec = unsafe { self.source.as_mut() };
        let mut len = bitvec.len();
        // SAFETY: `len .. tail.start` is the vacated gap inside the buffer,
        // between the live region and the parked tail.
        let span = unsafe { bitvec.get_unchecked_mut(len .. self.tail.start) };
        let mut out = FillStatus::FullSpan;
        for slot in span {
            if let Some(bit) = iter.next() {
                slot.set(bit);
                len += 1;
            }
            else {
                // Source exhausted before the gap was full.
                out = FillStatus::EmptyInput;
                break;
            }
        }
        // SAFETY: exactly `len` bits are initialized at this point.
        unsafe {
            bitvec.set_len(len);
        }
        out
    }

    /// Moves the parked tail `additional` bits further up the buffer,
    /// reserving capacity as needed. The vector's length is unchanged on
    /// exit.
    ///
    /// # Safety
    ///
    /// The caller must fill the widened gap before the vector's length is
    /// extended over it.
    #[inline]
    unsafe fn move_tail(&mut self, additional: usize) {
        let bitvec = self.source.as_mut();
        let tail_len = self.tail.end - self.tail.start;
        let full_len = additional + tail_len;
        bitvec.reserve(full_len);
        let new_tail_start = additional + self.tail.start;
        // Record the tail's new location, keeping the old one for the copy.
        let orig_tail = mem::replace(
            &mut self.tail,
            new_tail_start .. new_tail_start + tail_len,
        );
        let len = bitvec.len();
        // Temporarily adjust the logical length around the unchecked
        // in-buffer copy, then restore it.
        bitvec.set_len(full_len);
        bitvec.copy_within_unchecked(orig_tail, new_tail_start);
        bitvec.set_len(len);
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> AsRef<BitSlice<O, T>> for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Views the bits not yet drained.
    #[inline(always)]
    fn as_ref(&self) -> &BitSlice<O, T> {
        self.drain.as_bitslice()
    }
}
#[cfg(not(tarpaulin_include))]
impl<'a, O, T> Debug for Drain<'a, O, T>
where
    O: BitOrder,
    T: 'a + BitStore,
{
    /// Renders the not-yet-drained bits as a `Drain(…)` tuple.
    #[inline]
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        let remaining = self.drain.as_bitslice();
        fmt.debug_tuple("Drain").field(&remaining).finish()
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> Iterator for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
    type Item = bool;

    /// Yields the next drained bit from the front.
    #[inline(always)]
    fn next(&mut self) -> Option<bool> {
        self.drain.next().map(|bit| *bit)
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }

    /// The remaining count is exactly the remaining length.
    #[inline(always)]
    fn count(self) -> usize {
        self.len()
    }

    #[inline(always)]
    fn nth(&mut self, n: usize) -> Option<bool> {
        self.drain.nth(n).map(|bit| *bit)
    }

    /// The last bit is the first one from the back.
    #[inline(always)]
    fn last(mut self) -> Option<bool> {
        self.next_back()
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> DoubleEndedIterator for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Yields the next drained bit from the back.
    #[inline(always)]
    fn next_back(&mut self) -> Option<bool> {
        self.drain.next_back().map(|bit| *bit)
    }

    #[inline(always)]
    fn nth_back(&mut self, n: usize) -> Option<bool> {
        self.drain.nth_back(n).map(|bit| *bit)
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T> ExactSizeIterator for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Exactly as many bits remain as in the underlying slice iterator.
    #[inline(always)]
    fn len(&self) -> usize {
        ExactSizeIterator::len(&self.drain)
    }
}
/// Marker: once `next` returns `None`, it keeps returning `None` (inherited
/// from the underlying slice iterator).
impl<O, T> FusedIterator for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
}
// NOTE(review): this impl asserts `Send` unconditionally for any
// `T: BitStore`. Confirm that `BitStore` itself guarantees the underlying
// element is thread-safe; otherwise this wants a `where` clause mirroring
// the bounds on `BitSlice`'s own `Send` impl.
unsafe impl<O, T> Send for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
}
// NOTE(review): this impl asserts `Sync` unconditionally for any
// `T: BitStore`. Confirm that `BitStore` itself guarantees the underlying
// element is thread-safe; otherwise this wants a `where` clause mirroring
// the bounds on `BitSlice`'s own `Sync` impl.
unsafe impl<O, T> Sync for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
}
impl<O, T> Drop for Drain<'_, O, T>
where
    O: BitOrder,
    T: BitStore,
{
    /// Restores the parked tail to sit immediately after the vector's
    /// remaining live bits, extending the length back over it.
    #[inline]
    fn drop(&mut self) {
        let tail = self.tail.clone();
        let tail_len = tail.end - tail.start;
        // Nothing to restore when the drained range reached the end.
        if tail_len == 0 {
            return;
        }
        // SAFETY: the drain still holds the exclusive borrow of the vector
        // it was created from.
        let bitvec = unsafe { self.source.as_mut() };
        let old_len = bitvec.len();
        let new_len = old_len + tail_len;
        unsafe {
            // SAFETY: the tail region is still initialized in the buffer;
            // extending the length and copying it down to `old_len`
            // reconstitutes a contiguous, fully initialized vector.
            bitvec.set_len(new_len);
            bitvec.copy_within_unchecked(tail, old_len);
        }
    }
}
/// Outcome of a `Drain::fill` pass.
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
enum FillStatus {
    /// The gap before the parked tail was completely written.
    FullSpan = 0,
    /// The source iterator ran out before the gap was full.
    EmptyInput = 1,
}
/// The iterator produced by `BitVec::splice`: drains a range of bits while
/// writing replacement bits into the vacated space.
#[derive(Debug)]
pub struct Splice<'a, O, T, I>
where
    O: BitOrder,
    T: 'a + BitStore,
    I: Iterator<Item = bool>,
{
    /// The region being removed.
    drain: Drain<'a, O, T>,
    /// The replacement bits to write into the drained region.
    splice: I,
}
impl<'a, O, T, I> Splice<'a, O, T, I>
where
    O: BitOrder,
    T: 'a + BitStore,
    I: Iterator<Item = bool>,
{
    /// Pairs a draining iterator with its replacement source.
    pub(super) fn new<II>(drain: Drain<'a, O, T>, splice: II) -> Self
    where II: IntoIterator<IntoIter = I, Item = bool> {
        Self {
            drain,
            splice: splice.into_iter(),
        }
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T, I> Iterator for Splice<'_, O, T, I>
where
    O: BitOrder,
    T: BitStore,
    I: Iterator<Item = bool>,
{
    type Item = bool;

    /// Yields the next drained bit; for each bit drained, one replacement
    /// bit (if any remain) is written into the slot it vacated.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.drain.next().tap_some(|_| {
            // Only write a replacement when a bit was actually drained, so
            // the write lands in a slot the drain has already vacated.
            if let Some(bit) = self.splice.next() {
                unsafe {
                    // SAFETY: the drain holds the exclusive borrow of the
                    // vector, and `len` is the first vacated slot ahead of
                    // the parked tail; writing it and bumping the length
                    // keeps the live region contiguous and initialized.
                    let bv = self.drain.source.as_mut();
                    let len = bv.len();
                    bv.set_unchecked(len, bit);
                    bv.set_len(len + 1);
                }
            }
        })
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }

    #[inline(always)]
    fn count(self) -> usize {
        self.drain.len()
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T, I> DoubleEndedIterator for Splice<'_, O, T, I>
where
    O: BitOrder,
    T: BitStore,
    I: Iterator<Item = bool>,
{
    /// Yields the next drained bit from the back. Replacement bits are only
    /// written on forward iteration, matching the front-fill write cursor.
    #[inline(always)]
    fn next_back(&mut self) -> Option<bool> {
        self.drain.next_back()
    }

    #[inline(always)]
    fn nth_back(&mut self, n: usize) -> Option<bool> {
        self.drain.nth_back(n)
    }
}
#[cfg(not(tarpaulin_include))]
impl<O, T, I> ExactSizeIterator for Splice<'_, O, T, I>
where
    O: BitOrder,
    T: BitStore,
    I: Iterator<Item = bool>,
{
    /// Exactly as many bits remain as in the underlying drain.
    #[inline(always)]
    fn len(&self) -> usize {
        ExactSizeIterator::len(&self.drain)
    }
}
/// Marker: once `next` returns `None`, it keeps returning `None` (inherited
/// from the underlying drain).
impl<O, T, I> FusedIterator for Splice<'_, O, T, I>
where
    O: BitOrder,
    T: BitStore,
    I: Iterator<Item = bool>,
{
}
impl<O, T, I> Drop for Splice<'_, O, T, I>
where
    O: BitOrder,
    T: BitStore,
    I: Iterator<Item = bool>,
{
    /// Exhausts the replacement iterator into the spliced region, moving the
    /// parked tail out of the way when the replacement is longer than the
    /// drained range. The inner `Drain`'s own `Drop` then restores the tail.
    #[inline]
    fn drop(&mut self) {
        let tail = self.drain.tail.clone();
        let tail_len = tail.end - tail.start;
        // SAFETY: the drain holds the exclusive borrow of the vector.
        let bitvec = unsafe { self.drain.source.as_mut() };
        // With no tail, remaining replacement bits are a plain append.
        if tail_len == 0 {
            bitvec.extend(self.splice.by_ref());
            return;
        }
        // Phase 1: try to fit the replacement into the existing gap.
        if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
            return;
        }
        // Phase 2: the replacement is longer than the gap. Use its size hint
        // (upper bound when available, lower bound otherwise) to push the
        // tail out once, then refill.
        let len = match self.splice.size_hint() {
            (n, None) | (_, Some(n)) => n,
        };
        unsafe {
            // SAFETY: the widened gap is filled immediately below.
            self.drain.move_tail(len);
        }
        if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
            return;
        }
        // Phase 3: the size hint under-reported. Collect the remainder (into
        // a default-parameterized BitVec) so its exact length is known, move
        // the tail once more, and write the remainder in.
        let mut collected = self.splice.by_ref().collect::<BitVec>().into_iter();
        let len = collected.len();
        if len > 0 {
            unsafe {
                // SAFETY: the widened gap is filled immediately below.
                self.drain.move_tail(len);
            }
            let filled = self.drain.fill(&mut collected);
            debug_assert_eq!(filled, FillStatus::EmptyInput);
            debug_assert_eq!(collected.len(), 0);
        }
    }
}