#![cfg(any(feature = "alloc", feature = "std"))]
use crate::{
bits::{
BitIdx,
Bits,
},
boxed::BitBox,
cursor::{
BigEndian,
Cursor,
},
pointer::BitPtr,
slice::BitSlice,
};
#[cfg(all(feature = "alloc", not(feature = "std")))]
use alloc::{
borrow::{
Borrow,
BorrowMut,
ToOwned,
},
boxed::Box,
vec::Vec,
};
use core::{
clone::Clone,
cmp::{
Eq,
Ord,
Ordering,
PartialEq,
PartialOrd,
},
convert::{
AsMut,
AsRef,
From,
},
default::Default,
fmt::{
self,
Debug,
Display,
Formatter,
},
hash::{
Hash,
Hasher,
},
iter::{
self,
DoubleEndedIterator,
ExactSizeIterator,
Extend,
FromIterator,
FusedIterator,
Iterator,
IntoIterator,
},
marker::{
PhantomData,
Send,
},
mem,
ops::{
Add,
AddAssign,
BitAnd,
BitAndAssign,
BitOr,
BitOrAssign,
BitXor,
BitXorAssign,
Deref,
DerefMut,
Drop,
Index,
IndexMut,
Range,
RangeBounds,
RangeFrom,
RangeFull,
RangeInclusive,
RangeTo,
RangeToInclusive,
Neg,
Not,
Shl,
ShlAssign,
Shr,
ShrAssign,
Sub,
SubAssign,
},
ptr::{
self,
NonNull,
},
slice,
};
#[cfg(feature = "std")]
use std::{
borrow::{
Borrow,
BorrowMut,
ToOwned,
},
boxed::Box,
cmp,
io::{
self,
Write,
},
vec::Vec,
};
/// A compact, growable vector of single-bit values, analogous to `Vec<T>`.
///
/// Bits are packed into elements of `T`, and traversed in the bit order
/// defined by the `C: Cursor` type parameter.
#[repr(C)]
pub struct BitVec<C = BigEndian, T = u8>
where C: Cursor, T: Bits {
    /// Zero-sized marker binding the cursor type parameter into the struct.
    _cursor: PhantomData<C>,
    /// Packed pointer/length descriptor for the live region of bits.
    pointer: BitPtr<T>,
    /// Allocation capacity, measured in `T` elements (not in bits).
    capacity: usize,
}
/// Inherent constructors, converters, and memory-management methods.
impl<C, T> BitVec<C, T>
where C: Cursor, T: Bits {
    /// Constructs an empty `BitVec` with no heap allocation.
    pub fn new() -> Self {
        Self {
            _cursor: PhantomData,
            pointer: BitPtr::empty(),
            capacity: 0,
        }
    }

    /// Constructs an empty `BitVec` able to hold at least `capacity` bits
    /// before reallocating.
    pub fn with_capacity(capacity: usize) -> Self {
        // Convert the requested bit count into a count of `T` elements.
        let (cap, _) = BitIdx::from(0).span::<T>(capacity);
        // Allocate through `Vec`, then take ownership of its raw buffer.
        let (ptr, cap) = {
            let v = Vec::with_capacity(cap);
            let (ptr, cap) = (v.as_ptr(), v.capacity());
            // The buffer now belongs to `self`; don't let `v` free it.
            mem::forget(v);
            (ptr, cap)
        };
        Self {
            _cursor: PhantomData,
            // `uninhabited`: allocation exists, but no bits are live yet.
            pointer: BitPtr::uninhabited(ptr),
            capacity: cap,
        }
    }

    /// Builds a `BitVec` holding every bit of a single element.
    pub fn from_element(elt: T) -> Self {
        Self::from_vec({
            let mut v = Vec::with_capacity(1);
            v.push(elt);
            v
        })
    }

    /// Copies a `&[T]` slice into a new `BitVec`, using every bit.
    pub fn from_slice(slice: &[T]) -> Self {
        BitSlice::<C, T>::from_slice(slice).to_owned()
    }

    /// Takes ownership of a `Vec<T>`'s buffer, using every bit of it.
    ///
    /// # Panics
    ///
    /// Panics if the vector has more elements than `BitPtr` can describe.
    pub fn from_vec(vec: Vec<T>) -> Self {
        let len = vec.len();
        assert!(
            len <= BitPtr::<T>::MAX_ELTS,
            "Vector length {} overflows {}",
            len,
            BitPtr::<T>::MAX_ELTS,
        );
        let (ptr, len, cap) = (vec.as_ptr(), len, vec.capacity());
        // Ownership of the buffer transfers to `self`.
        mem::forget(vec);
        Self {
            _cursor: PhantomData,
            // Region spans all `len` elements, head 0 to a full final tail.
            pointer: BitPtr::new(ptr, len, 0, T::BITS),
            capacity: cap,
        }
    }

    /// Copies the bits of a `BitSlice` into a new `BitVec`, one at a time.
    pub fn from_bitslice(slice: &BitSlice<C, T>) -> Self {
        Self::from_iter(slice.iter())
    }

    /// Converts a `BitBox` into a `BitVec` without copying the buffer.
    pub fn from_boxed_bitslice(slice: BitBox<C, T>) -> Self {
        let bitptr = slice.bitptr();
        // The box's buffer is adopted; prevent its destructor running.
        mem::forget(slice);
        // A box's capacity is exactly its element count.
        unsafe { Self::from_raw_parts(bitptr, bitptr.elements()) }
    }

    /// Reassembles a `BitVec` from a previously disassembled descriptor.
    ///
    /// # Safety
    ///
    /// `pointer` must describe a live allocation of `capacity` elements that
    /// no other owner will free.
    pub unsafe fn from_raw_parts(pointer: BitPtr<T>, capacity: usize) -> Self {
        Self {
            _cursor: PhantomData,
            pointer,
            capacity,
        }
    }

    /// Reports how many bits the vector can hold without reallocating.
    pub fn capacity(&self) -> usize {
        assert!(
            self.capacity <= BitPtr::<T>::MAX_ELTS,
            "Capacity {} overflows {}",
            self.capacity,
            BitPtr::<T>::MAX_ELTS,
        );
        // Elements × bits-per-element, via shift by the element index width.
        self.capacity << T::INDX
    }

    /// Ensures room for at least `additional` more bits.
    ///
    /// # Panics
    ///
    /// Panics if the resulting length would overflow the bit ceiling.
    pub fn reserve(&mut self, additional: usize) {
        let newlen = self.len() + additional;
        assert!(
            newlen < BitPtr::<T>::MAX_BITS,
            "Capacity overflow: {} exceeds {}",
            newlen,
            BitPtr::<T>::MAX_BITS,
        );
        // Convert the extra bit count (from the current tail) into elements.
        let (e, _) = self.pointer.tail().span::<T>(additional);
        self.do_unto_vec(|v| v.reserve(e));
    }

    /// Like `reserve`, but asks the allocator for the exact element count.
    pub fn reserve_exact(&mut self, additional: usize) {
        let newlen = self.len() + additional;
        assert!(
            newlen < BitPtr::<T>::MAX_BITS,
            "Capacity overflow: {} exceeds {}",
            newlen,
            BitPtr::<T>::MAX_BITS,
        );
        let (e, _) = self.pointer.tail().span::<T>(additional);
        self.do_unto_vec(|v| v.reserve_exact(e));
    }

    /// Releases excess capacity back to the allocator.
    pub fn shrink_to_fit(&mut self) {
        self.do_unto_vec(Vec::shrink_to_fit);
    }

    /// Shortens the vector to `len` bits; a no-op when already shorter.
    pub fn truncate(&mut self, len: usize) {
        if len < self.len() {
            let (p, _, h, _) = self.pointer.raw_parts();
            // Recompute the element span and tail for the shorter region.
            let (e, t) = h.span::<T>(len);
            self.pointer = unsafe { BitPtr::new_unchecked(p, e, h, t) };
        }
    }

    /// Views the live bits as an immutable `BitSlice`.
    pub fn as_bitslice(&self) -> &BitSlice<C, T> {
        self.pointer.into()
    }

    /// Views the live bits as a mutable `BitSlice`.
    pub fn as_mut_bitslice(&mut self) -> &mut BitSlice<C, T> {
        self.pointer.into()
    }

    /// Sets the live length to `len` bits without touching contents.
    ///
    /// # Safety
    ///
    /// `len` must not exceed the allocated capacity; bits newly exposed are
    /// whatever the buffer already holds.
    ///
    /// # Panics
    ///
    /// Panics when `len` overflows the bit ceiling or the allocation size.
    pub unsafe fn set_len(&mut self, len: usize) {
        if len == 0 {
            // Keep the allocation, but mark the region as empty.
            self.pointer = BitPtr::uninhabited(self.pointer.pointer());
            return;
        }
        assert!(
            len < BitPtr::<T>::MAX_BITS,
            "Capacity overflow: {} overflows maximum length {}",
            len,
            BitPtr::<T>::MAX_BITS,
        );
        assert!(
            len <= self.capacity(),
            "Capacity overflow: {} overflows allocation size {}",
            len,
            self.capacity(),
        );
        let (ptr, _, head, _) = self.pointer.raw_parts();
        // Walk `len` bits forward from the head to find the element span and
        // tail index. A tail of 0 is normalized to a full final element.
        let (elts, tail) = match head.offset::<T>(len as isize) {
            (e, BitIdx(0)) => (e.saturating_sub(1), T::BITS.into()),
            (e, t) => (e, t),
        };
        self.pointer = BitPtr::new_unchecked(
            ptr,
            // `offset` counts whole strides; +1 converts to element count.
            (elts as usize).saturating_add(1),
            head,
            tail,
        );
    }

    /// Removes the bit at `index` by swapping in the last bit (O(1),
    /// order-destroying).
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    pub fn swap_remove(&mut self, index: usize) -> bool {
        let len = self.len();
        assert!(index < len, "Index {} out of bounds: {}", index, len);
        self.swap(index, len - 1);
        self.pop()
            .expect("BitVec::swap_remove cannot fail after index validation")
    }

    /// Inserts `value` at `index`, shifting later bits up by one.
    pub fn insert(&mut self, index: usize, value: bool) {
        let len = self.len();
        assert!(index <= len, "Index {} is out of bounds: {}", index, len);
        // Append, then rotate the new bit into position.
        self.push(value);
        self[index ..].rotate_right(1);
    }

    /// Removes and returns the bit at `index`, shifting later bits down.
    pub fn remove(&mut self, index: usize) -> bool {
        let len = self.len();
        assert!(index < len, "Index {} is out of bounds: {}", index, len);
        // Rotate the victim to the end, then pop it off.
        self[index ..].rotate_left(1);
        self.pop().expect("BitVec::remove cannot fail after index validation")
    }

    /// Keeps only the bits for which `pred` returns `true`.
    ///
    /// Iterates in reverse so removals do not disturb unvisited indices.
    pub fn retain<F>(&mut self, mut pred: F)
    where F: FnMut(bool) -> bool {
        for n in (0 .. self.len()).rev() {
            if !pred(self[n]) {
                self.remove(n);
            }
        }
    }

    /// Appends a single bit to the end of the vector.
    ///
    /// # Panics
    ///
    /// Panics when the vector is already at the maximum bit length.
    pub fn push(&mut self, value: bool) {
        let len = self.len();
        assert!(
            len < BitPtr::<T>::MAX_BITS,
            "Capacity overflow: {} >= {}",
            len,
            BitPtr::<T>::MAX_BITS,
        );
        // Grow by one zeroed element when the last element is full (or when
        // the vector is empty and has no live element at all).
        if self.is_empty() || *self.pointer.tail() == T::BITS {
            self.do_unto_vec(|v| v.push(0.into()));
        }
        // Extend the live region by one bit, then write the value into it.
        unsafe { self.bitptr_mut().incr_tail() };
        self.set(len, value);
    }

    /// Removes and returns the last bit, or `None` when empty.
    pub fn pop(&mut self) -> Option<bool> {
        if self.is_empty() {
            return None;
        }
        let out = self[self.len() - 1];
        // Shrink the live region by one bit; the datum itself is untouched.
        unsafe { self.bitptr_mut().decr_tail() };
        Some(out)
    }

    /// Moves all bits of `other` onto the end of `self`, emptying `other`.
    /// The cursor and element types of `other` may differ from `self`'s.
    pub fn append<D, U>(&mut self, other: &mut BitVec<D, U>)
    where D: Cursor, U: Bits {
        self.extend(other.iter());
        other.clear();
    }

    /// Removes the bits in `range` and yields them through a `Drain`.
    ///
    /// # Panics
    ///
    /// Panics when the range is inverted or exceeds the vector bounds.
    pub fn drain<R>(&mut self, range: R) -> Drain<C, T>
    where R: RangeBounds<usize> {
        use core::ops::Bound::*;
        let len = self.len();
        // Normalize the generic bounds into a half-open [from, upto).
        let from = match range.start_bound() {
            Included(&n) => n,
            Excluded(&n) => n + 1,
            Unbounded => 0,
        };
        let upto = match range.end_bound() {
            Included(&n) => n + 1,
            Excluded(&n) => n,
            Unbounded => len,
        };
        assert!(from <= upto, "The drain start must be below the drain end");
        assert!(upto <= len, "The drain end must be within the vector bounds");
        unsafe {
            // Capture the drained region as a raw slice view, detached from
            // `self`'s borrow, before shortening the vector.
            let ranging: &BitSlice<C, T> = self
                .as_bitslice()[from .. upto]
                .bitptr()
                .into();
            self.set_len(from);
            Drain {
                bitvec: NonNull::from(self),
                iter: ranging.iter(),
                tail_start: upto,
                tail_len: len - upto,
            }
        }
    }

    /// Empties the vector without releasing its allocation.
    pub fn clear(&mut self) {
        unsafe { self.set_len(0) }
    }

    /// Splits the vector at `at`, returning the bits from `at` onward.
    ///
    /// # Panics
    ///
    /// Panics when `at` exceeds the vector length.
    pub fn split_off(&mut self, at: usize) -> Self {
        let len = self.len();
        assert!(at <= len, "Index out of bounds: {} is beyond {}", at, len);
        match at {
            // Splitting at the front hands the whole vector away.
            0 => unsafe {
                let out = Self::from_raw_parts(self.pointer, self.capacity);
                // Overwrite `self` without running its destructor.
                ptr::write(self, Self::new());
                return out;
            },
            // Splitting at the end yields an empty vector.
            n if n == len => Self::new(),
            // General case: copy the back half, then truncate the front.
            _ => {
                let out = self.as_bitslice().iter().skip(at).collect();
                self.truncate(at);
                out
            },
        }
    }

    /// Resizes the vector in place, filling any growth with `value`.
    pub fn resize(&mut self, new_len: usize, value: bool) {
        let len = self.len();
        if new_len < len {
            self.truncate(new_len);
        }
        else if new_len > len {
            self.extend(iter::repeat(value).take(new_len - len));
        }
    }

    /// Replaces the bits in `range` with bits from `replacement`, yielding
    /// the removed bits through the returned `Splice` iterator.
    pub fn splice<R, I>(
        &mut self,
        range: R,
        replacement: I,
    ) -> Splice<C, T, <I as IntoIterator>::IntoIter>
    where R: RangeBounds<usize>, I: IntoIterator<Item=bool> {
        Splice {
            drain: self.drain(range),
            splice: replacement.into_iter(),
        }
    }

    /// Overwrites every element of the allocation — including capacity
    /// beyond the live region — with `element`.
    pub fn set_elements(&mut self, element: T) {
        self.do_unto_vec(|v| {
            let (ptr, cap) = (v.as_mut_ptr(), v.capacity());
            // Deliberately spans the full capacity, not just `v.len()`.
            for elt in unsafe { slice::from_raw_parts_mut(ptr, cap) } {
                *elt = element;
            }
        })
    }

    /// Reinterprets the vector under a different cursor, without moving or
    /// reordering any storage.
    pub fn change_cursor<D>(self) -> BitVec<D, T>
    where D: Cursor {
        let (bp, cap) = (self.pointer, self.capacity);
        // The buffer is adopted by the new vector; skip `self`'s destructor.
        mem::forget(self);
        unsafe { BitVec::from_raw_parts(bp, cap) }
    }

    /// Converts into a `BitBox`, shedding excess capacity.
    pub fn into_boxed_bitslice(self) -> BitBox<C, T> {
        let pointer = self.pointer;
        // `into_boxed_slice` trims capacity; the box then owns the buffer.
        mem::forget(self.into_boxed_slice());
        unsafe { BitBox::from_raw(pointer) }
    }

    /// Converts into a boxed element slice, discarding bit-region metadata.
    pub fn into_boxed_slice(self) -> Box<[T]> {
        self.into_vec().into_boxed_slice()
    }

    /// Converts into the underlying `Vec<T>`, discarding bit-region
    /// metadata (partial head/tail elements are kept whole).
    pub fn into_vec(self) -> Vec<T> {
        let (data, elts, _, _) = self.pointer.raw_parts();
        let out = unsafe {
            Vec::from_raw_parts(data as *mut T, elts, self.capacity)
        };
        // The Vec now owns the buffer; skip `self`'s destructor.
        mem::forget(self);
        out
    }

    /// Copies out the raw region descriptor.
    pub fn bitptr(&self) -> BitPtr<T> {
        self.pointer
    }

    /// Grants crate-internal mutable access to the region descriptor.
    pub(crate) fn bitptr_mut(&mut self) -> &mut BitPtr<T> {
        &mut self.pointer
    }

    /// Temporarily reconstitutes the buffer as a `Vec<T>`, runs `func` on
    /// it, then records the (possibly moved) buffer pointer and the new
    /// capacity. The element count `e` is written back unchanged; callers
    /// adjust the bit region separately.
    fn do_unto_vec<F, R>(&mut self, func: F) -> R
    where F: FnOnce(&mut Vec<T>) -> R {
        let (p, e, h, t) = self.pointer.raw_parts();
        let mut v = unsafe {
            Vec::from_raw_parts(p as *mut T, e, self.capacity)
        };
        let out = func(&mut v);
        // `func` may have reallocated: refresh pointer and capacity.
        self.pointer = unsafe { BitPtr::new_unchecked(v.as_ptr(), e, h, t) };
        self.capacity = v.capacity();
        // `self` still owns the buffer; don't let `v` free it.
        mem::forget(v);
        out
    }

    /// Read-only counterpart of `do_unto_vec`: exposes the buffer as a
    /// `&Vec<T>` to `func` without ever letting it be dropped.
    fn do_with_vec<F, R>(&self, func: F) -> R
    where F: FnOnce(&Vec<T>) -> R {
        let (data, elts, _, _) = self.pointer.raw_parts();
        let v: Vec<T> = unsafe {
            Vec::from_raw_parts(data as *mut T, elts, self.capacity)
        };
        let out = func(&v);
        mem::forget(v);
        out
    }
}
/// Borrows the vector as an immutable bit-slice.
impl<C, T> Borrow<BitSlice<C, T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn borrow(&self) -> &BitSlice<C, T> {
        self.as_bitslice()
    }
}

/// Borrows the vector as a mutable bit-slice.
impl<C, T> BorrowMut<BitSlice<C, T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn borrow_mut(&mut self) -> &mut BitSlice<C, T> {
        self.as_mut_bitslice()
    }
}
impl<C, T> Clone for BitVec<C, T>
where C: Cursor, T: Bits {
    /// Duplicates the element buffer via `Vec::clone`, then rebuilds the
    /// region descriptor (same extent, head, and tail) over the new buffer.
    fn clone(&self) -> Self {
        let (e, h, t) = self.pointer.region_data();
        let new_vec = self.do_with_vec(Clone::clone);
        let (ptr, cap) = (new_vec.as_ptr(), new_vec.capacity());
        // The clone's buffer is adopted; don't let the Vec free it.
        mem::forget(new_vec);
        Self {
            _cursor: PhantomData,
            pointer: unsafe { BitPtr::new_unchecked(ptr, e, h, t) },
            capacity: cap,
        }
    }

    /// Reuses `self`'s allocation where possible: clears, reserves enough
    /// room, then block-copies `other`'s elements and adopts its region
    /// shape.
    fn clone_from(&mut self, other: &Self) {
        let (from, elts, h, t) = other.pointer.raw_parts();
        self.clear();
        self.reserve(other.len());
        // Fetch the destination pointer only after `reserve` may have moved
        // the buffer.
        let to = self.pointer.pointer() as *mut T;
        unsafe {
            ptr::copy_nonoverlapping(from, to, elts);
        }
        self.pointer = unsafe { BitPtr::new_unchecked(to, elts, h, t) };
    }
}
/// Total equality: delegates to `PartialEq` below.
impl<C, T> Eq for BitVec<C, T>
where C: Cursor, T: Bits {}

/// Total ordering, by bit-sequence comparison of the slices.
impl<C, T> Ord for BitVec<C, T>
where C: Cursor, T: Bits {
    fn cmp(&self, rhs: &Self) -> Ordering {
        self.as_bitslice().cmp(rhs.as_bitslice())
    }
}

/// Bit-sequence equality across differing cursor/element parameters.
impl<A, B, C, D> PartialEq<BitVec<C, D>> for BitVec<A, B>
where A: Cursor, B: Bits, C: Cursor, D: Bits {
    fn eq(&self, rhs: &BitVec<C, D>) -> bool {
        self.as_bitslice().eq(rhs.as_bitslice())
    }
}

/// Equality against a bit-slice.
impl<A, B, C, D> PartialEq<BitSlice<C, D>> for BitVec<A, B>
where A: Cursor, B: Bits, C: Cursor, D: Bits {
    fn eq(&self, rhs: &BitSlice<C, D>) -> bool {
        self.as_bitslice().eq(rhs)
    }
}

/// Equality against a bit-slice reference (convenience for `&bits == vec`
/// style comparisons).
impl<A, B, C, D> PartialEq<&BitSlice<C, D>> for BitVec<A, B>
where A: Cursor, B: Bits, C: Cursor, D: Bits {
    fn eq(&self, rhs: &&BitSlice<C, D>) -> bool {
        self.as_bitslice().eq(*rhs)
    }
}

/// Partial ordering across differing cursor/element parameters.
impl<A, B, C, D> PartialOrd<BitVec<C, D>> for BitVec<A, B>
where A: Cursor, B: Bits, C: Cursor, D: Bits {
    fn partial_cmp(&self, rhs: &BitVec<C, D>) -> Option<Ordering> {
        self.as_bitslice().partial_cmp(rhs.as_bitslice())
    }
}

/// Partial ordering against a bit-slice.
impl<A, B, C, D> PartialOrd<BitSlice<C, D>> for BitVec<A, B>
where A: Cursor, B: Bits, C: Cursor, D: Bits {
    fn partial_cmp(&self, rhs: &BitSlice<C, D>) -> Option<Ordering> {
        self.as_bitslice().partial_cmp(rhs)
    }
}

/// Partial ordering against a bit-slice reference.
impl<A, B, C, D> PartialOrd<&BitSlice<C, D>> for BitVec<A, B>
where A: Cursor, B: Bits, C: Cursor, D: Bits {
    fn partial_cmp(&self, rhs: &&BitSlice<C, D>) -> Option<Ordering> {
        self.as_bitslice().partial_cmp(*rhs)
    }
}
/// Mutable view as a bit-slice.
impl<C, T> AsMut<BitSlice<C, T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn as_mut(&mut self) -> &mut BitSlice<C, T> {
        self.as_mut_bitslice()
    }
}

/// Mutable view of the underlying elements.
impl<C, T> AsMut<[T]> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn as_mut(&mut self) -> &mut [T] {
        // `as_mut_slice` reaches the element view through the bit-slice.
        self.as_mut_slice()
    }
}

/// Immutable view as a bit-slice.
impl<C, T> AsRef<BitSlice<C, T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn as_ref(&self) -> &BitSlice<C, T> {
        self.as_bitslice()
    }
}

/// Immutable view of the underlying elements.
impl<C, T> AsRef<[T]> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
/// Copies a bit-slice into a new vector.
impl<C, T> From<&BitSlice<C, T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn from(src: &BitSlice<C, T>) -> Self {
        Self::from_bitslice(src)
    }
}

/// Collects a `bool` slice, one bit per element.
impl<C, T> From<&[bool]> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn from(src: &[bool]) -> Self {
        src.iter().cloned().collect()
    }
}

/// Adopts a `BitBox`'s buffer without copying.
impl<C, T> From<BitBox<C, T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn from(src: BitBox<C, T>) -> Self {
        Self::from_boxed_bitslice(src)
    }
}

/// Copies an element slice, using every bit.
impl<C, T> From<&[T]> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn from(src: &[T]) -> Self {
        Self::from_slice(src)
    }
}

/// Adopts a boxed element slice without copying.
impl<C, T> From<Box<[T]>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn from(src: Box<[T]>) -> Self {
        Self::from_boxed_bitslice(BitBox::from_boxed_slice(src))
    }
}

/// Converts into a boxed element slice, trimming excess capacity.
impl<C, T> Into<Box<[T]>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn into(self) -> Box<[T]> {
        self.into_boxed_slice()
    }
}

/// Adopts a `Vec`'s buffer without copying.
impl<C, T> From<Vec<T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn from(src: Vec<T>) -> Self {
        Self::from_vec(src)
    }
}

/// Converts into the underlying element `Vec` without copying.
impl<C, T> Into<Vec<T>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn into(self) -> Vec<T> {
        self.into_vec()
    }
}

/// The default vector is empty and unallocated.
impl<C, T> Default for BitVec<C, T>
where C: Cursor, T: Bits {
    fn default() -> Self {
        Self::new()
    }
}
/// Renders the vector with its type parameters, then its bit contents.
impl<C, T> Debug for BitVec<C, T>
where C: Cursor, T: Bits {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // Header: `BitVec<CursorName, ElementName> `, then the bit display.
        f.write_str("BitVec<")?;
        f.write_str(C::TYPENAME)?;
        f.write_str(", ")?;
        f.write_str(T::TYPENAME)?;
        f.write_str("> ")?;
        Display::fmt(&**self, f)
    }
}

/// Renders only the bit contents, delegating to the slice's `Display`.
impl<C, T> Display for BitVec<C, T>
where C: Cursor, T: Bits {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Display::fmt(&**self, f)
    }
}

/// Hashes as a bit-slice so that equal sequences hash equally regardless of
/// container.
impl<C, T> Hash for BitVec<C, T>
where C: Cursor, T: Bits {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        <BitSlice<C, T> as Hash>::hash(self, hasher)
    }
}
/// Permits using a `BitVec` as an `io::Write` byte sink (`std` builds only).
#[cfg(feature = "std")]
impl<C, T> Write for BitVec<C, T>
where C: Cursor, T: Bits {
    /// Appends the bits of `buf`, clamped so the vector cannot exceed its
    /// maximum bit length, and reports the number of bytes consumed.
    ///
    /// The previous implementation extended with the whole of `buf` while
    /// returning the clamped count — violating the `Write` contract that the
    /// return value is the number of bytes actually written — and clamped a
    /// byte count against a bit count. The spare bit count is now converted
    /// to whole bytes (`>> 3`) and only that prefix of `buf` is consumed.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // Spare room in whole bytes, so `amt` and `buf` share a unit.
        let amt = cmp::min(buf.len(), (BitPtr::<T>::MAX_BITS - self.len()) >> 3);
        self.extend(<&BitSlice<C, u8>>::from(&buf[.. amt]));
        Ok(amt)
    }

    /// Nothing is buffered, so flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
/// Extends the vector with every bit produced by an iterator.
impl<C, T> Extend<bool> for BitVec<C, T>
where C: Cursor, T: Bits {
    /// Reserves ahead of time from the iterator's size hint — the upper
    /// bound when one is known, otherwise the lower bound — then appends
    /// each bit in turn.
    fn extend<I: IntoIterator<Item=bool>>(&mut self, src: I) {
        let bits = src.into_iter();
        let (low, high) = bits.size_hint();
        self.reserve(high.unwrap_or(low));
        for bit in bits {
            self.push(bit);
        }
    }
}
/// Collects a stream of bits into a new vector.
impl<C, T> FromIterator<bool> for BitVec<C, T>
where C: Cursor, T: Bits {
    /// Pre-sizes the vector from the iterator's size hint (upper bound when
    /// known, else lower bound), then pushes each bit.
    fn from_iter<I: IntoIterator<Item=bool>>(src: I) -> Self {
        let bits = src.into_iter();
        let (low, high) = bits.size_hint();
        let mut out = Self::with_capacity(high.unwrap_or(low));
        for bit in bits {
            out.push(bit);
        }
        out
    }
}
/// Consuming iteration over the vector's bits.
impl<C, T> IntoIterator for BitVec<C, T>
where C: Cursor, T: Bits {
    type Item = bool;
    type IntoIter = IntoIter<C, T>;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            // Record the original buffer start so `Drop` can free it even
            // after the live region has been advanced by iteration.
            slab: self.pointer.pointer() as *const T,
            inner: self,
        }
    }
}

/// Borrowing iteration, delegating to the bit-slice iterator.
impl<'a, C, T> IntoIterator for &'a BitVec<C, T>
where C: Cursor, T: 'a + Bits {
    type Item = bool;
    type IntoIter = <&'a BitSlice<C, T> as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        <&'a BitSlice<C, T> as IntoIterator>::into_iter(self)
    }
}
// SAFETY: a `BitVec` uniquely owns its heap buffer, so moving it to another
// thread moves sole ownership of that buffer. NOTE(review): this impl has no
// `T: Send` bound — soundness assumes every `T: Bits` implementor is `Send`;
// confirm against the `Bits` trait definition.
unsafe impl<C, T> Send for BitVec<C, T>
where C: Cursor, T: Bits {}
/// Unsigned big-endian addition of two bit-vectors, consuming form.
impl<C, T> Add for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = Self;

    fn add(mut self, addend: Self) -> Self::Output {
        self += addend;
        self
    }
}

/// Unsigned big-endian addition in place: index 0 is the most significant
/// bit, so addition walks both operands from the back.
impl<C, T> AddAssign for BitVec<C, T>
where C: Cursor, T: Bits {
    fn add_assign(&mut self, mut addend: Self) {
        use core::iter::repeat;
        // Ensure `self` is the longer operand, swapping and recursing once
        // if it is not.
        if addend.len() > self.len() {
            mem::swap(self, &mut addend);
            return *self += addend;
        }
        let mut c = false;
        // Result bits accumulate LSB-first; they are popped back out in
        // MSB-first order below.
        let mut stack = BitVec::<C, T>::with_capacity(self.len());
        // Zero-extend the shorter addend with trailing `false`s.
        let addend = addend.into_iter().rev().chain(repeat(false));
        for (a, b) in self.iter().rev().zip(addend) {
            // Full-adder truth table, indexed by (carry, a, b): bit 1 of the
            // entry is the sum bit, bit 0 is the carry-out.
            static JUMP: [u8; 8] = [
                0, 2, 2, 1, 2, 1, 1, 3, ];
            let idx = ((c as u8) << 2) | ((a as u8) << 1) | (b as u8);
            let yz = JUMP[idx as usize];
            let (y, z) = (yz & 2 != 0, yz & 1 != 0);
            stack.push(y);
            c = z;
        }
        // A final carry widens the result by one bit.
        if c {
            stack.push(true);
        }
        // Rebuild `self` MSB-first by draining the stack.
        self.clear();
        while let Some(bit) = stack.pop() {
            self.push(bit);
        }
    }
}
/// Boolean AND against a bit-stream, consuming form.
impl<C, T, I> BitAnd<I> for BitVec<C, T>
where C: Cursor, T: Bits, I: IntoIterator<Item=bool> {
    type Output = Self;

    fn bitand(mut self, rhs: I) -> Self::Output {
        self &= rhs;
        self
    }
}

/// Boolean AND in place. The vector is truncated to the zipped length, so a
/// shorter `rhs` shortens `self`.
impl<C, T, I> BitAndAssign<I> for BitVec<C, T>
where C: Cursor, T: Bits, I: IntoIterator<Item=bool> {
    fn bitand_assign(&mut self, rhs: I) {
        let limit = self.len();
        let mut kept = 0;
        for (pos, bit) in (0 .. limit).zip(rhs) {
            let updated = self[pos] & bit;
            self.set(pos, updated);
            kept += 1;
        }
        self.truncate(kept);
    }
}

/// Boolean OR against a bit-stream, consuming form.
impl<C, T, I> BitOr<I> for BitVec<C, T>
where C: Cursor, T: Bits, I: IntoIterator<Item=bool> {
    type Output = Self;

    fn bitor(mut self, rhs: I) -> Self::Output {
        self |= rhs;
        self
    }
}

/// Boolean OR in place; truncates to the zipped length like `&=`.
impl<C, T, I> BitOrAssign<I> for BitVec<C, T>
where C: Cursor, T: Bits, I: IntoIterator<Item=bool> {
    fn bitor_assign(&mut self, rhs: I) {
        let limit = self.len();
        let mut kept = 0;
        for (pos, bit) in (0 .. limit).zip(rhs) {
            let updated = self[pos] | bit;
            self.set(pos, updated);
            kept += 1;
        }
        self.truncate(kept);
    }
}

/// Boolean XOR against a bit-stream, consuming form.
impl<C, T, I> BitXor<I> for BitVec<C, T>
where C: Cursor, T: Bits, I: IntoIterator<Item=bool> {
    type Output = Self;

    fn bitxor(mut self, rhs: I) -> Self::Output {
        self ^= rhs;
        self
    }
}

/// Boolean XOR in place; truncates to the zipped length like `&=`.
impl<C, T, I> BitXorAssign<I> for BitVec<C, T>
where C: Cursor, T: Bits, I: IntoIterator<Item=bool> {
    fn bitxor_assign(&mut self, rhs: I) {
        let limit = self.len();
        let mut kept = 0;
        for (pos, bit) in (0 .. limit).zip(rhs) {
            let updated = self[pos] ^ bit;
            self.set(pos, updated);
            kept += 1;
        }
        self.truncate(kept);
    }
}
/// Dereferences to the bit-slice view, making all slice methods available.
impl<C, T> Deref for BitVec<C, T>
where C: Cursor, T: Bits {
    type Target = BitSlice<C, T>;

    fn deref(&self) -> &Self::Target {
        self.as_bitslice()
    }
}

/// Mutable dereference to the bit-slice view.
impl<C, T> DerefMut for BitVec<C, T>
where C: Cursor, T: Bits {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_bitslice()
    }
}
impl<C, T> Drop for BitVec<C, T>
where C: Cursor, T: Bits {
    /// Frees the allocation by rebuilding a `Vec` over it with length zero
    /// (elements are plain data and need no per-element drop) and letting
    /// the `Vec`'s destructor release the buffer.
    fn drop(&mut self) {
        // Defuse the descriptor first so `self` no longer claims the buffer.
        let bp = mem::replace(&mut self.pointer, BitPtr::empty());
        let (ptr, cap) = (bp.pointer(), self.capacity);
        drop(unsafe { Vec::from_raw_parts(ptr as *mut T, 0, cap) });
    }
}
/// Single-bit indexing. `Index` must return a reference, so the bit value
/// selects between references to the promoted `true`/`false` statics.
impl<C, T> Index<usize> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = bool;

    fn index(&self, cursor: usize) -> &Self::Output {
        if self.as_bitslice()[cursor] { &true } else { &false }
    }
}

/// Range indexing delegates to the bit-slice; all the impls below differ
/// only in how the range is normalized before the slice lookup.
impl<C, T> Index<Range<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = BitSlice<C, T>;

    fn index(&self, Range { start, end }: Range<usize>) -> &Self::Output {
        &self.as_bitslice()[start .. end]
    }
}

impl<C, T> IndexMut<Range<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn index_mut(
        &mut self,
        Range { start, end }: Range<usize>,
    ) -> &mut Self::Output {
        &mut self.as_mut_bitslice()[start .. end]
    }
}

impl<C, T> Index<RangeInclusive<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = BitSlice<C, T>;

    fn index(&self, index: RangeInclusive<usize>) -> &Self::Output {
        let start = *index.start();
        // `end + 1` may overflow `usize`; fall back to an open-ended range.
        if let Some(end) = index.end().checked_add(1) {
            &self.as_bitslice()[start .. end]
        }
        else {
            &self.as_bitslice()[start ..]
        }
    }
}

impl<C, T> IndexMut<RangeInclusive<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn index_mut(&mut self, index: RangeInclusive<usize>) -> &mut Self::Output {
        let start = *index.start();
        // Same overflow guard as the immutable inclusive-range impl.
        if let Some(end) = index.end().checked_add(1) {
            &mut self.as_mut_bitslice()[start .. end]
        }
        else {
            &mut self.as_mut_bitslice()[start ..]
        }
    }
}

impl<C, T> Index<RangeFrom<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = BitSlice<C, T>;

    fn index(&self, RangeFrom { start }: RangeFrom<usize>) -> &Self::Output {
        &self.as_bitslice()[start .. self.len()]
    }
}

impl<C, T> IndexMut<RangeFrom<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn index_mut(
        &mut self,
        RangeFrom { start }: RangeFrom<usize>,
    ) -> &mut Self::Output {
        // Capture the length before the mutable borrow of the slice.
        let len = self.len();
        &mut self.as_mut_bitslice()[start .. len]
    }
}

impl<C, T> Index<RangeFull> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = BitSlice<C, T>;

    fn index(&self, _: RangeFull) -> &Self::Output {
        self.as_bitslice()
    }
}

impl<C, T> IndexMut<RangeFull> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn index_mut(&mut self, _: RangeFull) -> &mut Self::Output {
        self.as_mut_bitslice()
    }
}

impl<C, T> Index<RangeTo<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = BitSlice<C, T>;

    fn index(&self, RangeTo { end }: RangeTo<usize>) -> &Self::Output {
        &self.as_bitslice()[0 .. end]
    }
}

impl<C, T> IndexMut<RangeTo<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn index_mut(
        &mut self,
        RangeTo { end }: RangeTo<usize>,
    ) -> &mut Self::Output {
        &mut self.as_mut_bitslice()[0 .. end]
    }
}

impl<C, T> Index<RangeToInclusive<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = BitSlice<C, T>;

    fn index(
        &self,
        RangeToInclusive { end }: RangeToInclusive<usize>,
    ) -> &Self::Output {
        &self.as_bitslice()[0 ..= end]
    }
}

impl<C, T> IndexMut<RangeToInclusive<usize>> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn index_mut(
        &mut self,
        RangeToInclusive { end }: RangeToInclusive<usize>,
    ) -> &mut Self::Output {
        &mut self.as_mut_bitslice()[0 ..= end]
    }
}
/// Two's-complement negation: invert every bit, then add one.
impl<C, T> Neg for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = Self;

    fn neg(mut self) -> Self::Output {
        // Zero (or an empty vector) is its own negation.
        if self.is_empty() || self.not_any() {
            return self;
        }
        self = !self;
        self += BitVec::<C, T>::from_iter(iter::once(true));
        self
    }
}

/// Bitwise inversion of every live bit, in place on the owned buffer.
impl<C, T> Not for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = Self;

    fn not(mut self) -> Self::Output {
        // The slice `Not` inverts in place; its return value is the same
        // region and can be discarded.
        let _ = !(self.as_mut_bitslice());
        self
    }
}
// Presumably generates `Shl`/`Shr` impls whose shift-amount type is each of
// the listed primitives, forwarding to the `usize` impls below — confirm
// against the `__bitvec_shift!` macro definition elsewhere in the crate.
__bitvec_shift!(u8, u16, u32, u64, i8, i16, i32, i64);
/// Left shift, consuming form.
impl<C, T> Shl<usize> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = Self;

    fn shl(mut self, shamt: usize) -> Self::Output {
        self <<= shamt;
        self
    }
}

/// Left shift in place. Unlike integer `<<`, this shifts bits toward index
/// 0, discards the first `shamt` bits, and SHRINKS the vector length.
impl<C, T> ShlAssign<usize> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn shl_assign(&mut self, shamt: usize) {
        let len = self.len();
        // Shifting out everything: empty the vector and zero the storage.
        if shamt >= len {
            self.clear();
            // NOTE(review): after `clear()` the element view below may be
            // empty, which would make this zeroing a no-op — confirm what
            // extent `as_mut::<[T]>` exposes for an empty vector.
            let buf: &mut [T] = self.as_mut();
            let ptr = buf.as_mut_ptr();
            let len = buf.len();
            unsafe { core::ptr::write_bytes(ptr, 0, len); }
            return;
        }
        // Slide each surviving bit down by `shamt`.
        for idx in shamt .. len {
            let val = self[idx];
            self.set(idx.saturating_sub(shamt), val);
        }
        // Zero the vacated top region, then drop it from the length.
        let trunc = len.saturating_sub(shamt);
        for idx in trunc .. len {
            self.set(idx, false);
        }
        self.truncate(trunc);
    }
}
/// Right shift, consuming form.
impl<C, T> Shr<usize> for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = Self;

    fn shr(mut self, shamt: usize) -> Self::Output {
        self >>= shamt;
        self
    }
}

/// Right shift in place. Unlike integer `>>`, this GROWS the vector by
/// `shamt`, slides every bit toward higher indices, and zero-fills the
/// vacated front.
impl<C, T> ShrAssign<usize> for BitVec<C, T>
where C: Cursor, T: Bits {
    fn shr_assign(&mut self, shamt: usize) {
        let old_len = self.len();
        // Extend the vector to make room at the back.
        for _ in 0 .. shamt {
            self.push(false);
        }
        // Move bits upward, back-to-front so nothing is overwritten early.
        for idx in (0 .. old_len).rev() {
            let val = self[idx];
            self.set(idx.saturating_add(shamt), val);
        }
        // Zero the vacated front region.
        for idx in 0 .. shamt {
            self.set(idx, false);
        }
    }
}
/// Two's-complement subtraction, consuming form.
impl<C, T> Sub for BitVec<C, T>
where C: Cursor, T: Bits {
    type Output = Self;

    fn sub(mut self, subtrahend: Self) -> Self::Output {
        self -= subtrahend;
        self
    }
}
/// Two's-complement subtraction in place: negates the subtrahend, matches
/// its width to `self` (sign-extending when widening), then adds.
///
/// The previous implementation left a debug `eprintln!` in the hot path;
/// besides polluting stderr, `eprintln!` is std-only and broke the
/// `alloc`-without-`std` build this file supports. It has been removed.
impl<C, T> SubAssign for BitVec<C, T>
where C: Cursor, T: Bits {
    fn sub_assign(&mut self, mut subtrahend: Self) {
        // Subtracting zero is a no-op.
        if subtrahend.not_any() {
            return;
        }
        // Negate, then add: a - b == a + (-b).
        subtrahend = -subtrahend;
        let (llen, rlen) = (self.len(), subtrahend.len());
        if rlen > llen {
            // Widen `self` (zero-filled at the front) to match.
            let diff = rlen - llen;
            *self >>= diff;
        }
        else if llen > rlen {
            // Widen the subtrahend, sign-extending with its top bit.
            let diff = llen - rlen;
            let sign = subtrahend[0];
            subtrahend >>= diff;
            subtrahend[.. diff].set_all(sign);
        }
        let old = self.len();
        *self += subtrahend;
        // A carry out of the top bit widened the sum; discard it, as
        // two's-complement arithmetic works modulo the bit width.
        if self.len() > old {
            *self <<= 1;
        }
    }
}
/// Draining iterator over a removed range of a `BitVec`.
pub struct Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {
    /// Raw handle back to the source vector, used on drop to reattach the
    /// preserved tail.
    bitvec: NonNull<BitVec<C, T>>,
    /// Iterator over the drained bits themselves.
    iter: crate::slice::Iter<'a, C, T>,
    /// Index where the preserved tail region begins in the source buffer.
    tail_start: usize,
    /// Number of preserved bits after the drained range.
    tail_len: usize,
}
/// Splice-support internals shared with `Splice`'s destructor.
impl<'a, C, T> Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {
    /// Fills the drained gap (from the vector's current length up to
    /// `tail_start`) with bits from `stream`. Returns `true` when the gap
    /// was completely filled; on early exhaustion, slides the preserved tail
    /// down to close the remaining gap and returns `false`.
    ///
    /// # Safety
    ///
    /// Caller must guarantee the `bitvec` handle is still valid and not
    /// aliased mutably elsewhere.
    unsafe fn fill<I: Iterator<Item=bool>>(&mut self, stream: &mut I) -> bool {
        let bv = self.bitvec.as_mut();
        let drain_from = bv.len();
        let drain_upto = self.tail_start;
        for n in drain_from .. drain_upto {
            if let Some(bit) = stream.next() {
                bv.push(bit);
            }
            else {
                // Stream ran dry at position `n`: shift the tail bits down
                // to start at `n`, and record the new tail origin.
                for (to, from) in (n .. n + self.tail_len).zip(drain_upto ..) {
                    bv.swap(from, to);
                }
                self.tail_start = n;
                return false;
            }
        }
        true
    }

    /// Moves the preserved tail `by` bits further back, opening more room
    /// for replacement bits. Temporarily extends the live length to perform
    /// the back-to-front swap, then restores it.
    ///
    /// # Safety
    ///
    /// Caller must guarantee the `bitvec` handle is still valid.
    unsafe fn move_tail(&mut self, by: usize) {
        let bv = self.bitvec.as_mut();
        bv.reserve(by);
        let new_tail = self.tail_start + by;
        let old_len = bv.len();
        let new_len = self.tail_start + self.tail_len + by;
        // Expose the full working region so swaps can reach the tail.
        bv.set_len(new_len);
        // Back-to-front so no tail bit is overwritten before it is moved.
        for n in (0 .. self.tail_len).rev() {
            bv.swap(self.tail_start + n, new_tail + n);
        }
        bv.set_len(old_len);
        self.tail_start = new_tail;
    }
}
/// All iterator behavior delegates to the inner bit-slice iterator.
impl<'a, C, T> DoubleEndedIterator for Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

impl<'a, C, T> ExactSizeIterator for Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {}

impl<'a, C, T> FusedIterator for Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {}

impl<'a, C, T> Iterator for Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {
    type Item = bool;

    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    fn count(self) -> usize {
        // Exact-size iterator: the remaining length is the count.
        self.len()
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.iter.nth(n)
    }

    fn last(mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}
impl<'a, C, T> Drop for Drain<'a, C, T>
where C: Cursor, T: 'a + Bits {
    /// Reattaches the preserved tail: swaps the tail bits down to directly
    /// follow the vector's current end, then sets the final length.
    fn drop(&mut self) { unsafe {
        let bv: &mut BitVec<C, T> = self.bitvec.as_mut();
        // Current end of the kept front region.
        let start = bv.len();
        let tail = self.tail_start;
        let tail_len = self.tail_len;
        // Full extent that still holds live tail data.
        let full_len = tail + tail_len;
        // Final length once the tail is moved down.
        let end_len = start + tail_len;
        bv.set_len(full_len);
        for (from, to) in (tail .. full_len).zip(start .. end_len) {
            bv.swap(from, to);
        }
        bv.set_len(end_len);
    } }
}
/// Owning iterator over a `BitVec`'s bits.
#[repr(C)]
pub struct IntoIter<C, T>
where C: Cursor, T: Bits {
    /// The vector being consumed; its bit-pointer is advanced as iteration
    /// proceeds.
    inner: BitVec<C, T>,
    /// Original buffer start, retained so `Drop` can free the whole
    /// allocation even after `inner`'s region has been advanced.
    slab: *const T,
}
/// Each step borrows the vector as a bit-slice iterator, advances it, and
/// writes the advanced region descriptor back into the vector.
impl<C, T> DoubleEndedIterator for IntoIter<C, T>
where C: Cursor, T: Bits {
    fn next_back(&mut self) -> Option<Self::Item> {
        let mut slice_iter = (*self.inner).into_iter();
        let out = slice_iter.next_back();
        self.inner.pointer = slice_iter.bitptr();
        out
    }
}

impl<C, T> ExactSizeIterator for IntoIter<C, T>
where C: Cursor, T: Bits {}

impl<C, T> FusedIterator for IntoIter<C, T>
where C: Cursor, T: Bits {}

impl<C, T> Iterator for IntoIter<C, T>
where C: Cursor, T: Bits {
    type Item = bool;

    fn next(&mut self) -> Option<Self::Item> {
        let mut slice_iter = (*self.inner).into_iter();
        let out = slice_iter.next();
        self.inner.pointer = slice_iter.bitptr();
        out
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let rem = self.inner.len();
        (rem, Some(rem))
    }

    fn count(self) -> usize {
        self.len()
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        let mut slice_iter = (*self.inner).into_iter();
        let out = slice_iter.nth(n);
        self.inner.pointer = slice_iter.bitptr();
        out
    }

    fn last(mut self) -> Option<Self::Item> {
        self.next_back()
    }
}
impl<C, T> Drop for IntoIter<C, T>
where C: Cursor, T: Bits {
    /// Frees the original allocation via the saved `slab` pointer. The
    /// inner vector's descriptor may have been advanced past the buffer
    /// start, so its own destructor must be suppressed with `mem::forget`.
    fn drop(&mut self) {
        let cap = self.inner.capacity;
        mem::forget(mem::replace(&mut self.inner, BitVec::new()));
        // Length 0: elements need no drop; the Vec just frees the buffer.
        unsafe { Vec::from_raw_parts(self.slab as *mut T, 0, cap) };
    }
}
/// Iterator yielding bits removed by `BitVec::splice` while inserting
/// replacement bits from `splice`.
pub struct Splice<'a, C, T, I>
where C: Cursor, T: 'a + Bits, I: Iterator<Item=bool> {
    /// The underlying removal iterator over the spliced range.
    drain: Drain<'a, C, T>,
    /// The stream of replacement bits.
    splice: I,
}
/// Iteration yields the drained bits; forward iteration additionally feeds
/// one replacement bit into the vector per step.
impl<'a, C, T, I> DoubleEndedIterator for Splice<'a, C, T, I>
where C: Cursor, T: 'a + Bits, I: Iterator<Item=bool> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.drain.next_back()
    }
}

impl<'a, C, T, I> ExactSizeIterator for Splice<'a, C, T, I>
where C: Cursor, T: 'a + Bits, I: Iterator<Item=bool> {}

impl<'a, C, T, I> FusedIterator for Splice<'a, C, T, I>
where C: Cursor, T: 'a + Bits, I: Iterator<Item=bool> {}

impl<'a, C, T, I> Iterator for Splice<'a, C, T, I>
where C: Cursor, T: 'a + Bits, I: Iterator<Item=bool> {
    type Item = bool;

    fn next(&mut self) -> Option<Self::Item> {
        self.drain.next().map(|bit| {
            // For each removed bit, push one replacement bit (if any) into
            // the gap the drain opened.
            if let Some(new_bit) = self.splice.next() {
                unsafe { self.drain.bitvec.as_mut() }.push(new_bit);
            }
            bit
        })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }

    fn count(self) -> usize {
        self.drain.len()
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.drain.nth(n)
    }

    fn last(mut self) -> Option<Self::Item> {
        self.drain.next_back()
    }
}
impl<'a, C, T, I> Drop for Splice<'a, C, T, I>
where C: Cursor, T: 'a + Bits, I: Iterator<Item=bool> {
    /// Inserts any remaining replacement bits, growing the gap as needed,
    /// before the inner `Drain` reattaches the preserved tail.
    fn drop(&mut self) { unsafe {
        // No tail to protect: just append the rest of the replacement.
        if self.drain.tail_len == 0 {
            self.drain.bitvec.as_mut().extend(self.splice.by_ref());
            return;
        }
        // Fill the existing gap; if the stream ran dry, the tail was
        // already slid into place by `fill`.
        if !self.drain.fill(&mut self.splice) {
            return;
        }
        // Gap exhausted but the stream claims more bits: open room for the
        // promised lower bound and fill again.
        let (lower, _) = self.splice.size_hint();
        if lower > 0 {
            self.drain.move_tail(lower);
            if !self.drain.fill(&mut self.splice) {
                return;
            }
        }
        // Collect whatever is left (unknown length), then open exactly that
        // much room and fill one final time.
        let mut remnant = self.splice.by_ref().collect::<Vec<_>>().into_iter();
        if remnant.len() > 0 {
            self.drain.move_tail(remnant.len());
            self.drain.fill(&mut remnant);
        }
    } }
}