#![no_std]
#![deny(clippy::undocumented_unsafe_blocks)]
extern crate alloc;
use alloc::{vec, vec::Vec};
mod block;
mod range;
pub mod on_stack;
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "serde")]
mod serde_impl;
use core::fmt::Write;
use core::fmt::{Binary, Display, Error, Formatter};
use core::cmp::Ordering;
use core::hash::Hash;
use core::iter::{Chain, FusedIterator};
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Index};
use core::ptr::NonNull;
pub use range::IndexRange;
/// Number of bits in one `usize` sub-block.
pub(crate) const BITS: usize = core::mem::size_of::<Block>() * 8;
#[cfg(feature = "serde")]
/// Number of bytes in one `usize` sub-block (used by the serde impl).
pub(crate) const BYTES: usize = core::mem::size_of::<Block>();
pub use block::Block as SimdBlock;
/// The `usize`-sized sub-block type exposed through `as_slice`.
pub type Block = usize;
#[inline]
/// Returns the quotient and remainder of `x / denominator` as a pair.
fn div_rem(x: usize, denominator: usize) -> (usize, usize) {
    let quotient = x / denominator;
    let remainder = x % denominator;
    (quotient, remainder)
}
/// Decomposes a `Vec` into its raw `(pointer, capacity, length)` triple
/// without dropping or freeing the buffer. The caller takes over the
/// allocation (e.g. by later rebuilding it with `Vec::from_raw_parts`).
fn vec_into_parts<T>(vec: Vec<T>) -> (NonNull<T>, usize, usize) {
let mut vec = ManuallyDrop::new(vec);
(
// SAFETY: `Vec::as_mut_ptr` never returns null (for an empty vector it
// is a well-aligned dangling pointer).
unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) },
vec.capacity(),
vec.len(),
)
}
/// A fixed-size set of bits backed by a heap allocation of SIMD-width blocks.
///
/// `data` and `capacity` mirror the raw parts of a `Vec<MaybeUninit<SimdBlock>>`
/// (see `vec_into_parts`); `length` is the number of addressable bits.
#[derive(Debug, Eq)]
pub struct FixedBitSet {
/// Pointer to the block storage; dangling when nothing is allocated.
pub(crate) data: NonNull<MaybeUninit<SimdBlock>>,
/// Allocation capacity in `SimdBlock` units, as reported by the backing `Vec`.
capacity: usize,
/// Length of the bitset in bits.
pub(crate) length: usize,
}
// SAFETY: `FixedBitSet` exclusively owns its heap allocation (the raw pointer
// is never shared), so moving it across threads is as sound as for `Vec`.
unsafe impl Send for FixedBitSet {}
// SAFETY: shared references only permit reads of the owned buffer; all
// mutation requires `&mut self`.
unsafe impl Sync for FixedBitSet {}
impl FixedBitSet {
    /// Creates an empty bitset with zero bits; performs no allocation.
    pub const fn new() -> Self {
        FixedBitSet {
            data: NonNull::dangling(),
            capacity: 0,
            length: 0,
        }
    }

    /// Creates a bitset able to hold `bits` bits, all initially unset.
    pub fn with_capacity(bits: usize) -> Self {
        let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
        blocks += (rem > 0) as usize;
        Self::from_blocks_and_len(vec![SimdBlock::NONE; blocks], bits)
    }

    /// Takes ownership of `data`'s allocation and records a length of `length` bits.
    #[inline]
    fn from_blocks_and_len(data: Vec<SimdBlock>, length: usize) -> Self {
        let (data, capacity, _) = vec_into_parts(data);
        FixedBitSet {
            data: data.cast(),
            capacity,
            length,
        }
    }

    /// Creates a bitset of `bits` bits seeded from `blocks`, one `usize` word
    /// at a time. Extra input words are ignored; missing words stay zero.
    ///
    /// Any bits set at indices `>= bits` in the last partial word are cleared:
    /// every other constructor/mutator upholds the invariant that no bit past
    /// `length` is ever set, and whole-word readers (`is_clear`, `union_count`,
    /// the tail check in `is_subset`, `maximum`) rely on it.
    pub fn with_capacity_and_blocks<I: IntoIterator<Item = Block>>(bits: usize, blocks: I) -> Self {
        let mut bitset = Self::with_capacity(bits);
        for (subblock, value) in bitset.as_mut_slice().iter_mut().zip(blocks.into_iter()) {
            *subblock = value;
        }
        // Mask off stray bits past `bits` in the final partial word so the
        // "no bit beyond `length`" invariant holds.
        let (last_word, rem) = div_rem(bits, BITS);
        if rem > 0 {
            if let Some(word) = bitset.as_mut_slice().get_mut(last_word) {
                *word &= usize::MAX >> (BITS - rem);
            }
        }
        bitset
    }

    /// Grows the bitset to hold at least `bits` bits, zero-filling the new
    /// storage. Does nothing when `bits <= self.len()`.
    #[inline]
    pub fn grow(&mut self, bits: usize) {
        #[cold]
        #[track_caller]
        #[inline(never)]
        fn do_grow(slf: &mut FixedBitSet, bits: usize) {
            // SAFETY: the fill value is fully initialized, so every block is
            // initialized once `grow_inner` returns.
            unsafe { slf.grow_inner(bits, MaybeUninit::new(SimdBlock::NONE)) };
        }
        if bits > self.length {
            do_grow(self, bits);
        }
    }

    /// Resizes the backing storage for `bits` bits, filling fresh blocks with `fill`.
    ///
    /// # Safety
    /// If `fill` is uninitialized, the caller must overwrite every newly added
    /// block before any of them is read.
    #[inline(always)]
    unsafe fn grow_inner(&mut self, bits: usize, fill: MaybeUninit<SimdBlock>) {
        // SAFETY: `data`, `simd_block_len()` and `capacity` are exactly the raw
        // parts this set's buffer was created from, so the `Vec` can be
        // reassembled here.
        let mut data = unsafe {
            Vec::from_raw_parts(self.data.as_ptr(), self.simd_block_len(), self.capacity)
        };
        let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
        blocks += (rem > 0) as usize;
        data.resize(blocks, fill);
        let (data, capacity, _) = vec_into_parts(data);
        self.data = data;
        self.capacity = capacity;
        self.length = bits;
    }

    /// Returns a reference to the `subblock`-th `usize` word.
    ///
    /// # Safety
    /// `subblock` must be within the allocated words.
    #[inline]
    unsafe fn get_unchecked(&self, subblock: usize) -> &Block {
        &*self.data.as_ptr().cast::<Block>().add(subblock)
    }

    /// Mutable counterpart of [`Self::get_unchecked`].
    ///
    /// # Safety
    /// `subblock` must be within the allocated words.
    #[inline]
    unsafe fn get_unchecked_mut(&mut self, subblock: usize) -> &mut Block {
        &mut *self.data.as_ptr().cast::<Block>().add(subblock)
    }

    /// Number of `usize` words needed to cover `self.length` bits.
    #[inline]
    fn usize_len(&self) -> usize {
        let (mut blocks, rem) = div_rem(self.length, BITS);
        blocks += (rem > 0) as usize;
        blocks
    }

    /// Number of SIMD blocks needed to cover `self.length` bits.
    #[inline]
    fn simd_block_len(&self) -> usize {
        let (mut blocks, rem) = div_rem(self.length, SimdBlock::BITS);
        blocks += (rem > 0) as usize;
        blocks
    }

    /// Sums the popcounts of the given words.
    #[inline]
    fn batch_count_ones(blocks: impl IntoIterator<Item = Block>) -> usize {
        blocks.into_iter().map(|x| x.count_ones() as usize).sum()
    }

    /// The storage viewed as initialized SIMD blocks.
    #[inline]
    fn as_simd_slice(&self) -> &[SimdBlock] {
        // SAFETY: `simd_block_len()` blocks are allocated and initialized.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().cast(), self.simd_block_len()) }
    }

    /// Mutable view of the storage as initialized SIMD blocks.
    #[inline]
    fn as_mut_simd_slice(&mut self) -> &mut [SimdBlock] {
        // SAFETY: `simd_block_len()` blocks are allocated and initialized, and
        // `&mut self` guarantees exclusive access.
        unsafe { core::slice::from_raw_parts_mut(self.data.as_ptr().cast(), self.simd_block_len()) }
    }

    /// The storage viewed as possibly-uninitialized SIMD blocks.
    #[inline]
    fn as_simd_slice_uninit(&self) -> &[MaybeUninit<SimdBlock>] {
        // SAFETY: `simd_block_len()` blocks are allocated.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr(), self.simd_block_len()) }
    }

    /// Mutable view of the storage as possibly-uninitialized SIMD blocks.
    #[inline]
    fn as_mut_simd_slice_uninit(&mut self) -> &mut [MaybeUninit<SimdBlock>] {
        // SAFETY: `simd_block_len()` blocks are allocated, and `&mut self`
        // guarantees exclusive access.
        unsafe { core::slice::from_raw_parts_mut(self.data.as_ptr(), self.simd_block_len()) }
    }

    /// Grows the set so that bit `bits` exists, then sets it.
    #[inline]
    pub fn grow_and_insert(&mut self, bits: usize) {
        self.grow(bits + 1);
        let (blocks, rem) = div_rem(bits, BITS);
        // SAFETY: the grow above guarantees word `blocks` is in bounds.
        unsafe {
            *self.get_unchecked_mut(blocks) |= 1 << rem;
        }
    }

    /// The number of bits in the set (its size, not its popcount).
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }

    /// `true` if the set holds zero bits.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// `true` if no bit is set.
    #[inline]
    pub fn is_clear(&self) -> bool {
        self.as_simd_slice().iter().all(|block| block.is_empty())
    }

    /// Index of the lowest set bit, or `None` if the set is clear.
    #[inline]
    pub fn minimum(&self) -> Option<usize> {
        let (block_idx, block) = self
            .as_simd_slice()
            .iter()
            .enumerate()
            .find(|&(_, block)| !block.is_empty())?;
        let mut inner = 0;
        let mut trailing = 0;
        for subblock in block.into_usize_array() {
            if subblock != 0 {
                trailing = subblock.trailing_zeros() as usize;
                break;
            } else {
                inner += BITS;
            }
        }
        Some(block_idx * SimdBlock::BITS + inner + trailing)
    }

    /// Index of the highest set bit, or `None` if the set is clear.
    #[inline]
    pub fn maximum(&self) -> Option<usize> {
        let (block_idx, block) = self
            .as_simd_slice()
            .iter()
            .rev()
            .enumerate()
            .find(|&(_, block)| !block.is_empty())?;
        let mut inner = 0;
        let mut leading = 0;
        for subblock in block.into_usize_array().iter().rev() {
            if *subblock != 0 {
                leading = subblock.leading_zeros() as usize;
                break;
            } else {
                inner += BITS;
            }
        }
        let max = self.simd_block_len() * SimdBlock::BITS;
        Some(max - block_idx * SimdBlock::BITS - inner - leading - 1)
    }

    /// `true` if every bit in `0..self.len()` is set.
    #[inline]
    pub fn is_full(&self) -> bool {
        self.contains_all_in_range(..)
    }

    /// `true` if `bit` is set; out-of-range indices report `false`.
    #[inline]
    pub fn contains(&self, bit: usize) -> bool {
        (bit < self.length)
            // SAFETY: the guard above keeps `bit` within `length`.
            .then(|| unsafe { self.contains_unchecked(bit) })
            .unwrap_or(false)
    }

    /// `true` if `bit` is set, without a bounds check.
    ///
    /// # Safety
    /// `bit` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn contains_unchecked(&self, bit: usize) -> bool {
        let (block, i) = div_rem(bit, BITS);
        (self.get_unchecked(block) & (1 << i)) != 0
    }

    /// Unsets every bit.
    #[inline]
    pub fn clear(&mut self) {
        for elt in self.as_mut_simd_slice().iter_mut() {
            *elt = SimdBlock::NONE
        }
    }

    /// Sets bit `bit`.
    ///
    /// # Panics
    /// Panics when `bit >= self.len()`.
    #[inline]
    pub fn insert(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "insert at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: `bit` was bounds-checked above.
        unsafe {
            self.insert_unchecked(bit);
        }
    }

    /// Sets bit `bit` without a bounds check.
    ///
    /// # Safety
    /// `bit` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn insert_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: the caller guarantees `bit < length`, so `block` is in bounds.
        unsafe {
            *self.get_unchecked_mut(block) |= 1 << i;
        }
    }

    /// Clears bit `bit`.
    ///
    /// # Panics
    /// Panics when `bit >= self.len()`.
    #[inline]
    pub fn remove(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "remove at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: `bit` was bounds-checked above.
        unsafe {
            self.remove_unchecked(bit);
        }
    }

    /// Clears bit `bit` without a bounds check.
    ///
    /// # Safety
    /// `bit` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn remove_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: the caller guarantees `bit < length`, so `block` is in bounds.
        unsafe {
            *self.get_unchecked_mut(block) &= !(1 << i);
        }
    }

    /// Sets bit `bit` and returns its previous state.
    ///
    /// # Panics
    /// Panics when `bit >= self.len()`.
    #[inline]
    pub fn put(&mut self, bit: usize) -> bool {
        assert!(
            bit < self.length,
            "put at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: `bit` was bounds-checked above.
        unsafe { self.put_unchecked(bit) }
    }

    /// Sets bit `bit` and returns its previous state, without a bounds check.
    ///
    /// # Safety
    /// `bit` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn put_unchecked(&mut self, bit: usize) -> bool {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: the caller guarantees `bit < length`, so `block` is in bounds.
        unsafe {
            let word = self.get_unchecked_mut(block);
            let prev = *word & (1 << i) != 0;
            *word |= 1 << i;
            prev
        }
    }

    /// Flips bit `bit`.
    ///
    /// # Panics
    /// Panics when `bit >= self.len()`.
    #[inline]
    pub fn toggle(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "toggle at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: `bit` was bounds-checked above.
        unsafe {
            self.toggle_unchecked(bit);
        }
    }

    /// Flips bit `bit` without a bounds check.
    ///
    /// # Safety
    /// `bit` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn toggle_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: the caller guarantees `bit < length`, so `block` is in bounds.
        unsafe {
            *self.get_unchecked_mut(block) ^= 1 << i;
        }
    }

    /// Sets bit `bit` to `enabled`.
    ///
    /// # Panics
    /// Panics when `bit >= self.len()`.
    #[inline]
    pub fn set(&mut self, bit: usize, enabled: bool) {
        assert!(
            bit < self.length,
            "set at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: `bit` was bounds-checked above.
        unsafe {
            self.set_unchecked(bit, enabled);
        }
    }

    /// Sets bit `bit` to `enabled`, without a bounds check.
    ///
    /// # Safety
    /// `bit` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn set_unchecked(&mut self, bit: usize, enabled: bool) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: the caller guarantees `bit < length`, so `block` is in bounds.
        let elt = unsafe { self.get_unchecked_mut(block) };
        if enabled {
            *elt |= 1 << i;
        } else {
            *elt &= !(1 << i);
        }
    }

    /// Copies the state of bit `from` into bit `to`. An out-of-range `from`
    /// reads as unset.
    ///
    /// # Panics
    /// Panics when `to >= self.len()`.
    #[inline]
    pub fn copy_bit(&mut self, from: usize, to: usize) {
        assert!(
            to < self.length,
            "copy to index {} exceeds fixedbitset size {}",
            to,
            self.length
        );
        let enabled = self.contains(from);
        // SAFETY: `to` was bounds-checked above.
        unsafe { self.set_unchecked(to, enabled) };
    }

    /// Copies the state of bit `from` into bit `to`, without bounds checks.
    ///
    /// # Safety
    /// Both `from` and `to` must be strictly less than `self.len()`.
    #[inline]
    pub unsafe fn copy_bit_unchecked(&mut self, from: usize, to: usize) {
        let enabled = self.contains_unchecked(from);
        self.set_unchecked(to, enabled);
    }

    /// Counts set bits within `range`.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn count_ones<T: IndexRange>(&self, range: T) -> usize {
        Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
            // SAFETY: `Masks` only yields in-bounds word indices.
            unsafe { *self.get_unchecked(block) & mask }
        }))
    }

    /// Counts unset bits within `range`.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn count_zeroes<T: IndexRange>(&self, range: T) -> usize {
        Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
            // SAFETY: `Masks` only yields in-bounds word indices.
            unsafe { !*self.get_unchecked(block) & mask }
        }))
    }

    /// Sets or clears every bit in `range` according to `enabled`.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn set_range<T: IndexRange>(&mut self, range: T, enabled: bool) {
        if enabled {
            self.insert_range(range);
        } else {
            self.remove_range(range);
        }
    }

    /// Sets every bit in `range`.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn insert_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds word indices.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block |= mask;
        }
    }

    /// Clears every bit in `range`.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn remove_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds word indices.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block &= !mask;
        }
    }

    /// Flips every bit in `range`.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn toggle_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds word indices.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block ^= mask;
        }
    }

    /// `true` if every bit in `range` is set.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn contains_all_in_range<T: IndexRange>(&self, range: T) -> bool {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds word indices.
            let block = unsafe { self.get_unchecked(block) };
            if block & mask != mask {
                return false;
            }
        }
        true
    }

    /// `true` if at least one bit in `range` is set.
    ///
    /// # Panics
    /// Panics when the range is out of bounds.
    #[inline]
    pub fn contains_any_in_range<T: IndexRange>(&self, range: T) -> bool {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds word indices.
            let block = unsafe { self.get_unchecked(block) };
            if block & mask != 0 {
                return true;
            }
        }
        false
    }

    /// The bits viewed as a slice of `usize` words (rounded up to whole words).
    #[inline]
    pub fn as_slice(&self) -> &[Block] {
        // SAFETY: `usize_len()` words never exceed the allocated, initialized
        // storage (`simd_block_len() * SimdBlock::USIZE_COUNT` words).
        unsafe {
            let ptr = self.data.as_ptr().cast::<Block>();
            core::slice::from_raw_parts(ptr, self.usize_len())
        }
    }

    /// Mutable view of the bits as a slice of `usize` words.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [Block] {
        // SAFETY: as in `as_slice`, plus `&mut self` gives exclusive access.
        unsafe {
            let ptr = self.data.as_ptr().cast::<Block>();
            core::slice::from_raw_parts_mut(ptr, self.usize_len())
        }
    }

    /// Double-ended iterator over the indices of set bits, ascending.
    #[inline]
    pub fn ones(&self) -> Ones {
        match self.as_slice().split_first() {
            Some((&first_block, rem)) => {
                let (&last_block, rem) = rem.split_last().unwrap_or((&0, rem));
                Ones {
                    bitset_front: first_block,
                    bitset_back: last_block,
                    block_idx_front: 0,
                    block_idx_back: (1 + rem.len()) * BITS,
                    remaining_blocks: rem.iter(),
                }
            }
            None => Ones {
                bitset_front: 0,
                bitset_back: 0,
                block_idx_front: 0,
                block_idx_back: 0,
                remaining_blocks: [].iter(),
            },
        }
    }

    /// Consuming version of [`Self::ones`]; takes ownership of the storage.
    pub fn into_ones(self) -> IntoOnes {
        let ptr = self.data.as_ptr().cast();
        let len = self.simd_block_len() * SimdBlock::USIZE_COUNT;
        // SAFETY: `len` words are allocated and initialized; the slice stays
        // valid because the buffer is kept alive in `_buf` below.
        let slice = unsafe { core::slice::from_raw_parts(ptr, len) };
        // SAFETY: these are the raw parts the buffer was created from. The
        // rebuilt `Vec` is moved into the iterator (keeping the slice valid),
        // and `mem::forget(self)` below prevents a double free.
        let data: Vec<SimdBlock> = unsafe {
            Vec::from_raw_parts(
                self.data.as_ptr().cast(),
                self.simd_block_len(),
                self.capacity,
            )
        };
        let mut iter = slice.iter().copied();
        core::mem::forget(self);
        IntoOnes {
            bitset_front: iter.next().unwrap_or(0),
            bitset_back: iter.next_back().unwrap_or(0),
            block_idx_front: 0,
            block_idx_back: len.saturating_sub(1) * BITS,
            remaining_blocks: iter,
            _buf: data,
        }
    }

    /// Iterator over the indices of unset bits, ascending.
    #[inline]
    pub fn zeroes(&self) -> Zeroes {
        match self.as_slice().split_first() {
            Some((&block, rem)) => Zeroes {
                bitset: !block,
                block_idx: 0,
                len: self.len(),
                remaining_blocks: rem.iter(),
            },
            None => Zeroes {
                bitset: !0,
                block_idx: 0,
                len: self.len(),
                remaining_blocks: [].iter(),
            },
        }
    }

    /// Iterator over bits set in both `self` and `other`.
    pub fn intersection<'a>(&'a self, other: &'a FixedBitSet) -> Intersection<'a> {
        Intersection {
            iter: self.ones(),
            other,
        }
    }

    /// Iterator over bits set in `self` or `other` (or both).
    pub fn union<'a>(&'a self, other: &'a FixedBitSet) -> Union<'a> {
        Union {
            iter: self.ones().chain(other.difference(self)),
        }
    }

    /// Iterator over bits set in `self` but not in `other`.
    pub fn difference<'a>(&'a self, other: &'a FixedBitSet) -> Difference<'a> {
        Difference {
            iter: self.ones(),
            other,
        }
    }

    /// Iterator over bits set in exactly one of `self` and `other`.
    pub fn symmetric_difference<'a>(&'a self, other: &'a FixedBitSet) -> SymmetricDifference<'a> {
        SymmetricDifference {
            iter: self.difference(other).chain(other.difference(self)),
        }
    }

    /// In-place union: `self |= other`, growing `self` if needed.
    pub fn union_with(&mut self, other: &FixedBitSet) {
        if other.len() >= self.len() {
            self.grow(other.len());
        }
        self.as_mut_simd_slice()
            .iter_mut()
            .zip(other.as_simd_slice().iter())
            .for_each(|(x, y)| *x |= *y);
    }

    /// In-place intersection: `self &= other`. `self` keeps its own length;
    /// blocks past `other`'s length are cleared.
    pub fn intersect_with(&mut self, other: &FixedBitSet) {
        let me = self.as_mut_simd_slice();
        let other = other.as_simd_slice();
        me.iter_mut().zip(other.iter()).for_each(|(x, y)| {
            *x &= *y;
        });
        let mn = core::cmp::min(me.len(), other.len());
        for wd in &mut me[mn..] {
            *wd = SimdBlock::NONE;
        }
    }

    /// In-place difference: clears in `self` every bit that is set in `other`.
    pub fn difference_with(&mut self, other: &FixedBitSet) {
        self.as_mut_simd_slice()
            .iter_mut()
            .zip(other.as_simd_slice().iter())
            .for_each(|(x, y)| {
                *x &= !*y;
            });
    }

    /// In-place symmetric difference: `self ^= other`, growing `self` if needed.
    pub fn symmetric_difference_with(&mut self, other: &FixedBitSet) {
        if other.len() >= self.len() {
            self.grow(other.len());
        }
        self.as_mut_simd_slice()
            .iter_mut()
            .zip(other.as_simd_slice().iter())
            .for_each(|(x, y)| {
                *x ^= *y;
            });
    }

    /// Popcount of `self | other` without materializing the union.
    #[inline]
    pub fn union_count(&self, other: &FixedBitSet) -> usize {
        let me = self.as_slice();
        let other = other.as_slice();
        let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x | *y)));
        match other.len().cmp(&me.len()) {
            Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
            Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
            Ordering::Equal => count,
        }
    }

    /// Popcount of `self & other` without materializing the intersection.
    #[inline]
    pub fn intersection_count(&self, other: &FixedBitSet) -> usize {
        Self::batch_count_ones(
            self.as_slice()
                .iter()
                .zip(other.as_slice())
                .map(|(x, y)| (*x & *y)),
        )
    }

    /// Popcount of `self - other` without materializing the difference.
    #[inline]
    pub fn difference_count(&self, other: &FixedBitSet) -> usize {
        Self::batch_count_ones(
            self.as_slice()
                .iter()
                .zip(other.as_slice().iter())
                .map(|(x, y)| (*x & !*y)),
        )
    }

    /// Popcount of `self ^ other` without materializing the result.
    #[inline]
    pub fn symmetric_difference_count(&self, other: &FixedBitSet) -> usize {
        let me = self.as_slice();
        let other = other.as_slice();
        let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x ^ *y)));
        match other.len().cmp(&me.len()) {
            Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
            Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
            Ordering::Equal => count,
        }
    }

    /// `true` if `self` and `other` share no set bits.
    pub fn is_disjoint(&self, other: &FixedBitSet) -> bool {
        self.as_simd_slice()
            .iter()
            .zip(other.as_simd_slice())
            .all(|(x, y)| (*x & *y).is_empty())
    }

    /// `true` if every set bit of `self` is also set in `other`.
    pub fn is_subset(&self, other: &FixedBitSet) -> bool {
        let me = self.as_simd_slice();
        let other = other.as_simd_slice();
        me.iter()
            .zip(other.iter())
            .all(|(x, y)| x.andnot(*y).is_empty())
            && me.iter().skip(other.len()).all(|x| x.is_empty())
    }

    /// `true` if every set bit of `other` is also set in `self`.
    pub fn is_superset(&self, other: &FixedBitSet) -> bool {
        other.is_subset(self)
    }
}
impl Hash for FixedBitSet {
    /// Hashes the bit length followed by the block contents, consistent with `Eq`.
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        Hash::hash(&self.length, state);
        Hash::hash(self.as_simd_slice(), state);
    }
}
impl PartialEq for FixedBitSet {
    /// Equal when both sets have the same bit length and identical blocks.
    fn eq(&self, other: &Self) -> bool {
        if self.length != other.length {
            return false;
        }
        self.as_simd_slice() == other.as_simd_slice()
    }
}
impl PartialOrd for FixedBitSet {
    /// The ordering is total; delegate to [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl Ord for FixedBitSet {
    /// Orders by bit length first, then lexicographically by block contents.
    fn cmp(&self, other: &Self) -> Ordering {
        match self.length.cmp(&other.length) {
            Ordering::Equal => self.as_simd_slice().cmp(other.as_simd_slice()),
            unequal => unequal,
        }
    }
}
impl Default for FixedBitSet {
fn default() -> Self {
Self::new()
}
}
impl Drop for FixedBitSet {
fn drop(&mut self) {
// SAFETY: `data`, `simd_block_len()` and `capacity` are the raw parts of
// the `Vec` this bitset's buffer came from, so rebuilding it here frees
// the allocation exactly once.
drop(unsafe {
Vec::from_raw_parts(self.data.as_ptr(), self.simd_block_len(), self.capacity)
});
}
}
impl Binary for FixedBitSet {
    /// Formats the set as one `0`/`1` digit per bit, starting at bit 0;
    /// the alternate flag (`{:#b}`) prepends `0b`.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        if f.alternate() {
            f.write_str("0b")?;
        }
        for bit in 0..self.length {
            let digit = if self.contains(bit) { '1' } else { '0' };
            f.write_char(digit)?;
        }
        Ok(())
    }
}
impl Display for FixedBitSet {
    /// Delegates to the [`Binary`] representation.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        Binary::fmt(self, f)
    }
}
/// Iterator over bits set in the first set but not in the second
/// (see [`FixedBitSet::difference`]).
pub struct Difference<'a> {
    iter: Ones<'a>,
    other: &'a FixedBitSet,
}
impl<'a> Iterator for Difference<'a> {
    type Item = usize;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Skip over ones that `other` also contains.
        loop {
            let bit = self.iter.next()?;
            if !self.other.contains(bit) {
                return Some(bit);
            }
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
impl<'a> DoubleEndedIterator for Difference<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        loop {
            let bit = self.iter.next_back()?;
            if !self.other.contains(bit) {
                return Some(bit);
            }
        }
    }
}
impl<'a> FusedIterator for Difference<'a> {}
/// Iterator over bits set in exactly one of two bitsets
/// (see [`FixedBitSet::symmetric_difference`]); implemented as the chain of
/// the two one-sided differences.
pub struct SymmetricDifference<'a> {
iter: Chain<Difference<'a>, Difference<'a>>,
}
impl<'a> Iterator for SymmetricDifference<'a> {
type Item = usize;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> DoubleEndedIterator for SymmetricDifference<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a> FusedIterator for SymmetricDifference<'a> {}
/// Iterator over bits set in both sets (see [`FixedBitSet::intersection`]).
pub struct Intersection<'a> {
    iter: Ones<'a>,
    other: &'a FixedBitSet,
}
impl<'a> Iterator for Intersection<'a> {
    type Item = usize;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Keep only the ones that `other` also contains.
        loop {
            let bit = self.iter.next()?;
            if self.other.contains(bit) {
                return Some(bit);
            }
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
impl<'a> DoubleEndedIterator for Intersection<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        loop {
            let bit = self.iter.next_back()?;
            if self.other.contains(bit) {
                return Some(bit);
            }
        }
    }
}
impl<'a> FusedIterator for Intersection<'a> {}
/// Iterator over bits set in either of two bitsets (see [`FixedBitSet::union`]);
/// implemented as `self.ones()` chained with `other.difference(self)` so each
/// bit is yielded once.
pub struct Union<'a> {
iter: Chain<Ones<'a>, Difference<'a>>,
}
impl<'a> Iterator for Union<'a> {
type Item = usize;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> DoubleEndedIterator for Union<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a> FusedIterator for Union<'a> {}
/// Iterator over `(word index, mask)` pairs covering a bit range: for each
/// `usize` word touched by the range it yields a mask with ones in exactly
/// the positions of that word that fall inside the range.
struct Masks {
first_block: usize,
first_mask: usize,
last_block: usize,
last_mask: usize,
}
impl Masks {
/// Builds the mask iterator for `range` within a set of `length` bits.
/// An open start defaults to 0 and an open end to `length`.
///
/// # Panics
/// Panics when `start > end` or `end > length`.
#[inline]
fn new<T: IndexRange>(range: T, length: usize) -> Masks {
let start = range.start().unwrap_or(0);
let end = range.end().unwrap_or(length);
assert!(
start <= end && end <= length,
"invalid range {}..{} for a fixedbitset of size {}",
start,
end,
length
);
let (first_block, first_rem) = div_rem(start, BITS);
let (last_block, last_rem) = div_rem(end, BITS);
Masks {
first_block,
// Ones at and above the range start within the first word.
first_mask: usize::MAX << first_rem,
last_block,
// Ones strictly below the range end within the last word. The
// two-step shift avoids the full-width shift (undefined for usize)
// when `last_rem == 0`, yielding an all-zero mask instead.
last_mask: (usize::MAX >> 1) >> (BITS - last_rem - 1),
}
}
}
impl Iterator for Masks {
type Item = (usize, usize);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.first_block.cmp(&self.last_block) {
// Words before the last: emit the current front mask, then full
// words (`!0`) for the interior.
Ordering::Less => {
let res = (self.first_block, self.first_mask);
self.first_block += 1;
self.first_mask = !0;
Some(res)
}
// Final word: intersect both edge masks; an empty intersection
// (e.g. an empty range) ends iteration immediately.
Ordering::Equal => {
let mask = self.first_mask & self.last_mask;
let res = if mask == 0 {
None
} else {
Some((self.first_block, mask))
};
self.first_block += 1;
res
}
Ordering::Greater => None,
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.first_block..=self.last_block).size_hint()
}
}
impl FusedIterator for Masks {}
// NOTE(review): `size_hint` counts remaining words; it over-counts by one when
// the final word's mask turns out empty — confirm this is acceptable for
// `ExactSizeIterator` consumers.
impl ExactSizeIterator for Masks {}
/// Double-ended iterator over the indices of set bits: ascending from the
/// front, descending from the back (see [`FixedBitSet::ones`]).
pub struct Ones<'a> {
/// Remaining bits of the frontmost word (consumed bits cleared).
bitset_front: usize,
/// Remaining bits of the backmost word (consumed bits cleared).
bitset_back: usize,
/// Bit offset of the front word within the set.
block_idx_front: usize,
/// Bit offset of the back word within the set.
block_idx_back: usize,
/// Words strictly between the front and back words.
remaining_blocks: core::slice::Iter<'a, usize>,
}
impl<'a> Ones<'a> {
/// Clears the lowest set bit of `*n` and returns its position.
/// Assumes `*n != 0`; with zero the `*n - 1` would underflow.
#[inline]
pub fn last_positive_bit_and_unset(n: &mut usize) -> usize {
// Isolate the lowest set bit via the two's-complement trick.
let last_bit = *n & n.wrapping_neg();
let position = last_bit.trailing_zeros();
*n &= *n - 1;
position as usize
}
/// Clears the highest set bit of `*n` and returns its distance from the
/// most-significant end (its leading-zero count). Assumes `*n != 0`;
/// with zero the shift amount would overflow.
#[inline]
fn first_positive_bit_and_unset(n: &mut usize) -> usize {
let bit_idx = n.leading_zeros();
let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
n.bitand_assign(mask);
bit_idx as usize
}
}
impl<'a> DoubleEndedIterator for Ones<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
// Pull words from the back until one containing a set bit is found.
while self.bitset_back == 0 {
match self.remaining_blocks.next_back() {
None => {
// The middle is exhausted; drain the front word from its top end.
if self.bitset_front != 0 {
self.bitset_back = 0;
self.block_idx_back = self.block_idx_front;
return Some(
self.block_idx_front + BITS
- Self::first_positive_bit_and_unset(&mut self.bitset_front)
- 1,
);
} else {
return None;
}
}
Some(next_block) => {
self.bitset_back = *next_block;
self.block_idx_back -= BITS;
}
};
}
// Convert the leading-zero count into an absolute bit index.
Some(
self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
- 1,
)
}
}
impl<'a> Iterator for Ones<'a> {
type Item = usize;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
// Advance through words until one containing a set bit is found.
while self.bitset_front == 0 {
match self.remaining_blocks.next() {
Some(next_block) => {
self.bitset_front = *next_block;
self.block_idx_front += BITS;
}
None => {
// The middle is exhausted; finish by draining the back word.
if self.bitset_back != 0 {
self.block_idx_front = self.block_idx_back;
self.bitset_front = 0;
return Some(
self.block_idx_back
+ Self::last_positive_bit_and_unset(&mut self.bitset_back),
);
} else {
return None;
}
}
};
}
Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// Upper bound: every bit between the two cursors plus both edge words.
(
0,
(Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
)
}
}
impl<'a> FusedIterator for Ones<'a> {}
/// Iterator over the indices of unset bits, ascending (see
/// [`FixedBitSet::zeroes`]).
pub struct Zeroes<'a> {
/// Complemented bits of the current word: a set bit here marks a zero in the set.
bitset: usize,
/// Bit offset of the current word.
block_idx: usize,
/// Total bit length; zeroes at or past this index are padding, not yielded.
len: usize,
remaining_blocks: core::slice::Iter<'a, usize>,
}
impl<'a> Iterator for Zeroes<'a> {
type Item = usize;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
while self.bitset == 0 {
self.bitset = !*self.remaining_blocks.next()?;
self.block_idx += BITS;
}
// Isolate and clear the lowest set bit of the complemented word.
let t = self.bitset & (0_usize).wrapping_sub(self.bitset);
let r = self.bitset.trailing_zeros() as usize;
self.bitset ^= t;
let bit = self.block_idx + r;
// Stop at the logical length; bits in the last word past `len` are padding.
if bit < self.len {
Some(bit)
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.len))
}
}
impl<'a> FusedIterator for Zeroes<'a> {}
impl Clone for FixedBitSet {
#[inline]
fn clone(&self) -> Self {
Self::from_blocks_and_len(Vec::from(self.as_simd_slice()), self.length)
}
/// Clones `source` into `self`, reusing `self`'s allocation when possible.
#[inline]
fn clone_from(&mut self, source: &Self) {
if self.length < source.length {
// SAFETY: growing with an uninitialized fill is sound because every
// block `grow_inner` leaves uninitialized is fully overwritten by
// the `copy_from_slice` below before any read.
unsafe { self.grow_inner(source.length, MaybeUninit::uninit()) };
}
let me = self.as_mut_simd_slice_uninit();
let them = source.as_simd_slice_uninit();
match me.len().cmp(&them.len()) {
Ordering::Greater => {
let (head, tail) = me.split_at_mut(them.len());
head.copy_from_slice(them);
// Zero the surplus blocks so stale bits cannot resurface.
tail.fill(MaybeUninit::new(SimdBlock::NONE));
}
Ordering::Equal => me.copy_from_slice(them),
// `Less` cannot occur: we grew above whenever source was longer.
Ordering::Less => {}
}
self.length = source.length;
}
}
impl Index<usize> for FixedBitSet {
    type Output = bool;
    /// Returns `&true` when `bit` is set and in range, `&false` otherwise.
    #[inline]
    fn index(&self, bit: usize) -> &bool {
        match self.contains(bit) {
            true => &true,
            false => &false,
        }
    }
}
impl Extend<usize> for FixedBitSet {
    /// Inserts every index yielded by `src`, growing the set as needed.
    fn extend<I: IntoIterator<Item = usize>>(&mut self, src: I) {
        for bit in src {
            if bit >= self.len() {
                self.grow(bit + 1);
            }
            self.put(bit);
        }
    }
}
impl FromIterator<usize> for FixedBitSet {
    /// Collects bit indices into a new set sized to the largest index seen.
    fn from_iter<I: IntoIterator<Item = usize>>(src: I) -> Self {
        let mut set = FixedBitSet::with_capacity(0);
        set.extend(src);
        set
    }
}
/// Owning iterator over the indices of set bits (consuming analogue of
/// [`Ones`]; see [`FixedBitSet::into_ones`]).
pub struct IntoOnes {
/// Remaining bits of the frontmost word (consumed bits cleared).
bitset_front: Block,
/// Remaining bits of the backmost word (consumed bits cleared).
bitset_back: Block,
/// Bit offset of the front word.
block_idx_front: usize,
/// Bit offset of the back word.
block_idx_back: usize,
// NOTE(review): this iterator actually borrows `_buf` but its lifetime is
// laundered to 'static in `into_ones`; `_buf` keeps the allocation alive
// for the struct's whole lifetime, and the field order drops the iterator
// before the buffer.
remaining_blocks: core::iter::Copied<core::slice::Iter<'static, usize>>,
/// Keeps the underlying allocation alive while iterating.
_buf: Vec<SimdBlock>,
}
impl IntoOnes {
/// Clears the lowest set bit of `*n` and returns its position.
/// Assumes `*n != 0`; with zero the `*n - 1` would underflow.
#[inline]
pub fn last_positive_bit_and_unset(n: &mut Block) -> usize {
// Isolate the lowest set bit via the two's-complement trick.
let last_bit = *n & n.wrapping_neg();
let position = last_bit.trailing_zeros();
*n &= *n - 1;
position as usize
}
/// Clears the highest set bit of `*n` and returns its leading-zero count.
/// Assumes `*n != 0`; with zero the shift amount would overflow.
#[inline]
fn first_positive_bit_and_unset(n: &mut Block) -> usize {
let bit_idx = n.leading_zeros();
let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
n.bitand_assign(mask);
bit_idx as usize
}
}
impl DoubleEndedIterator for IntoOnes {
fn next_back(&mut self) -> Option<Self::Item> {
// Pull words from the back until one containing a set bit is found.
while self.bitset_back == 0 {
match self.remaining_blocks.next_back() {
None => {
// The middle is exhausted; drain the front word from its top end.
if self.bitset_front != 0 {
self.bitset_back = 0;
self.block_idx_back = self.block_idx_front;
return Some(
self.block_idx_front + BITS
- Self::first_positive_bit_and_unset(&mut self.bitset_front)
- 1,
);
} else {
return None;
}
}
Some(next_block) => {
self.bitset_back = next_block;
self.block_idx_back -= BITS;
}
};
}
// Convert the leading-zero count into an absolute bit index.
Some(
self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
- 1,
)
}
}
impl Iterator for IntoOnes {
type Item = usize;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
// Advance through words until one containing a set bit is found.
while self.bitset_front == 0 {
match self.remaining_blocks.next() {
Some(next_block) => {
self.bitset_front = next_block;
self.block_idx_front += BITS;
}
None => {
// The middle is exhausted; finish by draining the back word.
if self.bitset_back != 0 {
self.block_idx_front = self.block_idx_back;
self.bitset_front = 0;
return Some(
self.block_idx_back
+ Self::last_positive_bit_and_unset(&mut self.bitset_back),
);
} else {
return None;
}
}
};
}
Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// Upper bound: every bit between the two cursors plus both edge words.
(
0,
(Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
)
}
}
impl FusedIterator for IntoOnes {}
impl<'a> BitAnd for &'a FixedBitSet {
    type Output = FixedBitSet;
    /// Returns the intersection; the result length is the shorter input's.
    fn bitand(self, other: &FixedBitSet) -> FixedBitSet {
        let (short, long) = if self.len() <= other.len() {
            (self.as_simd_slice(), other.as_simd_slice())
        } else {
            (other.as_simd_slice(), self.as_simd_slice())
        };
        let mut blocks = short.to_vec();
        for (dst, src) in blocks.iter_mut().zip(long) {
            *dst &= *src;
        }
        FixedBitSet::from_blocks_and_len(blocks, core::cmp::min(self.len(), other.len()))
    }
}
impl BitAndAssign for FixedBitSet {
/// In-place intersection; `self` keeps its own length.
fn bitand_assign(&mut self, other: Self) {
self.intersect_with(&other);
}
}
impl BitAndAssign<&Self> for FixedBitSet {
/// Borrowed-operand variant of the intersection assignment.
fn bitand_assign(&mut self, other: &Self) {
self.intersect_with(other);
}
}
impl<'a> BitOr for &'a FixedBitSet {
    type Output = FixedBitSet;
    /// Returns the union; the result length is the longer input's.
    fn bitor(self, other: &FixedBitSet) -> FixedBitSet {
        let (short, long) = if self.len() <= other.len() {
            (self.as_simd_slice(), other.as_simd_slice())
        } else {
            (other.as_simd_slice(), self.as_simd_slice())
        };
        let mut blocks = long.to_vec();
        for (dst, src) in blocks.iter_mut().zip(short) {
            *dst |= *src;
        }
        FixedBitSet::from_blocks_and_len(blocks, core::cmp::max(self.len(), other.len()))
    }
}
impl BitOrAssign for FixedBitSet {
/// In-place union; grows `self` to `other`'s length if needed.
fn bitor_assign(&mut self, other: Self) {
self.union_with(&other);
}
}
impl BitOrAssign<&Self> for FixedBitSet {
/// Borrowed-operand variant of the union assignment.
fn bitor_assign(&mut self, other: &Self) {
self.union_with(other);
}
}
impl<'a> BitXor for &'a FixedBitSet {
    type Output = FixedBitSet;
    /// Returns the symmetric difference; the result length is the longer input's.
    fn bitxor(self, other: &FixedBitSet) -> FixedBitSet {
        let (short, long) = if self.len() <= other.len() {
            (self.as_simd_slice(), other.as_simd_slice())
        } else {
            (other.as_simd_slice(), self.as_simd_slice())
        };
        let mut blocks = long.to_vec();
        for (dst, src) in blocks.iter_mut().zip(short) {
            *dst ^= *src;
        }
        FixedBitSet::from_blocks_and_len(blocks, core::cmp::max(self.len(), other.len()))
    }
}
impl BitXorAssign for FixedBitSet {
/// In-place symmetric difference; grows `self` to `other`'s length if needed.
fn bitxor_assign(&mut self, other: Self) {
self.symmetric_difference_with(&other);
}
}
impl BitXorAssign<&Self> for FixedBitSet {
/// Borrowed-operand variant of the symmetric-difference assignment.
fn bitxor_assign(&mut self, other: &Self) {
self.symmetric_difference_with(other);
}
}