1#![deny(clippy::undocumented_unsafe_blocks)]
17
18extern crate alloc;
19use alloc::{vec, vec::Vec};
20#[cfg(feature = "serde")]
21extern crate serde;
22#[cfg(feature = "serde")]
23mod serde_impl;
24
25use core::fmt::Write;
26use core::fmt::{Binary, Display, Error, Formatter};
27
28use core::cmp::Ordering;
29use core::hash::Hash;
30use core::iter::{Chain, FusedIterator};
31use core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Index};
32
33use crate::{block, IndexRange};
34
/// Number of bits in one `usize` sub-block.
pub(crate) const BITS: usize = core::mem::size_of::<Block>() * 8;
/// Number of bytes in one `usize` sub-block (used by the serde codec only).
#[cfg(feature = "serde")]
pub(crate) const BYTES: usize = core::mem::size_of::<Block>();

/// Wide storage unit; may span several `usize` words (see `into_usize_array`).
type SimdBlock = block::Block;
/// Narrow storage unit exposed through the public slice APIs.
pub type Block = usize;
41
/// Quotient and remainder of `x / denominator`, computed together.
#[inline]
const fn div_rem(x: usize, denominator: usize) -> (usize, usize) {
    let quotient = x / denominator;
    let remainder = x % denominator;
    (quotient, remainder)
}
46
47#[inline]
48pub const fn get_nblock(bits:usize) -> usize {
49 let (div, rem) = div_rem(bits, SimdBlock::BITS);
50 div + (rem > 0) as usize
51}
52
/// A fixed-capacity bitset backed by an inline array of `NBLOCK` SIMD blocks.
///
/// The compile-time capacity is `NBLOCK * SimdBlock::BITS` bits; the *logical*
/// length in bits is tracked at runtime in `length`.
#[derive(Debug, Eq)]
pub struct FixedBitSet<const NBLOCK: usize> {
    /// Inline bit storage; blocks past `simd_block_len()` are spare capacity.
    pub(crate) data: [SimdBlock; NBLOCK],
    /// Logical size in bits; operations address bits in `0..length`.
    pub(crate) length: usize,
}
68
69impl<const NBLOCK: usize> FixedBitSet<NBLOCK> {
70 pub const fn new() -> Self {
72 FixedBitSet {
73 data: [SimdBlock::NONE; NBLOCK],
74 length: 0,
75 }
76 }
77
78 pub fn with_capacity(bits: usize) -> Self {
81 let blocks = get_nblock(bits);
82 Self::from_blocks_and_len(vec![SimdBlock::NONE; blocks], bits)
83 }
84
85 pub fn with_capacity_and_blocks<I: IntoIterator<Item = Block>>(
99 capacity: usize,
100 blocks: I,
101 ) -> Self {
102 let mut bitset = Self::with_capacity(capacity);
103 for (subblock, value) in bitset.as_mut_slice().iter_mut().zip(blocks.into_iter()) {
104 *subblock = value;
105 }
106 bitset
107 }
108
109 #[inline]
110 pub fn grow(&mut self, bits: usize) {
111 let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
112 blocks += (rem > 0) as usize;
113 if blocks > NBLOCK {
114 panic!("Cannot grow FixedBitSet beyond its capacity");
115 }
116 self.length = bits;
117 }
118
119 #[inline]
120 fn from_blocks_and_len(data: Vec<SimdBlock>, length: usize) -> Self {
121 assert!(length <= NBLOCK * SimdBlock::BITS);
122 let mut new_data = [SimdBlock::NONE; NBLOCK];
123 new_data[..data.len()].copy_from_slice(&data);
124 FixedBitSet {
125 data: new_data,
126 length,
127 }
128 }
129
    /// Grows the set so that `bits` is addressable, then sets that bit.
    #[inline]
    pub fn grow_and_insert(&mut self, bits: usize) {
        self.grow(bits + 1);

        let (blocks, rem) = div_rem(bits, BITS);
        // SAFETY: after `grow(bits + 1)` we have `bits < self.length`, so the
        // `usize` word index `blocks` is below `usize_len()` and in-bounds
        // for the backing array.
        unsafe {
            *self.get_unchecked_mut(blocks) |= 1 << rem;
        }
    }
145
    /// Returns a reference to the `subblock`-th `usize` word of the storage.
    ///
    /// # Safety
    /// `subblock` must be in-bounds when the SIMD blocks are viewed as a
    /// flat array of `usize` words.
    #[inline]
    unsafe fn get_unchecked(&self, subblock: usize) -> &Block {
        // SAFETY (caller contract): `subblock` is in-bounds for `self.data`
        // reinterpreted as `usize` words.
        &*self.data.as_ptr().cast::<Block>().add(subblock)
    }
150
    /// Returns a mutable reference to the `subblock`-th `usize` word.
    ///
    /// # Safety
    /// `subblock` must be in-bounds when the SIMD blocks are viewed as a
    /// flat array of `usize` words.
    #[inline]
    unsafe fn get_unchecked_mut(&mut self, subblock: usize) -> &mut Block {
        // SAFETY (caller contract): `subblock` is in-bounds for `self.data`
        // reinterpreted as `usize` words.
        &mut *self.data.as_mut_ptr().cast::<Block>().add(subblock)
    }
155
156 #[inline]
157 fn usize_len(&self) -> usize {
158 let (mut blocks, rem) = div_rem(self.length, BITS);
159 blocks += (rem > 0) as usize;
160 blocks
161 }
162
163 #[inline]
164 fn simd_block_len(&self) -> usize {
165 let (mut blocks, rem) = div_rem(self.length, SimdBlock::BITS);
166 blocks += (rem > 0) as usize;
167 blocks
168 }
169
170 #[inline]
171 fn batch_count_ones(blocks: impl IntoIterator<Item = Block>) -> usize {
172 blocks.into_iter().map(|x| x.count_ones() as usize).sum()
173 }
174
    /// The occupied SIMD blocks (first `simd_block_len()` of them) as a slice.
    #[inline]
    fn as_simd_slice(&self) -> &[SimdBlock] {
        // SAFETY: `simd_block_len() <= NBLOCK` (length never exceeds the
        // compile-time capacity), so the pointer/length pair stays inside
        // `self.data`, which is valid, aligned and initialized.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().cast(), self.simd_block_len()) }
    }
181
    /// Mutable view of the occupied SIMD blocks.
    #[inline]
    fn as_mut_simd_slice(&mut self) -> &mut [SimdBlock] {
        // SAFETY: same bounds argument as `as_simd_slice`; `&mut self`
        // guarantees exclusive access for the lifetime of the slice.
        unsafe {
            core::slice::from_raw_parts_mut(self.data.as_mut_ptr().cast(), self.simd_block_len())
        }
    }
    /// Logical length of the set in bits. This is *not* the number of set
    /// bits; use `count_ones(..)` for that.
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }
205
206 #[inline]
221 pub fn is_empty(&self) -> bool {
222 self.len() == 0
223 }
224
225 #[inline]
241 pub fn is_clear(&self) -> bool {
242 self.as_simd_slice().iter().all(|block| block.is_empty())
243 }
244
    /// Returns the lowest set bit, or `None` if the set has no set bits.
    #[inline]
    pub fn minimum(&self) -> Option<usize> {
        // First non-empty SIMD block, scanning from the front.
        let (block_idx, block) = self
            .as_simd_slice()
            .iter()
            .enumerate()
            .find(|&(_, block)| !block.is_empty())?;
        let mut inner = 0;
        let mut trailing = 0;
        // Walk the block's `usize` words low-to-high; `inner` accumulates
        // the bits of the all-zero words skipped on the way.
        for subblock in block.into_usize_array() {
            if subblock != 0 {
                trailing = subblock.trailing_zeros() as usize;
                break;
            } else {
                inner += BITS;
            }
        }
        Some(block_idx * SimdBlock::BITS + inner + trailing)
    }
278
    /// Returns the highest set bit, or `None` if the set has no set bits.
    #[inline]
    pub fn maximum(&self) -> Option<usize> {
        // First non-empty SIMD block scanning from the back; because of
        // `rev()`, `block_idx` counts blocks from the end.
        let (block_idx, block) = self
            .as_simd_slice()
            .iter()
            .rev()
            .enumerate()
            .find(|&(_, block)| !block.is_empty())?;
        let mut inner = 0;
        let mut leading = 0;
        // Walk the block's words high-to-low to find the last non-zero one;
        // `inner` accumulates bits of the all-zero words skipped.
        for subblock in block.into_usize_array().iter().rev() {
            if *subblock != 0 {
                leading = subblock.leading_zeros() as usize;
                break;
            } else {
                inner += BITS;
            }
        }
        // All quantities above are distances from the top; convert back to
        // an absolute bit index.
        let max = self.simd_block_len() * SimdBlock::BITS;
        Some(max - block_idx * SimdBlock::BITS - inner - leading - 1)
    }
314
315 #[inline]
328 pub fn is_full(&self) -> bool {
329 self.contains_all_in_range(..)
330 }
331
332 #[inline]
339 pub fn contains(&self, bit: usize) -> bool {
340 (bit < self.length)
341 .then(|| unsafe { self.contains_unchecked(bit) })
343 .unwrap_or(false)
344 }
345
    /// Whether bit `bit` is set, without a bounds check.
    ///
    /// # Safety
    /// `bit` must be less than `self.length`.
    #[inline]
    pub unsafe fn contains_unchecked(&self, bit: usize) -> bool {
        let (block, i) = div_rem(bit, BITS);
        // `bit < length` implies `block < usize_len()`, satisfying
        // `get_unchecked`'s contract.
        (self.get_unchecked(block) & (1 << i)) != 0
    }
359
360 #[inline]
362 pub fn clear(&mut self) {
363 for elt in self.as_mut_simd_slice().iter_mut() {
364 *elt = SimdBlock::NONE
365 }
366 }
367
    /// Sets bit `bit` to 1.
    ///
    /// # Panics
    /// Panics if `bit` is out of range.
    #[inline]
    pub fn insert(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "insert at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: the assert above guarantees `bit < self.length`.
        unsafe {
            self.insert_unchecked(bit);
        }
    }
384
    /// Sets bit `bit` to 1 without a bounds check.
    ///
    /// # Safety
    /// `bit` must be less than `self.length`.
    #[inline]
    pub unsafe fn insert_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: `bit < length` (caller contract) puts the word index
        // `block` in-bounds.
        unsafe {
            *self.get_unchecked_mut(block) |= 1 << i;
        }
    }
397
    /// Sets bit `bit` to 0.
    ///
    /// # Panics
    /// Panics if `bit` is out of range.
    #[inline]
    pub fn remove(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "remove at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: the assert above guarantees `bit < self.length`.
        unsafe {
            self.remove_unchecked(bit);
        }
    }
414
    /// Sets bit `bit` to 0 without a bounds check.
    ///
    /// # Safety
    /// `bit` must be less than `self.length`.
    #[inline]
    pub unsafe fn remove_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: `bit < length` (caller contract) puts the word index
        // `block` in-bounds.
        unsafe {
            *self.get_unchecked_mut(block) &= !(1 << i);
        }
    }
427
    /// Sets bit `bit` and returns whether it was already set.
    ///
    /// # Panics
    /// Panics if `bit` is out of range.
    #[inline]
    pub fn put(&mut self, bit: usize) -> bool {
        assert!(
            bit < self.length,
            "put at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: the assert above guarantees `bit < self.length`.
        unsafe { self.put_unchecked(bit) }
    }
442
    /// Sets bit `bit`, returning its previous value, without a bounds check.
    ///
    /// # Safety
    /// `bit` must be less than `self.length`.
    #[inline]
    pub unsafe fn put_unchecked(&mut self, bit: usize) -> bool {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: `bit < length` (caller contract) puts the word index
        // `block` in-bounds.
        unsafe {
            let word = self.get_unchecked_mut(block);
            let prev = *word & (1 << i) != 0;
            *word |= 1 << i;
            prev
        }
    }
458
    /// Flips bit `bit`.
    ///
    /// # Panics
    /// Panics if `bit` is out of range.
    #[inline]
    pub fn toggle(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "toggle at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: the assert above guarantees `bit < self.length`.
        unsafe {
            self.toggle_unchecked(bit);
        }
    }
475
    /// Flips bit `bit` without a bounds check.
    ///
    /// # Safety
    /// `bit` must be less than `self.length`.
    #[inline]
    pub unsafe fn toggle_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: `bit < length` (caller contract) puts the word index
        // `block` in-bounds.
        unsafe {
            *self.get_unchecked_mut(block) ^= 1 << i;
        }
    }
488
    /// Sets bit `bit` to `enabled`.
    ///
    /// # Panics
    /// Panics if `bit` is out of range.
    #[inline]
    pub fn set(&mut self, bit: usize, enabled: bool) {
        assert!(
            bit < self.length,
            "set at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: the assert above guarantees `bit < self.length`.
        unsafe {
            self.set_unchecked(bit, enabled);
        }
    }
505
    /// Sets bit `bit` to `enabled` without a bounds check.
    ///
    /// # Safety
    /// `bit` must be less than `self.length`.
    #[inline]
    pub unsafe fn set_unchecked(&mut self, bit: usize, enabled: bool) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: `bit < length` (caller contract) puts the word index
        // `block` in-bounds.
        let elt = unsafe { self.get_unchecked_mut(block) };
        if enabled {
            *elt |= 1 << i;
        } else {
            *elt &= !(1 << i);
        }
    }
521
    /// Copies the value of bit `from` into bit `to`. An out-of-range
    /// `from` reads as unset (`contains` semantics).
    ///
    /// # Panics
    /// Panics if `to` is out of range.
    #[inline]
    pub fn copy_bit(&mut self, from: usize, to: usize) {
        assert!(
            to < self.length,
            "copy to index {} exceeds fixedbitset size {}",
            to,
            self.length
        );
        let enabled = self.contains(from);
        // SAFETY: the assert above guarantees `to < self.length`.
        unsafe { self.set_unchecked(to, enabled) };
    }
539
    /// Copies the value of bit `from` into bit `to` without bounds checks.
    ///
    /// # Safety
    /// Both `from` and `to` must be less than `self.length`.
    #[inline]
    pub unsafe fn copy_bit_unchecked(&mut self, from: usize, to: usize) {
        // Caller contract covers both unchecked calls below.
        let enabled = self.contains_unchecked(from);
        self.set_unchecked(to, enabled);
    }
554
    /// Number of set bits within `range`.
    ///
    /// # Panics
    /// Panics if the range is invalid for this set's length.
    #[inline]
    pub fn count_ones<T: IndexRange>(&self, range: T) -> usize {
        Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            unsafe { *self.get_unchecked(block) & mask }
        }))
    }
568
    /// Number of unset bits within `range` (the mask keeps the count
    /// confined to the range itself).
    ///
    /// # Panics
    /// Panics if the range is invalid for this set's length.
    #[inline]
    pub fn count_zeroes<T: IndexRange>(&self, range: T) -> usize {
        Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            unsafe { !*self.get_unchecked(block) & mask }
        }))
    }
582
583 #[inline]
589 pub fn set_range<T: IndexRange>(&mut self, range: T, enabled: bool) {
590 if enabled {
591 self.insert_range(range);
592 } else {
593 self.remove_range(range);
594 }
595 }
596
    /// Sets every bit in `range` to 1.
    #[inline]
    pub fn insert_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block |= mask;
        }
    }
610
    /// Sets every bit in `range` to 0.
    #[inline]
    pub fn remove_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block &= !mask;
        }
    }
624
    /// Flips every bit in `range`.
    #[inline]
    pub fn toggle_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block ^= mask;
        }
    }
638
    /// `true` if every bit of `range` is set (vacuously true for an empty
    /// range).
    #[inline]
    pub fn contains_all_in_range<T: IndexRange>(&self, range: T) -> bool {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            let block = unsafe { self.get_unchecked(block) };
            if block & mask != mask {
                return false;
            }
        }
        true
    }
653
    /// `true` if at least one bit of `range` is set.
    #[inline]
    pub fn contains_any_in_range<T: IndexRange>(&self, range: T) -> bool {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields word indices covered by `length`.
            let block = unsafe { self.get_unchecked(block) };
            if block & mask != 0 {
                return true;
            }
        }
        false
    }
668
    /// Views the occupied storage as `usize` words (`usize_len()` of them).
    #[inline]
    pub fn as_slice(&self) -> &[Block] {
        // SAFETY: `usize_len()` words never exceed the capacity of
        // `self.data` viewed as a flat `usize` array (this relies on
        // `SimdBlock::BITS` being a multiple of `BITS`), so the slice stays
        // within valid, initialized memory.
        unsafe {
            let ptr = self.data.as_ptr().cast::<Block>();
            core::slice::from_raw_parts(ptr, self.usize_len())
        }
    }
681
    /// Mutable view of the occupied storage as `usize` words.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [Block] {
        // SAFETY: same bounds argument as `as_slice`; `&mut self`
        // guarantees exclusive access for the lifetime of the slice.
        unsafe {
            let ptr = self.data.as_mut_ptr().cast::<Block>();
            core::slice::from_raw_parts_mut(ptr, self.usize_len())
        }
    }
695
    /// Double-ended iterator over set-bit positions, lowest first.
    #[inline]
    pub fn ones(&self) -> Ones {
        match self.as_slice().split_first() {
            Some((&first_block, rem)) => {
                // Pre-load one word at each end; `rem` keeps the middle
                // words (empty slice when there are fewer than two words).
                let (&last_block, rem) = rem.split_last().unwrap_or((&0, rem));
                Ones {
                    bitset_front: first_block,
                    bitset_back: last_block,
                    block_idx_front: 0,
                    // Bit offset of the pre-loaded back word: the front
                    // word plus all middle words precede it.
                    block_idx_back: (1 + rem.len()) * BITS,
                    remaining_blocks: rem.iter(),
                }
            }
            None => Ones {
                bitset_front: 0,
                bitset_back: 0,
                block_idx_front: 0,
                block_idx_back: 0,
                remaining_blocks: [].iter(),
            },
        }
    }
721
    /// Consumes the set, yielding an owning iterator over set bits.
    ///
    /// NOTE(review): the `'static` slice iterator below is built from a raw
    /// pointer into `self.data` *before* the array is moved into the
    /// returned struct's `_buf` field. Because the array is inline storage
    /// (not a stable heap allocation), that move relocates the bytes and
    /// leaves the iterator pointing at the old location — this looks like
    /// undefined behavior (dangling reads). Consider storing word indices
    /// into `_buf` instead of a self-referential iterator; verify with Miri.
    pub fn into_ones(self) -> IntoOnes<NBLOCK> {
        let ptr = self.data.as_ptr().cast();
        let len = self.simd_block_len() * SimdBlock::USIZE_COUNT;
        // SAFETY(claimed): `ptr`/`len` are in-bounds of `self.data` at this
        // point — but see the relocation concern in the note above.
        let slice = unsafe { core::slice::from_raw_parts(ptr, len) };
        let mut iter = slice.iter().copied();
        IntoOnes {
            bitset_front: iter.next().unwrap_or(0),
            bitset_back: iter.next_back().unwrap_or(0),
            block_idx_front: 0,
            block_idx_back: len.saturating_sub(1) * BITS,
            remaining_blocks: iter,
            _buf: self.data,
        }
    }
748
749 #[inline]
753 pub fn zeroes(&self) -> Zeroes {
754 match self.as_slice().split_first() {
755 Some((&block, rem)) => Zeroes {
756 bitset: !block,
757 block_idx: 0,
758 len: self.len(),
759 remaining_blocks: rem.iter(),
760 },
761 None => Zeroes {
762 bitset: !0,
763 block_idx: 0,
764 len: self.len(),
765 remaining_blocks: [].iter(),
766 },
767 }
768 }
769
770 pub fn intersection<'a>(&'a self, other: &'a FixedBitSet<NBLOCK>) -> Intersection<'a, NBLOCK> {
772 Intersection {
773 iter: self.ones(),
774 other,
775 }
776 }
777
778 pub fn union<'a>(&'a self, other: &'a FixedBitSet<NBLOCK>) -> Union<'a, NBLOCK> {
780 Union {
781 iter: self.ones().chain(other.difference(self)),
782 }
783 }
784
785 pub fn difference<'a>(&'a self, other: &'a FixedBitSet<NBLOCK>) -> Difference<'a, NBLOCK> {
788 Difference {
789 iter: self.ones(),
790 other,
791 }
792 }
793
794 pub fn symmetric_difference<'a>(
797 &'a self,
798 other: &'a FixedBitSet<NBLOCK>,
799 ) -> SymmetricDifference<'a, NBLOCK> {
800 SymmetricDifference {
801 iter: self.difference(other).chain(other.difference(self)),
802 }
803 }
804
805 pub fn union_with(&mut self, other: &FixedBitSet<NBLOCK>) {
809 if other.len() >= self.len() {
810 self.grow(other.len());
811 }
812 self.as_mut_simd_slice()
813 .iter_mut()
814 .zip(other.as_simd_slice().iter())
815 .for_each(|(x, y)| *x |= *y);
816 }
817
    /// In-place intersection: `self &= other`. Grows `self` to `other`'s
    /// length when needed; any of `self`'s blocks past `other`'s storage
    /// are cleared outright (intersecting with nothing yields nothing).
    pub fn intersect_with(&mut self, other: &FixedBitSet<NBLOCK>) {
        if other.len() >= self.len() {
            self.grow(other.len());
        }
        let me = self.as_mut_simd_slice();
        let other = other.as_simd_slice();
        me.iter_mut().zip(other.iter()).for_each(|(x, y)| {
            *x &= *y;
        });
        // Zero the tail of `self` that `zip` above did not visit.
        let mn = core::cmp::min(me.len(), other.len());
        for wd in &mut me[mn..] {
            *wd = SimdBlock::NONE;
        }
    }
835
836 pub fn difference_with(&mut self, other: &FixedBitSet<NBLOCK>) {
840 if other.len() >= self.len() {
841 self.grow(other.len());
842 }
843 self.as_mut_simd_slice()
844 .iter_mut()
845 .zip(other.as_simd_slice().iter())
846 .for_each(|(x, y)| {
847 *x &= !*y;
848 });
849
850 }
857
858 pub fn symmetric_difference_with(&mut self, other: &FixedBitSet<NBLOCK>) {
862 if other.len() >= self.len() {
863 self.grow(other.len());
864 }
865 self.as_mut_simd_slice()
866 .iter_mut()
867 .zip(other.as_simd_slice().iter())
868 .for_each(|(x, y)| {
869 *x ^= *y;
870 });
871 }
872
873 #[inline]
879 pub fn union_count(&self, other: &FixedBitSet<NBLOCK>) -> usize {
880 let me = self.as_slice();
881 let other = other.as_slice();
882 let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x | *y)));
883 match other.len().cmp(&me.len()) {
884 Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
885 Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
886 Ordering::Equal => count,
887 }
888 }
889
890 #[inline]
896 pub fn intersection_count(&self, other: &FixedBitSet<NBLOCK>) -> usize {
897 Self::batch_count_ones(
898 self.as_slice()
899 .iter()
900 .zip(other.as_slice())
901 .map(|(x, y)| (*x & *y)),
902 )
903 }
904
905 #[inline]
911 pub fn difference_count(&self, other: &FixedBitSet<NBLOCK>) -> usize {
912 Self::batch_count_ones(
913 self.as_slice()
914 .iter()
915 .zip(other.as_slice().iter())
916 .map(|(x, y)| (*x & !*y)),
917 )
918 }
919
920 #[inline]
926 pub fn symmetric_difference_count(&self, other: &FixedBitSet<NBLOCK>) -> usize {
927 let me = self.as_slice();
928 let other = other.as_slice();
929 let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x ^ *y)));
930 match other.len().cmp(&me.len()) {
931 Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
932 Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
933 Ordering::Equal => count,
934 }
935 }
936
937 pub fn is_disjoint(&self, other: &FixedBitSet<NBLOCK>) -> bool {
940 self.as_simd_slice()
941 .iter()
942 .zip(other.as_simd_slice())
943 .all(|(x, y)| (*x & *y).is_empty())
944 }
945
946 pub fn is_subset(&self, other: &FixedBitSet<NBLOCK>) -> bool {
949 let me = self.as_simd_slice();
950 let other = other.as_simd_slice();
951 me.iter()
952 .zip(other.iter())
953 .all(|(x, y)| x.andnot(*y).is_empty())
954 && me.iter().skip(other.len()).all(|x| x.is_empty())
955 }
956
    /// `true` when every bit set in `other` is also set in `self`.
    pub fn is_superset(&self, other: &FixedBitSet<NBLOCK>) -> bool {
        other.is_subset(self)
    }
962}
963
// Delegates to the total order defined by `Ord` below.
impl<const NBLOCK: usize> PartialOrd for FixedBitSet<NBLOCK> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
969
// Orders by logical length first, then lexicographically by the occupied
// blocks — consistent with `PartialEq`, which compares the same fields.
impl<const NBLOCK: usize> Ord for FixedBitSet<NBLOCK> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.length
            .cmp(&other.length)
            .then_with(|| self.as_simd_slice().cmp(other.as_simd_slice()))
    }
}
977
// The default set is the empty set (`new`).
impl<const NBLOCK: usize> Default for FixedBitSet<NBLOCK> {
    fn default() -> Self {
        Self::new()
    }
}
983
984impl<const NBLOCK: usize> Binary for FixedBitSet<NBLOCK> {
985 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
986 if f.alternate() {
987 f.write_str("0b")?;
988 }
989
990 for i in 0..self.length {
991 if self[i] {
992 f.write_char('1')?;
993 } else {
994 f.write_char('0')?;
995 }
996 }
997
998 Ok(())
999 }
1000}
1001
1002impl<const NBLOCK: usize> Display for FixedBitSet<NBLOCK> {
1003 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
1004 Binary::fmt(&self, f)
1005 }
1006}
1007
/// Lazy iterator over bits set in the first set but not the second
/// (see `FixedBitSet::difference`).
pub struct Difference<'a, const NBLOCK: usize> {
    iter: Ones<'a>,
    other: &'a FixedBitSet<NBLOCK>,
}
1015
1016impl<'a, const NBLOCK: usize> Iterator for Difference<'a, NBLOCK> {
1017 type Item = usize;
1018
1019 #[inline]
1020 fn next(&mut self) -> Option<Self::Item> {
1021 self.iter.by_ref().find(|&nxt| !self.other.contains(nxt))
1022 }
1023
1024 #[inline]
1025 fn size_hint(&self) -> (usize, Option<usize>) {
1026 self.iter.size_hint()
1027 }
1028}
1029
1030impl<'a, const NBLOCK: usize> DoubleEndedIterator for Difference<'a, NBLOCK> {
1031 fn next_back(&mut self) -> Option<Self::Item> {
1032 self.iter
1033 .by_ref()
1034 .rev()
1035 .find(|&nxt| !self.other.contains(nxt))
1036 }
1037}
1038
1039impl<'a, const NBLOCK: usize> FusedIterator for Difference<'a, NBLOCK> {}
1041
/// Lazy iterator over bits set in exactly one of two sets, built as the
/// chain of the two one-way differences.
pub struct SymmetricDifference<'a, const NBLOCK: usize> {
    iter: Chain<Difference<'a, NBLOCK>, Difference<'a, NBLOCK>>,
}
1048
// Pure delegation: the chained pair of `Difference` iterators already
// yields exactly the symmetric difference.
impl<'a, const NBLOCK: usize> Iterator for SymmetricDifference<'a, NBLOCK> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, const NBLOCK: usize> DoubleEndedIterator for SymmetricDifference<'a, NBLOCK> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

impl<'a, const NBLOCK: usize> FusedIterator for SymmetricDifference<'a, NBLOCK> {}
1071
/// Lazy iterator over bits set in both sets
/// (see `FixedBitSet::intersection`).
pub struct Intersection<'a, const NBLOCK: usize> {
    iter: Ones<'a>,
    other: &'a FixedBitSet<NBLOCK>,
}
1079
1080impl<'a, const NBLOCK: usize> Iterator for Intersection<'a, NBLOCK> {
1081 type Item = usize; #[inline]
1084 fn next(&mut self) -> Option<Self::Item> {
1085 self.iter.by_ref().find(|&nxt| self.other.contains(nxt))
1086 }
1087
1088 #[inline]
1089 fn size_hint(&self) -> (usize, Option<usize>) {
1090 self.iter.size_hint()
1091 }
1092}
1093
1094impl<'a, const NBLOCK: usize> DoubleEndedIterator for Intersection<'a, NBLOCK> {
1095 fn next_back(&mut self) -> Option<Self::Item> {
1096 self.iter
1097 .by_ref()
1098 .rev()
1099 .find(|&nxt| self.other.contains(nxt))
1100 }
1101}
1102
1103impl<'a, const NBLOCK: usize> FusedIterator for Intersection<'a, NBLOCK> {}
1105
/// Lazy iterator over bits set in either set: all of the first set's bits
/// chained with the second's bits that the first lacks.
pub struct Union<'a, const NBLOCK: usize> {
    iter: Chain<Ones<'a>, Difference<'a, NBLOCK>>,
}
1112
// Pure delegation: the chained `Ones` + `Difference` pair already yields
// exactly the union.
impl<'a, const NBLOCK: usize> Iterator for Union<'a, NBLOCK> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, const NBLOCK: usize> DoubleEndedIterator for Union<'a, NBLOCK> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

impl<'a, const NBLOCK: usize> FusedIterator for Union<'a, NBLOCK> {}
1135
/// Iterator yielding `(usize-word index, bit mask)` pairs covering a bit
/// range: the first and last masks may be partial, interior masks are
/// all-ones.
struct Masks {
    /// Next word index to yield.
    first_block: usize,
    /// Mask for the word at `first_block`.
    first_mask: usize,
    /// Last word index of the range.
    last_block: usize,
    /// Mask for the word at `last_block`.
    last_mask: usize,
}
1142
impl Masks {
    /// Builds the mask sequence for `range` over a set of `length` bits.
    ///
    /// # Panics
    /// Panics when `start > end` or `end > length`.
    #[inline]
    fn new<T: IndexRange>(range: T, length: usize) -> Masks {
        let start = range.start().unwrap_or(0);
        let end = range.end().unwrap_or(length);
        assert!(
            start <= end && end <= length,
            "invalid range {}..{} for a FixedBitSet<NBLOCK> of size {}",
            start,
            end,
            length
        );

        let (first_block, first_rem) = div_rem(start, BITS);
        let (last_block, last_rem) = div_rem(end, BITS);

        Masks {
            first_block,
            // Bits at positions `first_rem..` within the first word.
            first_mask: usize::MAX << first_rem,
            last_block,
            // Bits at positions `..last_rem` within the last word. The
            // two-step shift avoids the undefined shift-by-BITS when
            // `last_rem == 0` (the mask then correctly becomes 0).
            last_mask: (usize::MAX >> 1) >> (BITS - last_rem - 1),
        }
    }
}
1168
impl Iterator for Masks {
    type Item = (usize, usize);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first_block.cmp(&self.last_block) {
            Ordering::Less => {
                // Yield the current word; every subsequent interior word
                // is fully covered (`!0`).
                let res = (self.first_block, self.first_mask);
                self.first_block += 1;
                self.first_mask = !0;
                Some(res)
            }
            Ordering::Equal => {
                // Start and end fall in the same word: intersect the two
                // partial masks; an empty intersection means the range
                // holds no bits in this word.
                let mask = self.first_mask & self.last_mask;
                let res = if mask == 0 {
                    None
                } else {
                    Some((self.first_block, mask))
                };
                // Advancing past `last_block` routes every later call to
                // the `Greater` arm, keeping the iterator fused.
                self.first_block += 1;
                res
            }
            Ordering::Greater => None,
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.first_block..=self.last_block).size_hint()
    }
}
1200
impl FusedIterator for Masks {}

// Exact: `size_hint` is derived from an inclusive index range, which
// reports its remaining length precisely.
impl ExactSizeIterator for Masks {}
1207
/// Double-ended iterator over set-bit positions (see `FixedBitSet::ones`).
///
/// Holds one `usize` word per end and pulls fresh words from
/// `remaining_blocks` as each side is drained.
pub struct Ones<'a> {
    /// Unconsumed bits of the word being scanned from the front.
    bitset_front: usize,
    /// Unconsumed bits of the word being scanned from the back.
    bitset_back: usize,
    /// Bit offset of the word held in `bitset_front`.
    block_idx_front: usize,
    /// Bit offset of the word held in `bitset_back`.
    block_idx_back: usize,
    remaining_blocks: core::slice::Iter<'a, usize>,
}
1218
1219impl<'a> Ones<'a> {
1220 #[inline]
1221 pub fn last_positive_bit_and_unset(n: &mut usize) -> usize {
1222 let last_bit = *n & n.wrapping_neg();
1224
1225 let position = last_bit.trailing_zeros();
1227
1228 *n &= *n - 1;
1230
1231 position as usize
1232 }
1233
1234 #[inline]
1235 fn first_positive_bit_and_unset(n: &mut usize) -> usize {
1236 let bit_idx = n.leading_zeros();
1238
1239 let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
1241 n.bitand_assign(mask);
1242
1243 bit_idx as usize
1244 }
1245}
1246
impl<'a> DoubleEndedIterator for Ones<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        // Refill the back word until it has a set bit left.
        while self.bitset_back == 0 {
            match self.remaining_blocks.next_back() {
                None => {
                    // Middle words exhausted: the front word is all that
                    // remains, so both ends now share it.
                    if self.bitset_front != 0 {
                        self.bitset_back = 0;
                        self.block_idx_back = self.block_idx_front;
                        return Some(
                            self.block_idx_front + BITS
                                - Self::first_positive_bit_and_unset(&mut self.bitset_front)
                                - 1,
                        );
                    } else {
                        return None;
                    }
                }
                Some(next_block) => {
                    self.bitset_back = *next_block;
                    self.block_idx_back -= BITS;
                }
            };
        }

        // Highest remaining set bit of the back word, as an absolute index.
        Some(
            self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
                - 1,
        )
    }
}
1277
impl<'a> Iterator for Ones<'a> {
    type Item = usize; #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Refill the front word until it has a set bit left.
        while self.bitset_front == 0 {
            match self.remaining_blocks.next() {
                Some(next_block) => {
                    self.bitset_front = *next_block;
                    self.block_idx_front += BITS;
                }
                None => {
                    // Middle words exhausted: drain the back word from its
                    // low end instead.
                    if self.bitset_back != 0 {
                        self.block_idx_front = self.block_idx_back;
                        self.bitset_front = 0;

                        return Some(
                            self.block_idx_back
                                + Self::last_positive_bit_and_unset(&mut self.bitset_back),
                        );
                    } else {
                        return None;
                    }
                }
            };
        }

        Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound 0 (remaining words may all be empty); the upper
        // bound counts every bit in the words not yet consumed.
        (
            0,
            (Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
        )
    }
}

impl<'a> FusedIterator for Ones<'a> {}
1320
/// Iterator over *unset* bit positions (see `FixedBitSet::zeroes`).
///
/// Scans the complement of each word and stops once the produced index
/// reaches the logical length.
pub struct Zeroes<'a> {
    /// Complement of the word currently being scanned.
    bitset: usize,
    /// Bit offset of the word held in `bitset`.
    block_idx: usize,
    /// Logical length; positions at or past this are never yielded.
    len: usize,
    remaining_blocks: core::slice::Iter<'a, usize>,
}
1330
impl<'a> Iterator for Zeroes<'a> {
    type Item = usize; #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Pull (inverted) words until one contains a zero of the original.
        while self.bitset == 0 {
            self.bitset = !*self.remaining_blocks.next()?;
            self.block_idx += BITS;
        }
        // Isolate (`t`) and locate (`r`) the lowest set bit of the
        // complement, then clear it.
        let t = self.bitset & (0_usize).wrapping_sub(self.bitset);
        let r = self.bitset.trailing_zeros() as usize;
        self.bitset ^= t;
        let bit = self.block_idx + r;
        // The final word may extend past the logical length; positions
        // there are not real zeroes of the set.
        if bit < self.len {
            Some(bit)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.len))
    }
}

impl<'a> FusedIterator for Zeroes<'a> {}
1360
// Read-only bit indexing: `set[bit]`. Out-of-range bits read as `false`
// (mirroring `contains`) rather than panicking.
impl<const NBLOCK: usize> Index<usize> for FixedBitSet<NBLOCK> {
    type Output = bool;

    #[inline]
    fn index(&self, bit: usize) -> &bool {
        if self.contains(bit) {
            &true
        } else {
            &false
        }
    }
}
1378
1379impl<const NBLOCK: usize> Extend<usize> for FixedBitSet<NBLOCK> {
1381 fn extend<I: IntoIterator<Item = usize>>(&mut self, src: I) {
1382 let iter = src.into_iter();
1383 for i in iter {
1384 if i >= self.len() {
1385 self.grow(i + 1);
1386 }
1387 self.put(i);
1388 }
1389 }
1390}
1391
1392impl<const NBLOCK: usize> FromIterator<usize> for FixedBitSet<NBLOCK> {
1395 fn from_iter<I: IntoIterator<Item = usize>>(src: I) -> Self {
1396 let mut fbs = FixedBitSet::new();
1397 fbs.extend(src);
1398 fbs
1399 }
1400}
1401
/// Owning iterator over set bits (see `FixedBitSet::into_ones`).
///
/// NOTE(review): `remaining_blocks` claims `'static` but was created from
/// a raw pointer into the array that is subsequently moved into `_buf`.
/// The iterator's pointers do not follow `_buf` when this struct is built
/// or moved, so they appear to dangle — likely undefined behavior; verify
/// with Miri and consider storing indices into `_buf` instead.
pub struct IntoOnes<const N: usize> {
    bitset_front: Block,
    bitset_back: Block,
    block_idx_front: usize,
    block_idx_back: usize,
    remaining_blocks: core::iter::Copied<core::slice::Iter<'static, usize>>,
    /// Keeps the bit data alive for the iterator's lifetime.
    _buf: [SimdBlock; N],
}
1411
// NOTE(review): these helpers duplicate `Ones`'s; consider sharing them.
impl<const NBLOCK: usize> IntoOnes<NBLOCK> {
    /// Clears the lowest set bit of `*n` and returns its index.
    /// `*n` must be non-zero (`*n - 1` underflows otherwise).
    #[inline]
    pub fn last_positive_bit_and_unset(n: &mut Block) -> usize {
        // Isolate the lowest set bit.
        let last_bit = *n & n.wrapping_neg();

        let position = last_bit.trailing_zeros();

        // Drop the lowest set bit.
        *n &= *n - 1;

        position as usize
    }

    /// Clears the highest set bit of `*n` and returns its distance from
    /// the most-significant end. `*n` must be non-zero.
    #[inline]
    fn first_positive_bit_and_unset(n: &mut Block) -> usize {
        let bit_idx = n.leading_zeros();

        // The highest set bit sits `bit_idx` positions below the MSB.
        let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
        n.bitand_assign(mask);

        bit_idx as usize
    }
}
1439
// Mirrors `Ones::next_back`; see the comments there.
impl<const NBLOCK: usize> DoubleEndedIterator for IntoOnes<NBLOCK> {
    fn next_back(&mut self) -> Option<Self::Item> {
        // Refill the back word until it has a set bit left.
        while self.bitset_back == 0 {
            match self.remaining_blocks.next_back() {
                None => {
                    // Middle words exhausted: both ends share the front word.
                    if self.bitset_front != 0 {
                        self.bitset_back = 0;
                        self.block_idx_back = self.block_idx_front;
                        return Some(
                            self.block_idx_front + BITS
                                - Self::first_positive_bit_and_unset(&mut self.bitset_front)
                                - 1,
                        );
                    } else {
                        return None;
                    }
                }
                Some(next_block) => {
                    self.bitset_back = next_block;
                    self.block_idx_back -= BITS;
                }
            };
        }

        // Highest remaining set bit of the back word, as an absolute index.
        Some(
            self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
                - 1,
        )
    }
}
1470
// Mirrors `Ones::next`; see the comments there.
impl<const NBLOCK: usize> Iterator for IntoOnes<NBLOCK> {
    type Item = usize; #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Refill the front word until it has a set bit left.
        while self.bitset_front == 0 {
            match self.remaining_blocks.next() {
                Some(next_block) => {
                    self.bitset_front = next_block;
                    self.block_idx_front += BITS;
                }
                None => {
                    // Middle words exhausted: drain the back word instead.
                    if self.bitset_back != 0 {
                        self.block_idx_front = self.block_idx_back;
                        self.bitset_front = 0;

                        return Some(
                            self.block_idx_back
                                + Self::last_positive_bit_and_unset(&mut self.bitset_back),
                        );
                    } else {
                        return None;
                    }
                }
            };
        }

        Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound 0; upper bound counts every bit in the words not
        // yet consumed.
        (
            0,
            (Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
        )
    }
}

impl<const NBLOCK: usize> FusedIterator for IntoOnes<NBLOCK> {}
1513
1514impl<'a, const NBLOCK: usize> BitAnd for &'a FixedBitSet<NBLOCK> {
1515 type Output = FixedBitSet<NBLOCK>;
1516 fn bitand(self, other: &FixedBitSet<NBLOCK>) -> FixedBitSet<NBLOCK> {
1517 let (short, long) = {
1518 if self.len() <= other.len() {
1519 (self.as_simd_slice(), other.as_simd_slice())
1520 } else {
1521 (other.as_simd_slice(), self.as_simd_slice())
1522 }
1523 };
1524 let mut data = Vec::from(short);
1525 for (data, block) in data.iter_mut().zip(long.iter()) {
1526 *data &= *block;
1527 }
1528 let len = core::cmp::min(self.len(), other.len());
1529 FixedBitSet::from_blocks_and_len(data, len)
1530 }
1531}
1532
// `a &= b` / `a &= &b`: in-place intersection via `intersect_with`.
impl<const NBLOCK: usize> BitAndAssign for FixedBitSet<NBLOCK> {
    fn bitand_assign(&mut self, other: Self) {
        self.intersect_with(&other);
    }
}

impl<const NBLOCK: usize> BitAndAssign<&Self> for FixedBitSet<NBLOCK> {
    fn bitand_assign(&mut self, other: &Self) {
        self.intersect_with(other);
    }
}
1544
1545impl<'a, const NBLOCK: usize> BitOr for &'a FixedBitSet<NBLOCK> {
1546 type Output = FixedBitSet<NBLOCK>;
1547 fn bitor(self, other: &FixedBitSet<NBLOCK>) -> FixedBitSet<NBLOCK> {
1548 let (short, long) = {
1549 if self.len() <= other.len() {
1550 (self.as_simd_slice(), other.as_simd_slice())
1551 } else {
1552 (other.as_simd_slice(), self.as_simd_slice())
1553 }
1554 };
1555 let mut data = Vec::from(long);
1556 for (data, block) in data.iter_mut().zip(short.iter()) {
1557 *data |= *block;
1558 }
1559 let len = core::cmp::max(self.len(), other.len());
1560 FixedBitSet::from_blocks_and_len(data, len)
1561 }
1562}
1563
// `a |= b` / `a |= &b`: in-place union via `union_with`.
impl<const NBLOCK: usize> BitOrAssign for FixedBitSet<NBLOCK> {
    fn bitor_assign(&mut self, other: Self) {
        self.union_with(&other);
    }
}

impl<const NBLOCK: usize> BitOrAssign<&Self> for FixedBitSet<NBLOCK> {
    fn bitor_assign(&mut self, other: &Self) {
        self.union_with(other);
    }
}
1575
1576impl<'a, const NBLOCK: usize> BitXor for &'a FixedBitSet<NBLOCK> {
1577 type Output = FixedBitSet<NBLOCK>;
1578 fn bitxor(self, other: &FixedBitSet<NBLOCK>) -> FixedBitSet<NBLOCK> {
1579 let (short, long) = {
1580 if self.len() <= other.len() {
1581 (self.as_simd_slice(), other.as_simd_slice())
1582 } else {
1583 (other.as_simd_slice(), self.as_simd_slice())
1584 }
1585 };
1586 let mut data = Vec::from(long);
1587 for (data, block) in data.iter_mut().zip(short.iter()) {
1588 *data ^= *block;
1589 }
1590 let len = core::cmp::max(self.len(), other.len());
1591 FixedBitSet::from_blocks_and_len(data, len)
1592 }
1593}
1594
// `a ^= b` / `a ^= &b`: in-place symmetric difference.
impl<const NBLOCK: usize> BitXorAssign for FixedBitSet<NBLOCK> {
    fn bitxor_assign(&mut self, other: Self) {
        self.symmetric_difference_with(&other);
    }
}

impl<const NBLOCK: usize> BitXorAssign<&Self> for FixedBitSet<NBLOCK> {
    fn bitxor_assign(&mut self, other: &Self) {
        self.symmetric_difference_with(other);
    }
}
1606
// Hash must agree with `PartialEq`: both use `length` plus the occupied
// SIMD blocks only (spare capacity is excluded).
impl<const NBLOCK: usize> Hash for FixedBitSet<NBLOCK> {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.length.hash(state);
        self.as_simd_slice().hash(state);
    }
}
1613
1614impl<const NBLOCK: usize> PartialEq for FixedBitSet<NBLOCK> {
1615 fn eq(&self, other: &Self) -> bool {
1616 self.length == other.length && self.as_simd_slice().eq(other.as_simd_slice())
1617 }
1618}
1619
impl<const NBLOCK: usize> Clone for FixedBitSet<NBLOCK> {
    #[inline]
    fn clone(&self) -> Self {
        // Round-trips through `from_blocks_and_len` so that capacity past
        // the occupied blocks comes back zeroed in the clone.
        Self::from_blocks_and_len(Vec::from(self.as_simd_slice()), self.length)
    }

    #[inline]
    fn clone_from(&mut self, source: &Self) {
        if self.length < source.length {
            self.grow(source.length);
        }
        let me = self.as_mut_simd_slice();
        let them = source.as_simd_slice();
        match me.len().cmp(&them.len()) {
            Ordering::Greater => {
                // `self` occupies more blocks: copy the overlap and zero
                // the excess so stale bits cannot survive.
                let (head, tail) = me.split_at_mut(them.len());
                head.copy_from_slice(them);
                tail.fill(SimdBlock::NONE);
            }
            Ordering::Equal => me.copy_from_slice(them),
            // Unreachable after the `grow` above (`me` then covers at
            // least `source.length` bits); kept as a harmless no-op.
            Ordering::Less => {}
        }
        self.length = source.length;
    }
}