1#![no_std]
17#![deny(clippy::undocumented_unsafe_blocks)]
18
19extern crate alloc;
20use alloc::{vec, vec::Vec};
21
22mod block;
23mod range;
24pub mod on_stack;
25
26#[cfg(feature = "serde")]
27extern crate serde;
28#[cfg(feature = "serde")]
29mod serde_impl;
30
31use core::fmt::Write;
32use core::fmt::{Binary, Display, Error, Formatter};
33
34use core::cmp::Ordering;
35use core::hash::Hash;
36use core::iter::{Chain, FusedIterator};
37use core::mem::ManuallyDrop;
38use core::mem::MaybeUninit;
39use core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Index};
40use core::ptr::NonNull;
41pub use range::IndexRange;
42
/// Number of bits in one `usize` sub-block.
pub(crate) const BITS: usize = core::mem::size_of::<Block>() * 8;
/// Number of bytes in one `usize` sub-block; used by the serde impl.
#[cfg(feature = "serde")]
pub(crate) const BYTES: usize = core::mem::size_of::<Block>();

/// The (possibly SIMD-backed) storage block, re-exported from `block`.
pub use block::Block as SimdBlock;
/// The primitive sub-block type exposed by `as_slice`/`as_mut_slice`.
pub type Block = usize;
49
/// Returns the quotient and remainder of `x / denominator` as a pair.
#[inline]
fn div_rem(x: usize, denominator: usize) -> (usize, usize) {
    // Written as two expressions; the optimizer fuses them into one division.
    let quotient = x / denominator;
    let remainder = x % denominator;
    (quotient, remainder)
}
54
/// Decomposes a `Vec` into its raw `(pointer, capacity, length)` triple
/// without dropping it; the caller assumes ownership of the allocation.
fn vec_into_parts<T>(vec: Vec<T>) -> (NonNull<T>, usize, usize) {
    let mut vec = ManuallyDrop::new(vec);
    (
        // SAFETY: `Vec::as_mut_ptr` never returns null (it returns a
        // dangling-but-non-null pointer for an empty Vec).
        unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) },
        vec.capacity(),
        vec.len(),
    )
}
64
/// A fixed-size set of bits backed by SIMD-sized blocks.
///
/// The storage is a decomposed `Vec<SimdBlock>` (see `vec_into_parts`);
/// the first `simd_block_len()` blocks are always initialized.
#[derive(Debug, Eq)]
pub struct FixedBitSet {
    // Pointer to the block storage; reconstituted into a `Vec` for
    // growth (`grow_inner`) and deallocation (`Drop`).
    pub(crate) data: NonNull<MaybeUninit<SimdBlock>>,
    // Allocation capacity in `SimdBlock`s, as reported by the original `Vec`.
    capacity: usize,
    // Logical size of the set, in bits.
    pub(crate) length: usize,
}
80
// SAFETY: `FixedBitSet` exclusively owns its heap allocation and has no
// interior mutability, so moving it to another thread is sound.
unsafe impl Send for FixedBitSet {}
// SAFETY: all access through `&FixedBitSet` is read-only.
unsafe impl Sync for FixedBitSet {}
86
87impl FixedBitSet {
88 pub const fn new() -> Self {
90 FixedBitSet {
91 data: NonNull::dangling(),
92 capacity: 0,
93 length: 0,
94 }
95 }
96
97 pub fn with_capacity(bits: usize) -> Self {
100 let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
101 blocks += (rem > 0) as usize;
102 Self::from_blocks_and_len(vec![SimdBlock::NONE; blocks], bits)
103 }
104
105 #[inline]
106 fn from_blocks_and_len(data: Vec<SimdBlock>, length: usize) -> Self {
107 let (data, capacity, _) = vec_into_parts(data);
108 FixedBitSet {
109 data: data.cast(),
110 capacity,
111 length,
112 }
113 }
114
115 pub fn with_capacity_and_blocks<I: IntoIterator<Item = Block>>(bits: usize, blocks: I) -> Self {
129 let mut bitset = Self::with_capacity(bits);
130 for (subblock, value) in bitset.as_mut_slice().iter_mut().zip(blocks.into_iter()) {
131 *subblock = value;
132 }
133 bitset
134 }
135
136 #[inline]
138 pub fn grow(&mut self, bits: usize) {
139 #[cold]
140 #[track_caller]
141 #[inline(never)]
142 fn do_grow(slf: &mut FixedBitSet, bits: usize) {
143 unsafe { slf.grow_inner(bits, MaybeUninit::new(SimdBlock::NONE)) };
145 }
146
147 if bits > self.length {
148 do_grow(self, bits);
149 }
150 }
151
152 #[inline(always)]
156 unsafe fn grow_inner(&mut self, bits: usize, fill: MaybeUninit<SimdBlock>) {
157 let mut data = unsafe {
160 Vec::from_raw_parts(self.data.as_ptr(), self.simd_block_len(), self.capacity)
161 };
162 let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
163 blocks += (rem > 0) as usize;
164 data.resize(blocks, fill);
165 let (data, capacity, _) = vec_into_parts(data);
166 self.data = data;
167 self.capacity = capacity;
168 self.length = bits;
169 }
170
171 #[inline]
172 unsafe fn get_unchecked(&self, subblock: usize) -> &Block {
173 &*self.data.as_ptr().cast::<Block>().add(subblock)
174 }
175
176 #[inline]
177 unsafe fn get_unchecked_mut(&mut self, subblock: usize) -> &mut Block {
178 &mut *self.data.as_ptr().cast::<Block>().add(subblock)
179 }
180
181 #[inline]
182 fn usize_len(&self) -> usize {
183 let (mut blocks, rem) = div_rem(self.length, BITS);
184 blocks += (rem > 0) as usize;
185 blocks
186 }
187
188 #[inline]
189 fn simd_block_len(&self) -> usize {
190 let (mut blocks, rem) = div_rem(self.length, SimdBlock::BITS);
191 blocks += (rem > 0) as usize;
192 blocks
193 }
194
195 #[inline]
196 fn batch_count_ones(blocks: impl IntoIterator<Item = Block>) -> usize {
197 blocks.into_iter().map(|x| x.count_ones() as usize).sum()
198 }
199
200 #[inline]
201 fn as_simd_slice(&self) -> &[SimdBlock] {
202 unsafe { core::slice::from_raw_parts(self.data.as_ptr().cast(), self.simd_block_len()) }
205 }
206
207 #[inline]
208 fn as_mut_simd_slice(&mut self) -> &mut [SimdBlock] {
209 unsafe { core::slice::from_raw_parts_mut(self.data.as_ptr().cast(), self.simd_block_len()) }
212 }
213
214 #[inline]
215 fn as_simd_slice_uninit(&self) -> &[MaybeUninit<SimdBlock>] {
216 unsafe { core::slice::from_raw_parts(self.data.as_ptr(), self.simd_block_len()) }
219 }
220
221 #[inline]
222 fn as_mut_simd_slice_uninit(&mut self) -> &mut [MaybeUninit<SimdBlock>] {
223 unsafe { core::slice::from_raw_parts_mut(self.data.as_ptr(), self.simd_block_len()) }
226 }
227
228 #[inline]
234 pub fn grow_and_insert(&mut self, bits: usize) {
235 self.grow(bits + 1);
236
237 let (blocks, rem) = div_rem(bits, BITS);
238 unsafe {
240 *self.get_unchecked_mut(blocks) |= 1 << rem;
241 }
242 }
243
244 #[inline]
256 pub fn len(&self) -> usize {
257 self.length
258 }
259
260 #[inline]
275 pub fn is_empty(&self) -> bool {
276 self.len() == 0
277 }
278
279 #[inline]
295 pub fn is_clear(&self) -> bool {
296 self.as_simd_slice().iter().all(|block| block.is_empty())
297 }
298
299 #[inline]
314 pub fn minimum(&self) -> Option<usize> {
315 let (block_idx, block) = self
316 .as_simd_slice()
317 .iter()
318 .enumerate()
319 .find(|&(_, block)| !block.is_empty())?;
320 let mut inner = 0;
321 let mut trailing = 0;
322 for subblock in block.into_usize_array() {
323 if subblock != 0 {
324 trailing = subblock.trailing_zeros() as usize;
325 break;
326 } else {
327 inner += BITS;
328 }
329 }
330 Some(block_idx * SimdBlock::BITS + inner + trailing)
331 }
332
333 #[inline]
348 pub fn maximum(&self) -> Option<usize> {
349 let (block_idx, block) = self
350 .as_simd_slice()
351 .iter()
352 .rev()
353 .enumerate()
354 .find(|&(_, block)| !block.is_empty())?;
355 let mut inner = 0;
356 let mut leading = 0;
357 for subblock in block.into_usize_array().iter().rev() {
358 if *subblock != 0 {
359 leading = subblock.leading_zeros() as usize;
360 break;
361 } else {
362 inner += BITS;
363 }
364 }
365 let max = self.simd_block_len() * SimdBlock::BITS;
366 Some(max - block_idx * SimdBlock::BITS - inner - leading - 1)
367 }
368
369 #[inline]
382 pub fn is_full(&self) -> bool {
383 self.contains_all_in_range(..)
384 }
385
386 #[inline]
393 pub fn contains(&self, bit: usize) -> bool {
394 (bit < self.length)
395 .then(|| unsafe { self.contains_unchecked(bit) })
397 .unwrap_or(false)
398 }
399
400 #[inline]
409 pub unsafe fn contains_unchecked(&self, bit: usize) -> bool {
410 let (block, i) = div_rem(bit, BITS);
411 (self.get_unchecked(block) & (1 << i)) != 0
412 }
413
414 #[inline]
416 pub fn clear(&mut self) {
417 for elt in self.as_mut_simd_slice().iter_mut() {
418 *elt = SimdBlock::NONE
419 }
420 }
421
422 #[inline]
426 pub fn insert(&mut self, bit: usize) {
427 assert!(
428 bit < self.length,
429 "insert at index {} exceeds fixedbitset size {}",
430 bit,
431 self.length
432 );
433 unsafe {
435 self.insert_unchecked(bit);
436 }
437 }
438
439 #[inline]
444 pub unsafe fn insert_unchecked(&mut self, bit: usize) {
445 let (block, i) = div_rem(bit, BITS);
446 unsafe {
448 *self.get_unchecked_mut(block) |= 1 << i;
449 }
450 }
451
452 #[inline]
456 pub fn remove(&mut self, bit: usize) {
457 assert!(
458 bit < self.length,
459 "remove at index {} exceeds fixedbitset size {}",
460 bit,
461 self.length
462 );
463 unsafe {
465 self.remove_unchecked(bit);
466 }
467 }
468
469 #[inline]
474 pub unsafe fn remove_unchecked(&mut self, bit: usize) {
475 let (block, i) = div_rem(bit, BITS);
476 unsafe {
478 *self.get_unchecked_mut(block) &= !(1 << i);
479 }
480 }
481
482 #[inline]
486 pub fn put(&mut self, bit: usize) -> bool {
487 assert!(
488 bit < self.length,
489 "put at index {} exceeds fixedbitset size {}",
490 bit,
491 self.length
492 );
493 unsafe { self.put_unchecked(bit) }
495 }
496
497 #[inline]
502 pub unsafe fn put_unchecked(&mut self, bit: usize) -> bool {
503 let (block, i) = div_rem(bit, BITS);
504 unsafe {
506 let word = self.get_unchecked_mut(block);
507 let prev = *word & (1 << i) != 0;
508 *word |= 1 << i;
509 prev
510 }
511 }
512
513 #[inline]
517 pub fn toggle(&mut self, bit: usize) {
518 assert!(
519 bit < self.length,
520 "toggle at index {} exceeds fixedbitset size {}",
521 bit,
522 self.length
523 );
524 unsafe {
526 self.toggle_unchecked(bit);
527 }
528 }
529
530 #[inline]
535 pub unsafe fn toggle_unchecked(&mut self, bit: usize) {
536 let (block, i) = div_rem(bit, BITS);
537 unsafe {
539 *self.get_unchecked_mut(block) ^= 1 << i;
540 }
541 }
542
543 #[inline]
547 pub fn set(&mut self, bit: usize, enabled: bool) {
548 assert!(
549 bit < self.length,
550 "set at index {} exceeds fixedbitset size {}",
551 bit,
552 self.length
553 );
554 unsafe {
556 self.set_unchecked(bit, enabled);
557 }
558 }
559
560 #[inline]
565 pub unsafe fn set_unchecked(&mut self, bit: usize, enabled: bool) {
566 let (block, i) = div_rem(bit, BITS);
567 let elt = unsafe { self.get_unchecked_mut(block) };
569 if enabled {
570 *elt |= 1 << i;
571 } else {
572 *elt &= !(1 << i);
573 }
574 }
575
576 #[inline]
582 pub fn copy_bit(&mut self, from: usize, to: usize) {
583 assert!(
584 to < self.length,
585 "copy to index {} exceeds fixedbitset size {}",
586 to,
587 self.length
588 );
589 let enabled = self.contains(from);
590 unsafe { self.set_unchecked(to, enabled) };
592 }
593
594 #[inline]
602 pub unsafe fn copy_bit_unchecked(&mut self, from: usize, to: usize) {
603 let enabled = self.contains_unchecked(from);
605 self.set_unchecked(to, enabled);
607 }
608
609 #[inline]
616 pub fn count_ones<T: IndexRange>(&self, range: T) -> usize {
617 Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
618 unsafe { *self.get_unchecked(block) & mask }
620 }))
621 }
622
623 #[inline]
630 pub fn count_zeroes<T: IndexRange>(&self, range: T) -> usize {
631 Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
632 unsafe { !*self.get_unchecked(block) & mask }
634 }))
635 }
636
637 #[inline]
643 pub fn set_range<T: IndexRange>(&mut self, range: T, enabled: bool) {
644 if enabled {
645 self.insert_range(range);
646 } else {
647 self.remove_range(range);
648 }
649 }
650
651 #[inline]
657 pub fn insert_range<T: IndexRange>(&mut self, range: T) {
658 for (block, mask) in Masks::new(range, self.length) {
659 let block = unsafe { self.get_unchecked_mut(block) };
661 *block |= mask;
662 }
663 }
664
665 #[inline]
671 pub fn remove_range<T: IndexRange>(&mut self, range: T) {
672 for (block, mask) in Masks::new(range, self.length) {
673 let block = unsafe { self.get_unchecked_mut(block) };
675 *block &= !mask;
676 }
677 }
678
679 #[inline]
685 pub fn toggle_range<T: IndexRange>(&mut self, range: T) {
686 for (block, mask) in Masks::new(range, self.length) {
687 let block = unsafe { self.get_unchecked_mut(block) };
689 *block ^= mask;
690 }
691 }
692
693 #[inline]
697 pub fn contains_all_in_range<T: IndexRange>(&self, range: T) -> bool {
698 for (block, mask) in Masks::new(range, self.length) {
699 let block = unsafe { self.get_unchecked(block) };
701 if block & mask != mask {
702 return false;
703 }
704 }
705 true
706 }
707
708 #[inline]
712 pub fn contains_any_in_range<T: IndexRange>(&self, range: T) -> bool {
713 for (block, mask) in Masks::new(range, self.length) {
714 let block = unsafe { self.get_unchecked(block) };
716 if block & mask != 0 {
717 return true;
718 }
719 }
720 false
721 }
722
723 #[inline]
725 pub fn as_slice(&self) -> &[Block] {
726 unsafe {
731 let ptr = self.data.as_ptr().cast::<Block>();
732 core::slice::from_raw_parts(ptr, self.usize_len())
733 }
734 }
735
736 #[inline]
739 pub fn as_mut_slice(&mut self) -> &mut [Block] {
740 unsafe {
745 let ptr = self.data.as_ptr().cast::<Block>();
746 core::slice::from_raw_parts_mut(ptr, self.usize_len())
747 }
748 }
749
750 #[inline]
754 pub fn ones(&self) -> Ones {
755 match self.as_slice().split_first() {
756 Some((&first_block, rem)) => {
757 let (&last_block, rem) = rem.split_last().unwrap_or((&0, rem));
758 Ones {
759 bitset_front: first_block,
760 bitset_back: last_block,
761 block_idx_front: 0,
762 block_idx_back: (1 + rem.len()) * BITS,
763 remaining_blocks: rem.iter(),
764 }
765 }
766 None => Ones {
767 bitset_front: 0,
768 bitset_back: 0,
769 block_idx_front: 0,
770 block_idx_back: 0,
771 remaining_blocks: [].iter(),
772 },
773 }
774 }
775
776 pub fn into_ones(self) -> IntoOnes {
781 let ptr = self.data.as_ptr().cast();
782 let len = self.simd_block_len() * SimdBlock::USIZE_COUNT;
783 let slice = unsafe { core::slice::from_raw_parts(ptr, len) };
790 let data: Vec<SimdBlock> = unsafe {
793 Vec::from_raw_parts(
794 self.data.as_ptr().cast(),
795 self.simd_block_len(),
796 self.capacity,
797 )
798 };
799 let mut iter = slice.iter().copied();
800
801 core::mem::forget(self);
802
803 IntoOnes {
804 bitset_front: iter.next().unwrap_or(0),
805 bitset_back: iter.next_back().unwrap_or(0),
806 block_idx_front: 0,
807 block_idx_back: len.saturating_sub(1) * BITS,
808 remaining_blocks: iter,
809 _buf: data,
810 }
811 }
812
813 #[inline]
817 pub fn zeroes(&self) -> Zeroes {
818 match self.as_slice().split_first() {
819 Some((&block, rem)) => Zeroes {
820 bitset: !block,
821 block_idx: 0,
822 len: self.len(),
823 remaining_blocks: rem.iter(),
824 },
825 None => Zeroes {
826 bitset: !0,
827 block_idx: 0,
828 len: self.len(),
829 remaining_blocks: [].iter(),
830 },
831 }
832 }
833
834 pub fn intersection<'a>(&'a self, other: &'a FixedBitSet) -> Intersection<'a> {
836 Intersection {
837 iter: self.ones(),
838 other,
839 }
840 }
841
842 pub fn union<'a>(&'a self, other: &'a FixedBitSet) -> Union<'a> {
844 Union {
845 iter: self.ones().chain(other.difference(self)),
846 }
847 }
848
849 pub fn difference<'a>(&'a self, other: &'a FixedBitSet) -> Difference<'a> {
852 Difference {
853 iter: self.ones(),
854 other,
855 }
856 }
857
858 pub fn symmetric_difference<'a>(&'a self, other: &'a FixedBitSet) -> SymmetricDifference<'a> {
861 SymmetricDifference {
862 iter: self.difference(other).chain(other.difference(self)),
863 }
864 }
865
866 pub fn union_with(&mut self, other: &FixedBitSet) {
870 if other.len() >= self.len() {
871 self.grow(other.len());
872 }
873 self.as_mut_simd_slice()
874 .iter_mut()
875 .zip(other.as_simd_slice().iter())
876 .for_each(|(x, y)| *x |= *y);
877 }
878
879 pub fn intersect_with(&mut self, other: &FixedBitSet) {
883 let me = self.as_mut_simd_slice();
884 let other = other.as_simd_slice();
885 me.iter_mut().zip(other.iter()).for_each(|(x, y)| {
886 *x &= *y;
887 });
888 let mn = core::cmp::min(me.len(), other.len());
889 for wd in &mut me[mn..] {
890 *wd = SimdBlock::NONE;
891 }
892 }
893
894 pub fn difference_with(&mut self, other: &FixedBitSet) {
898 self.as_mut_simd_slice()
899 .iter_mut()
900 .zip(other.as_simd_slice().iter())
901 .for_each(|(x, y)| {
902 *x &= !*y;
903 });
904
905 }
912
913 pub fn symmetric_difference_with(&mut self, other: &FixedBitSet) {
917 if other.len() >= self.len() {
918 self.grow(other.len());
919 }
920 self.as_mut_simd_slice()
921 .iter_mut()
922 .zip(other.as_simd_slice().iter())
923 .for_each(|(x, y)| {
924 *x ^= *y;
925 });
926 }
927
928 #[inline]
934 pub fn union_count(&self, other: &FixedBitSet) -> usize {
935 let me = self.as_slice();
936 let other = other.as_slice();
937 let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x | *y)));
938 match other.len().cmp(&me.len()) {
939 Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
940 Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
941 Ordering::Equal => count,
942 }
943 }
944
945 #[inline]
951 pub fn intersection_count(&self, other: &FixedBitSet) -> usize {
952 Self::batch_count_ones(
953 self.as_slice()
954 .iter()
955 .zip(other.as_slice())
956 .map(|(x, y)| (*x & *y)),
957 )
958 }
959
960 #[inline]
966 pub fn difference_count(&self, other: &FixedBitSet) -> usize {
967 Self::batch_count_ones(
968 self.as_slice()
969 .iter()
970 .zip(other.as_slice().iter())
971 .map(|(x, y)| (*x & !*y)),
972 )
973 }
974
975 #[inline]
981 pub fn symmetric_difference_count(&self, other: &FixedBitSet) -> usize {
982 let me = self.as_slice();
983 let other = other.as_slice();
984 let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x ^ *y)));
985 match other.len().cmp(&me.len()) {
986 Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
987 Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
988 Ordering::Equal => count,
989 }
990 }
991
992 pub fn is_disjoint(&self, other: &FixedBitSet) -> bool {
995 self.as_simd_slice()
996 .iter()
997 .zip(other.as_simd_slice())
998 .all(|(x, y)| (*x & *y).is_empty())
999 }
1000
1001 pub fn is_subset(&self, other: &FixedBitSet) -> bool {
1004 let me = self.as_simd_slice();
1005 let other = other.as_simd_slice();
1006 me.iter()
1007 .zip(other.iter())
1008 .all(|(x, y)| x.andnot(*y).is_empty())
1009 && me.iter().skip(other.len()).all(|x| x.is_empty())
1010 }
1011
1012 pub fn is_superset(&self, other: &FixedBitSet) -> bool {
1015 other.is_subset(self)
1016 }
1017}
1018
1019impl Hash for FixedBitSet {
1020 fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
1021 self.length.hash(state);
1022 self.as_simd_slice().hash(state);
1023 }
1024}
1025
1026impl PartialEq for FixedBitSet {
1027 fn eq(&self, other: &Self) -> bool {
1028 self.length == other.length && self.as_simd_slice().eq(other.as_simd_slice())
1029 }
1030}
1031
impl PartialOrd for FixedBitSet {
    /// Delegates to the total order defined by [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
1037
impl Ord for FixedBitSet {
    /// Orders by bit-length first, then by block contents.
    fn cmp(&self, other: &Self) -> Ordering {
        self.length
            .cmp(&other.length)
            .then_with(|| self.as_simd_slice().cmp(other.as_simd_slice()))
    }
}
1045
impl Default for FixedBitSet {
    /// Returns an empty bitset of zero bits (no allocation).
    fn default() -> Self {
        Self::new()
    }
}
1051
impl Drop for FixedBitSet {
    fn drop(&mut self) {
        // SAFETY: `data`, `simd_block_len()` and `capacity` are the parts
        // produced by `vec_into_parts` when this bitset was built or grown,
        // so rebuilding the Vec here deallocates the storage exactly once.
        drop(unsafe {
            Vec::from_raw_parts(self.data.as_ptr(), self.simd_block_len(), self.capacity)
        });
    }
}
1061
1062impl Binary for FixedBitSet {
1063 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
1064 if f.alternate() {
1065 f.write_str("0b")?;
1066 }
1067
1068 for i in 0..self.length {
1069 if self[i] {
1070 f.write_char('1')?;
1071 } else {
1072 f.write_char('0')?;
1073 }
1074 }
1075
1076 Ok(())
1077 }
1078}
1079
impl Display for FixedBitSet {
    /// Same output as the [`Binary`] formatting: a string of `0`s and `1`s.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        Binary::fmt(&self, f)
    }
}
1085
/// Lazy iterator over bits set in `self` but not in `other`; created by
/// [`FixedBitSet::difference`]. Yields indices in ascending order.
pub struct Difference<'a> {
    iter: Ones<'a>,
    other: &'a FixedBitSet,
}

impl<'a> Iterator for Difference<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Skip ones of `self` that are also present in `other`.
        self.iter.by_ref().find(|&nxt| !self.other.contains(nxt))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Upper bound only: filtering can't be predicted.
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Difference<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter
            .by_ref()
            .rev()
            .find(|&nxt| !self.other.contains(nxt))
    }
}

// Fused because the underlying `Ones` is fused.
impl<'a> FusedIterator for Difference<'a> {}
1119
/// Lazy iterator over bits set in exactly one of two sets; created by
/// [`FixedBitSet::symmetric_difference`]. Yields `self \ other` first,
/// then `other \ self`.
pub struct SymmetricDifference<'a> {
    iter: Chain<Difference<'a>, Difference<'a>>,
}

impl<'a> Iterator for SymmetricDifference<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for SymmetricDifference<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

// Fused because both chained `Difference` halves are fused.
impl<'a> FusedIterator for SymmetricDifference<'a> {}
1149
/// Lazy iterator over bits set in both sets; created by
/// [`FixedBitSet::intersection`]. Yields indices in ascending order.
pub struct Intersection<'a> {
    iter: Ones<'a>,
    other: &'a FixedBitSet,
}

impl<'a> Iterator for Intersection<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Keep only the ones of `self` that are also present in `other`.
        self.iter.by_ref().find(|&nxt| self.other.contains(nxt))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Intersection<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter
            .by_ref()
            .rev()
            .find(|&nxt| self.other.contains(nxt))
    }
}

// Fused because the underlying `Ones` is fused.
impl<'a> FusedIterator for Intersection<'a> {}
1183
/// Lazy iterator over bits set in either set; created by
/// [`FixedBitSet::union`]. Yields `self`'s ones first, then the bits set
/// only in `other`.
pub struct Union<'a> {
    iter: Chain<Ones<'a>, Difference<'a>>,
}

impl<'a> Iterator for Union<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Union<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

// Fused because both chained halves are fused.
impl<'a> FusedIterator for Union<'a> {}
1213
/// Iterator over the `(usize sub-block index, bit mask)` pairs covering a
/// bit range; the first and last blocks get partial masks, interior blocks
/// get the all-ones mask.
struct Masks {
    first_block: usize,
    first_mask: usize,
    last_block: usize,
    last_mask: usize,
}

impl Masks {
    /// Builds the mask iterator for `range` within a set of `length` bits.
    ///
    /// # Panics
    /// Panics if the range is empty-inverted or extends past `length`.
    #[inline]
    fn new<T: IndexRange>(range: T, length: usize) -> Masks {
        let start = range.start().unwrap_or(0);
        let end = range.end().unwrap_or(length);
        assert!(
            start <= end && end <= length,
            "invalid range {}..{} for a fixedbitset of size {}",
            start,
            end,
            length
        );

        let (first_block, first_rem) = div_rem(start, BITS);
        let (last_block, last_rem) = div_rem(end, BITS);

        Masks {
            first_block,
            // All bits from `first_rem` upward.
            first_mask: usize::MAX << first_rem,
            last_block,
            // The low `last_rem` bits. The two-step shift avoids the
            // out-of-range shift by BITS when `last_rem == 0` (the mask
            // then correctly becomes 0).
            last_mask: (usize::MAX >> 1) >> (BITS - last_rem - 1),
        }
    }
}

impl Iterator for Masks {
    type Item = (usize, usize);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first_block.cmp(&self.last_block) {
            Ordering::Less => {
                // Interior (or first) block: emit the current mask, then
                // widen to all-ones for the following blocks.
                let res = (self.first_block, self.first_mask);
                self.first_block += 1;
                self.first_mask = !0;
                Some(res)
            }
            Ordering::Equal => {
                // Final block: combine both partial masks; an empty range
                // yields mask 0 and therefore no item.
                let mask = self.first_mask & self.last_mask;
                let res = if mask == 0 {
                    None
                } else {
                    Some((self.first_block, mask))
                };
                self.first_block += 1;
                res
            }
            Ordering::Greater => None,
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.first_block..=self.last_block).size_hint()
    }
}

impl FusedIterator for Masks {}

// size_hint is exact: it is the remaining block range's length.
impl ExactSizeIterator for Masks {}
1285
/// Iterator over the indices of set bits; created by [`FixedBitSet::ones`].
///
/// Consumes one `usize` sub-block at a time from both ends:
/// `bitset_front`/`bitset_back` hold the not-yet-reported bits of the
/// current end blocks, `block_idx_*` their bit offsets.
pub struct Ones<'a> {
    bitset_front: usize,
    bitset_back: usize,
    block_idx_front: usize,
    block_idx_back: usize,
    remaining_blocks: core::slice::Iter<'a, usize>,
}

impl<'a> Ones<'a> {
    /// Clears the lowest set bit of `*n` and returns its index.
    /// Callers must ensure `*n != 0` (otherwise `*n - 1` underflows).
    #[inline]
    pub fn last_positive_bit_and_unset(n: &mut usize) -> usize {
        // Isolate the lowest set bit (two's-complement trick) ...
        let last_bit = *n & n.wrapping_neg();

        let position = last_bit.trailing_zeros();

        // ... and clear it.
        *n &= *n - 1;

        position as usize
    }

    /// Clears the highest set bit of `*n` and returns its leading-zero
    /// count (distance from the top). Callers must ensure `*n != 0`.
    #[inline]
    fn first_positive_bit_and_unset(n: &mut usize) -> usize {
        let bit_idx = n.leading_zeros();

        // Mask off the highest set bit.
        let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
        n.bitand_assign(mask);

        bit_idx as usize
    }
}
1324
impl<'a> DoubleEndedIterator for Ones<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        // Refill the back block until it has a reportable bit.
        while self.bitset_back == 0 {
            match self.remaining_blocks.next_back() {
                None => {
                    // Middle exhausted: the back end meets the front block.
                    if self.bitset_front != 0 {
                        self.bitset_back = 0;
                        self.block_idx_back = self.block_idx_front;
                        return Some(
                            self.block_idx_front + BITS
                                - Self::first_positive_bit_and_unset(&mut self.bitset_front)
                                - 1,
                        );
                    } else {
                        return None;
                    }
                }
                Some(next_block) => {
                    self.bitset_back = *next_block;
                    self.block_idx_back -= BITS;
                }
            };
        }

        // Convert the leading-zero count into an absolute bit index.
        Some(
            self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
                - 1,
        )
    }
}
1355
impl<'a> Iterator for Ones<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Refill the front block until it has a reportable bit.
        while self.bitset_front == 0 {
            match self.remaining_blocks.next() {
                Some(next_block) => {
                    self.bitset_front = *next_block;
                    self.block_idx_front += BITS;
                }
                None => {
                    // Middle exhausted: the front end meets the back block.
                    if self.bitset_back != 0 {
                        self.block_idx_front = self.block_idx_back;
                        self.bitset_front = 0;

                        return Some(
                            self.block_idx_back
                                + Self::last_positive_bit_and_unset(&mut self.bitset_back),
                        );
                    } else {
                        return None;
                    }
                }
            };
        }

        Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Upper bound: bits between the two cursors plus both end blocks.
        (
            0,
            (Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
        )
    }
}

impl<'a> FusedIterator for Ones<'a> {}
1398
/// Iterator over the indices of unset bits; created by
/// [`FixedBitSet::zeroes`]. Works on the complement (`!block`) of each
/// sub-block and stops at the logical length.
pub struct Zeroes<'a> {
    // Complemented current sub-block: set bits here mark zeroes.
    bitset: usize,
    block_idx: usize,
    len: usize,
    remaining_blocks: core::slice::Iter<'a, usize>,
}

impl<'a> Iterator for Zeroes<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        while self.bitset == 0 {
            self.bitset = !*self.remaining_blocks.next()?;
            self.block_idx += BITS;
        }
        // Isolate and clear the lowest set bit of the complement.
        let t = self.bitset & (0_usize).wrapping_sub(self.bitset);
        let r = self.bitset.trailing_zeros() as usize;
        self.bitset ^= t;
        let bit = self.block_idx + r;
        // Zeroes in the last block past `len` are padding, not real bits.
        if bit < self.len {
            Some(bit)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.len))
    }
}

impl<'a> FusedIterator for Zeroes<'a> {}
1438
impl Clone for FixedBitSet {
    #[inline]
    fn clone(&self) -> Self {
        Self::from_blocks_and_len(Vec::from(self.as_simd_slice()), self.length)
    }

    /// Clones `source` into `self`, reusing `self`'s allocation when it is
    /// already large enough.
    #[inline]
    fn clone_from(&mut self, source: &Self) {
        if self.length < source.length {
            // SAFETY: the uninitialized fill is fully overwritten below —
            // after this grow, `me.len() == them.len()`, so
            // `copy_from_slice` initializes every block.
            unsafe { self.grow_inner(source.length, MaybeUninit::uninit()) };
        }
        let me = self.as_mut_simd_slice_uninit();
        let them = source.as_simd_slice_uninit();
        match me.len().cmp(&them.len()) {
            Ordering::Greater => {
                // `self` was larger: copy the source blocks, then clear the
                // tail so stale bits do not survive.
                let (head, tail) = me.split_at_mut(them.len());
                head.copy_from_slice(them);
                tail.fill(MaybeUninit::new(SimdBlock::NONE));
            }
            Ordering::Equal => me.copy_from_slice(them),
            // Not reachable after the grow above; nothing to do.
            Ordering::Less => {}
        }
        self.length = source.length;
    }
}
1467
1468impl Index<usize> for FixedBitSet {
1474 type Output = bool;
1475
1476 #[inline]
1477 fn index(&self, bit: usize) -> &bool {
1478 if self.contains(bit) {
1479 &true
1480 } else {
1481 &false
1482 }
1483 }
1484}
1485
1486impl Extend<usize> for FixedBitSet {
1488 fn extend<I: IntoIterator<Item = usize>>(&mut self, src: I) {
1489 let iter = src.into_iter();
1490 for i in iter {
1491 if i >= self.len() {
1492 self.grow(i + 1);
1493 }
1494 self.put(i);
1495 }
1496 }
1497}
1498
1499impl FromIterator<usize> for FixedBitSet {
1502 fn from_iter<I: IntoIterator<Item = usize>>(src: I) -> Self {
1503 let mut fbs = FixedBitSet::with_capacity(0);
1504 fbs.extend(src);
1505 fbs
1506 }
1507}
1508
/// Owning iterator over the indices of set bits; created by
/// [`FixedBitSet::into_ones`]. Same double-ended scheme as [`Ones`].
pub struct IntoOnes {
    bitset_front: Block,
    bitset_back: Block,
    block_idx_front: usize,
    block_idx_back: usize,
    // Borrows `_buf`'s allocation; the 'static lifetime is sound because
    // `_buf` lives exactly as long as this iterator.
    remaining_blocks: core::iter::Copied<core::slice::Iter<'static, usize>>,
    // Keeps the underlying allocation alive (never read directly).
    _buf: Vec<SimdBlock>,
}
1518
impl IntoOnes {
    /// Clears the lowest set bit of `*n` and returns its index.
    /// Callers must ensure `*n != 0` (otherwise `*n - 1` underflows).
    #[inline]
    pub fn last_positive_bit_and_unset(n: &mut Block) -> usize {
        // Isolate the lowest set bit (two's-complement trick) ...
        let last_bit = *n & n.wrapping_neg();

        let position = last_bit.trailing_zeros();

        // ... and clear it.
        *n &= *n - 1;

        position as usize
    }

    /// Clears the highest set bit of `*n` and returns its leading-zero
    /// count (distance from the top). Callers must ensure `*n != 0`.
    #[inline]
    fn first_positive_bit_and_unset(n: &mut Block) -> usize {
        let bit_idx = n.leading_zeros();

        // Mask off the highest set bit.
        let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
        n.bitand_assign(mask);

        bit_idx as usize
    }
}
1546
impl DoubleEndedIterator for IntoOnes {
    fn next_back(&mut self) -> Option<Self::Item> {
        // Refill the back block until it has a reportable bit.
        while self.bitset_back == 0 {
            match self.remaining_blocks.next_back() {
                None => {
                    // Middle exhausted: the back end meets the front block.
                    if self.bitset_front != 0 {
                        self.bitset_back = 0;
                        self.block_idx_back = self.block_idx_front;
                        return Some(
                            self.block_idx_front + BITS
                                - Self::first_positive_bit_and_unset(&mut self.bitset_front)
                                - 1,
                        );
                    } else {
                        return None;
                    }
                }
                Some(next_block) => {
                    self.bitset_back = next_block;
                    self.block_idx_back -= BITS;
                }
            };
        }

        // Convert the leading-zero count into an absolute bit index.
        Some(
            self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
                - 1,
        )
    }
}
1577
impl Iterator for IntoOnes {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Refill the front block until it has a reportable bit.
        while self.bitset_front == 0 {
            match self.remaining_blocks.next() {
                Some(next_block) => {
                    self.bitset_front = next_block;
                    self.block_idx_front += BITS;
                }
                None => {
                    // Middle exhausted: the front end meets the back block.
                    if self.bitset_back != 0 {
                        self.block_idx_front = self.block_idx_back;
                        self.bitset_front = 0;

                        return Some(
                            self.block_idx_back
                                + Self::last_positive_bit_and_unset(&mut self.bitset_back),
                        );
                    } else {
                        return None;
                    }
                }
            };
        }

        Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Upper bound: bits between the two cursors plus both end blocks.
        (
            0,
            (Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
        )
    }
}

impl FusedIterator for IntoOnes {}
1620
1621impl<'a> BitAnd for &'a FixedBitSet {
1622 type Output = FixedBitSet;
1623 fn bitand(self, other: &FixedBitSet) -> FixedBitSet {
1624 let (short, long) = {
1625 if self.len() <= other.len() {
1626 (self.as_simd_slice(), other.as_simd_slice())
1627 } else {
1628 (other.as_simd_slice(), self.as_simd_slice())
1629 }
1630 };
1631 let mut data = Vec::from(short);
1632 for (data, block) in data.iter_mut().zip(long.iter()) {
1633 *data &= *block;
1634 }
1635 let len = core::cmp::min(self.len(), other.len());
1636 FixedBitSet::from_blocks_and_len(data, len)
1637 }
1638}
1639
impl BitAndAssign for FixedBitSet {
    /// `self &= other`; see [`FixedBitSet::intersect_with`].
    fn bitand_assign(&mut self, other: Self) {
        self.intersect_with(&other);
    }
}

impl BitAndAssign<&Self> for FixedBitSet {
    /// `self &= other` by reference; see [`FixedBitSet::intersect_with`].
    fn bitand_assign(&mut self, other: &Self) {
        self.intersect_with(other);
    }
}
1651
1652impl<'a> BitOr for &'a FixedBitSet {
1653 type Output = FixedBitSet;
1654 fn bitor(self, other: &FixedBitSet) -> FixedBitSet {
1655 let (short, long) = {
1656 if self.len() <= other.len() {
1657 (self.as_simd_slice(), other.as_simd_slice())
1658 } else {
1659 (other.as_simd_slice(), self.as_simd_slice())
1660 }
1661 };
1662 let mut data = Vec::from(long);
1663 for (data, block) in data.iter_mut().zip(short.iter()) {
1664 *data |= *block;
1665 }
1666 let len = core::cmp::max(self.len(), other.len());
1667 FixedBitSet::from_blocks_and_len(data, len)
1668 }
1669}
1670
impl BitOrAssign for FixedBitSet {
    /// `self |= other`; see [`FixedBitSet::union_with`].
    fn bitor_assign(&mut self, other: Self) {
        self.union_with(&other);
    }
}

impl BitOrAssign<&Self> for FixedBitSet {
    /// `self |= other` by reference; see [`FixedBitSet::union_with`].
    fn bitor_assign(&mut self, other: &Self) {
        self.union_with(other);
    }
}
1682
1683impl<'a> BitXor for &'a FixedBitSet {
1684 type Output = FixedBitSet;
1685 fn bitxor(self, other: &FixedBitSet) -> FixedBitSet {
1686 let (short, long) = {
1687 if self.len() <= other.len() {
1688 (self.as_simd_slice(), other.as_simd_slice())
1689 } else {
1690 (other.as_simd_slice(), self.as_simd_slice())
1691 }
1692 };
1693 let mut data = Vec::from(long);
1694 for (data, block) in data.iter_mut().zip(short.iter()) {
1695 *data ^= *block;
1696 }
1697 let len = core::cmp::max(self.len(), other.len());
1698 FixedBitSet::from_blocks_and_len(data, len)
1699 }
1700}
1701
impl BitXorAssign for FixedBitSet {
    /// `self ^= other`; see [`FixedBitSet::symmetric_difference_with`].
    fn bitxor_assign(&mut self, other: Self) {
        self.symmetric_difference_with(&other);
    }
}

impl BitXorAssign<&Self> for FixedBitSet {
    /// `self ^= other` by reference; see
    /// [`FixedBitSet::symmetric_difference_with`].
    fn bitxor_assign(&mut self, other: &Self) {
        self.symmetric_difference_with(other);
    }
}