1use crate::default_vec::DefaultVec;
2use core::fmt::{Debug, Formatter};
3use core::iter;
4use core::marker::PhantomData;
5use core::ops::{BitAndAssign, BitOrAssign, BitXorAssign, SubAssign};
6
/// Backing chunk type of the bit set: each `Elt` stores `Elt::BITS` (32) members.
type Elt = u32;
8
/// A growable set of indices of type `I` (any type convertible to `usize`),
/// stored as a bit per element in a `DefaultVec<Elt>`.
///
/// `PhantomData<I>` records the index type without storing any `I` values;
/// the backing storage is identical for every `I`.
#[cfg_attr(feature = "serde-1", derive(serde::Serialize, serde::Deserialize))]
pub struct BitSet<I = usize>(DefaultVec<Elt>, PhantomData<I>);
16
17impl<I> Default for BitSet<I> {
18 fn default() -> Self {
19 BitSet(DefaultVec::default(), PhantomData)
20 }
21}
22
23impl<I> Clone for BitSet<I> {
24 fn clone(&self) -> Self {
25 BitSet(self.0.clone(), PhantomData)
26 }
27
28 fn clone_from(&mut self, source: &Self) {
29 self.0.clone_from(&source.0)
30 }
31}
32
33impl<I> PartialEq<Self> for BitSet<I> {
34 fn eq(&self, other: &Self) -> bool {
35 self.0 == other.0
36 }
37}
38
// Marker impl: `eq` delegates to integer chunk comparison, which is a full
// equivalence relation, so `Eq` is sound for every `I`.
impl<I> Eq for BitSet<I> {}
40
41#[inline]
42fn split(x: usize) -> (usize, Elt, u32) {
43 let offset = (x % Elt::BITS as usize) as u32;
44 (x / Elt::BITS as usize, 1 << offset, offset)
45}
46
47impl<I: Into<usize>> BitSet<I> {
48 pub fn insert(&mut self, x: I) -> bool {
61 let (chunk_idx, mask, _) = split(x.into());
62 let chunk = self.0.get_mut(chunk_idx);
63 let res = (*chunk & mask) == 0;
64 *chunk |= mask;
65 res
66 }
67
68 pub fn remove(&mut self, x: I) -> bool {
81 let (chunk_idx, mask, _) = split(x.into());
82 let chunk = self.0.get_mut(chunk_idx);
83 let res = (*chunk & mask) != 0;
84 *chunk &= !mask;
85 res
86 }
87
88 pub fn set(&mut self, x: I, v: bool) -> bool {
101 let (chunk_idx, mask, shift) = split(x.into());
102 let chunk = self.0.get_mut(chunk_idx);
103 let res = (*chunk & mask) != 0;
104 *chunk &= !mask;
105 *chunk |= (v as Elt) << shift;
106 res
107 }
108
109 pub fn contains(&self, x: I) -> bool {
111 let (chunk_idx, mask, _) = split(x.into());
112 let chunk = self.0.get(chunk_idx);
113 (chunk & mask) != 0
114 }
115
116 pub fn contains_mut(&mut self, x: I) -> bool {
118 let (chunk_idx, mask, _) = split(x.into());
119 let chunk = *self.0.get_mut(chunk_idx);
120 (chunk & mask) != 0
121 }
122
123 pub fn clear(&mut self) {
125 self.0.clear()
126 }
127}
128
129impl<I: From<usize> + Into<usize> + Copy> BitSet<I> {
130 pub fn iter(&self) -> impl Iterator<Item = I> + '_ {
134 let max = self.0.capacity() * (Elt::BITS as usize);
135 (0..max).map(I::from).filter(|x| self.contains(*x))
136 }
137}
138
139impl<'a, I> BitAndAssign<&'a BitSet> for BitSet<I> {
140 fn bitand_assign(&mut self, rhs: &'a BitSet) {
152 for (this, other) in self
153 .0
154 .iter_mut()
155 .zip(rhs.0.iter().copied().chain(iter::repeat(0)))
156 {
157 *this &= other
158 }
159 }
160}
161
162impl<'a, I> BitOrAssign<&'a BitSet> for BitSet<I> {
163 fn bitor_assign(&mut self, rhs: &'a BitSet) {
175 if rhs.0.capacity() > self.0.capacity() {
176 self.0.reserve(rhs.0.capacity())
177 }
178 for (this, other) in self.0.iter_mut().zip(rhs.0.iter().copied()) {
179 *this |= other
180 }
181 }
182}
183
184impl<'a, I> SubAssign<&'a BitSet> for BitSet<I> {
185 fn sub_assign(&mut self, rhs: &'a BitSet) {
197 for (this, other) in self.0.iter_mut().zip(rhs.0.iter().copied()) {
198 *this &= !other
199 }
200 }
201}
202
203impl<'a, I> BitXorAssign<&'a BitSet> for BitSet<I> {
204 fn bitxor_assign(&mut self, rhs: &'a BitSet) {
216 if rhs.0.capacity() > self.0.capacity() {
217 self.0.reserve(rhs.0.capacity())
218 }
219 for (this, other) in self.0.iter_mut().zip(rhs.0.iter().copied()) {
220 *this ^= other
221 }
222 }
223}
224
225impl<I: Into<usize>> Extend<I> for BitSet<I> {
226 fn extend<T: IntoIterator<Item = I>>(&mut self, iter: T) {
227 iter.into_iter().for_each(|x| {
228 self.insert(x);
229 })
230 }
231}
232
233impl<I: Into<usize>> FromIterator<I> for BitSet<I> {
234 fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
235 let mut res = Self::default();
236 res.extend(iter);
237 res
238 }
239}
240
241impl<I: From<usize> + Into<usize> + Copy + Debug> Debug for BitSet<I> {
242 fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
243 f.debug_set().entries(self.iter()).finish()
244 }
245}