index_set/atomic_bitset.rs
use crate::*;

/// Same as `[AtomicUsize; N]`, but with additional functionality.
pub struct AtomicBitSet<const N: usize> {
    bitset: [AtomicUsize; N],
    // Index of the slot where the search for the next free bit starts,
    // used to skip slots that were already full on a previous search.
    rotation: AtomicUsize,
}

impl<const N: usize> AtomicBitSet<N> {
    /// Creates a new `AtomicBitSet` with `N` slots.
    /// Each slot holds `usize::BITS` bits (32 or 64, depending on the architecture).
    ///
    /// ## Examples
    ///
    /// ```rust
    /// use index_set::{AtomicBitSet, slot_count};
    ///
    /// let bitset: AtomicBitSet<{ slot_count::from_bits(128) }> = AtomicBitSet::new();
    /// ```
    #[inline]
    #[allow(clippy::new_without_default)]
    pub const fn new() -> Self {
        Self {
            bitset: [const { AtomicUsize::new(0) }; N],
            rotation: AtomicUsize::new(0),
        }
    }

    /// Atomically finds the next free bit (unset bit with value `0`) in the bitset, sets it to `1`,
    /// and returns its index.
    ///
    /// This method is thread-safe and can be used in concurrent environments.
    ///
    /// ## Examples
    ///
    /// ```rust
    /// use index_set::{AtomicBitSet, slot_count, BitSet, SharedBitSet};
    /// // Create a new AtomicBitSet backed by 1 kilobyte of memory.
    /// static BIT_SET: AtomicBitSet<{ slot_count::from_kilobytes(1) }> = AtomicBitSet::new();
    /// assert_eq!(BIT_SET.set_next_free_bit(), Some(0));
    ///
    /// BIT_SET.insert(2);
    /// assert_eq!(BIT_SET.set_next_free_bit(), Some(1));
    /// assert_eq!(BIT_SET.set_next_free_bit(), Some(3));
    ///
    /// BIT_SET.remove(1);
    /// assert_eq!(BIT_SET.has(1), false);
    /// assert_eq!(BIT_SET.set_next_free_bit(), Some(1));
    ///
    /// assert_eq!(BIT_SET.size(), 4);
    /// // It can hold up to 8192 unique identifiers.
    /// assert_eq!(BIT_SET.capacity(), 8192);
    /// ```
    pub fn set_next_free_bit(&self) -> Option<usize> {
        // Rotate the slots so the search starts where the last free bit was found.
        let skip = self.rotation.load(Ordering::Relaxed);
        let mut slot_idx = skip;

        let slots = utils::rotate_left(&self.bitset, skip);

        for slot in slots {
            let available_slot = slot.fetch_update(Ordering::AcqRel, Ordering::Acquire, |curr| {
                // slot is full
                if curr == usize::MAX {
                    return None;
                }
                let next_available_bit = (!curr).trailing_zeros() as usize;
                Some(curr | (1 << next_available_bit))
            });

            // `fetch_update` returns the previous value on success, so the bit we
            // just set is the lowest zero bit of `curr`.
            if let Ok(curr) = available_slot {
                if skip != slot_idx {
                    self.rotation.store(slot_idx, Ordering::Relaxed);
                }
                let next_available_bit = (!curr).trailing_zeros() as usize;
                return Some(slot_idx * usize::BITS as usize + next_available_bit);
            }

            slot_idx += 1;
            if slot_idx >= N {
                slot_idx = 0;
            }
        }
        None
    }
}
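
// A minimal concurrency sketch (not part of the crate's API): several threads
// claim bits through `set_next_free_bit` and every returned index must be
// unique. The slot count, thread count, and claims-per-thread are arbitrary
// illustration values.
#[cfg(test)]
mod set_next_free_bit_tests {
    use super::*;
    use std::collections::HashSet;
    use std::sync::Mutex;

    #[test]
    fn concurrent_claims_are_unique() {
        let bitset = AtomicBitSet::<4>::new();
        let claimed = Mutex::new(HashSet::new());

        std::thread::scope(|scope| {
            for _ in 0..4 {
                scope.spawn(|| {
                    for _ in 0..16 {
                        let idx = bitset
                            .set_next_free_bit()
                            .expect("bitset should not be full");
                        // A duplicate index would mean two threads claimed the same bit.
                        assert!(claimed.lock().unwrap().insert(idx));
                    }
                });
            }
        });

        assert_eq!(claimed.lock().unwrap().len(), 4 * 16);
    }
}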

impl<const N: usize> std::ops::Deref for AtomicBitSet<N> {
    type Target = [AtomicUsize];

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.bitset
    }
}
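
// A small sketch of what the `Deref` impl above enables: deref coercion lets
// the bitset be used wherever a `&[AtomicUsize]` slice is expected, exposing
// the raw slots. The constants below are illustrative only.
#[cfg(test)]
mod deref_tests {
    use super::*;
    use std::sync::atomic::{AtomicUsize, Ordering};

    #[test]
    fn slots_are_exposed_as_a_slice() {
        let bitset = AtomicBitSet::<2>::new();
        assert_eq!(bitset.set_next_free_bit(), Some(0));
        assert_eq!(bitset.set_next_free_bit(), Some(1));

        // Deref coercion from `&AtomicBitSet<2>` to `&[AtomicUsize]`.
        let slots: &[AtomicUsize] = &bitset;
        assert_eq!(slots.len(), 2);

        // Both claimed bits live in the first slot.
        let set_bits: u32 = slots
            .iter()
            .map(|slot| slot.load(Ordering::Relaxed).count_ones())
            .sum();
        assert_eq!(set_bits, 2);
    }
}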