eta_algorithms/data_structs/bitmap/atomic_bitmap.rs

use std::alloc::Layout;
use std::sync::atomic::{AtomicUsize, Ordering};

use crate::data_structs::bitmap::consts::{DIV_SHIFT, MASK};
use crate::data_structs::bitmap::handle::Handle;

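/// Memory-ordering mode used by all bitmap operations: `Relaxed` performs
/// atomic accesses with no inter-thread ordering guarantees, while `Strict`
/// uses acquire/release orderings so writes are visible to subsequent readers.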
#[derive(Clone, Copy)]
pub enum Mode {
    Relaxed,
    Strict,
}

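/// A fixed-capacity bitmap backed by a raw array of `AtomicUsize`, allowing
/// concurrent reads and writes without external locking.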
pub struct AtomicBitmap {
    data: *mut AtomicUsize,
    bit_capacity: usize,
    capacity: usize,
    layout: Layout,
}

impl AtomicBitmap {
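    /// Allocates a zero-initialized bitmap with room for at least `bit_count` bits.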
    pub fn new(bit_count: usize) -> Self {
        // Round up to whole words; the extra word keeps the last partial word in bounds.
        let size = (bit_count >> DIV_SHIFT) + 1;
        let layout = Layout::array::<AtomicUsize>(size).expect("Failed to create layout");
        // `alloc_zeroed` both initializes the words and clears every bit; storing
        // through references to uninitialized memory would be undefined behavior.
        let data = unsafe { std::alloc::alloc_zeroed(layout) as *mut AtomicUsize };
        if data.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        AtomicBitmap {
            data,
            capacity: size,
            bit_capacity: bit_count,
            layout,
        }
    }

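    /// Collects the indices of every bit that is currently `true`.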
    pub fn to_indices_true(&self, mode: Mode) -> Vec<usize> {
        let mut indices = Vec::new();
        for i in 0..self.bit_capacity {
            if unsafe { self.get_unchecked(i, mode) } {
                indices.push(i);
            }
        }
        indices
    }
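
    /// Collects the indices of every bit that is currently `false`.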
    pub fn to_indices_false(&self, mode: Mode) -> Vec<usize> {
        let mut indices = Vec::new();
        for i in 0..self.bit_capacity {
            if unsafe { !self.get_unchecked(i, mode) } {
                indices.push(i);
            }
        }
        indices
    }

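    /// Returns `true` only if, for every `Handle`, all bits of its precomputed
    /// `bit_mask` are set within its `chunk` word.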
    pub fn check_batch(&self, handles: &[Handle], mode: Mode) -> bool {
        for handle in handles {
            let val = unsafe {
                match mode {
                    Mode::Relaxed => (*self.data.add(handle.chunk as usize)).load(Ordering::Relaxed),
                    Mode::Strict => (*self.data.add(handle.chunk as usize)).load(Ordering::Acquire),
                }
            };
            if (val & handle.bit_mask) != handle.bit_mask {
                return false;
            }
        }
        true
    }

    #[inline(always)]
    pub fn bit_capacity(&self) -> usize {
        self.bit_capacity
    }

    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.capacity
    }

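    /// Sets the bit at `bit_index` to `value`, panicking if the index is out of bounds.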
    #[inline(always)]
    pub fn set(&self, bit_index: usize, value: bool, mode: Mode) {
        if bit_index >= self.bit_capacity {
            panic!("Bit index out of bounds");
        }

        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & MASK;
        unsafe {
            let ptr = self.data.add(offset);
            // A single atomic read-modify-write avoids the lost-update race that a
            // separate load followed by a store would allow when two threads write
            // different bits of the same word concurrently.
            let ordering = match mode {
                Mode::Relaxed => Ordering::Relaxed,
                Mode::Strict => Ordering::AcqRel,
            };
            if value {
                (*ptr).fetch_or(1 << bit_offset, ordering);
            } else {
                (*ptr).fetch_and(!(1 << bit_offset), ordering);
            }
        }
    }

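    /// Returns the bit at `bit_index`, or `None` if the index is out of bounds.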
    #[inline(always)]
    pub fn get(&self, bit_index: usize, mode: Mode) -> Option<bool> {
        if bit_index >= self.bit_capacity {
            return None;
        }

        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & MASK;
        unsafe {
            let ptr = self.data.add(offset);
            match mode {
                Mode::Relaxed => Some(((*ptr).load(Ordering::Relaxed) & (1 << bit_offset)) != 0),
                Mode::Strict => Some(((*ptr).load(Ordering::Acquire) & (1 << bit_offset)) != 0),
            }
        }
    }

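    /// Sets the bit at `bit_index` to `value` without a bounds check.
    ///
    /// # Safety
    ///
    /// `bit_index` must be less than `self.bit_capacity()`.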
    #[inline(always)]
    pub unsafe fn set_unchecked(&self, bit_index: usize, value: bool, mode: Mode) {
        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & MASK;
        unsafe {
            let ptr = self.data.add(offset);
            // Same atomic read-modify-write as `set`, minus the bounds check.
            let ordering = match mode {
                Mode::Relaxed => Ordering::Relaxed,
                Mode::Strict => Ordering::AcqRel,
            };
            if value {
                (*ptr).fetch_or(1 << bit_offset, ordering);
            } else {
                (*ptr).fetch_and(!(1 << bit_offset), ordering);
            }
        }
    }

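    /// Reads the bit at `bit_index` without a bounds check.
    ///
    /// # Safety
    ///
    /// `bit_index` must be less than `self.bit_capacity()`.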
    #[inline(always)]
    pub unsafe fn get_unchecked(&self, bit_index: usize, mode: Mode) -> bool {
        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & MASK;
        unsafe {
            let ptr = self.data.add(offset);
            match mode {
                Mode::Relaxed => (*ptr).load(Ordering::Relaxed) & (1 << bit_offset) != 0,
                Mode::Strict => (*ptr).load(Ordering::Acquire) & (1 << bit_offset) != 0,
            }
        }
    }
}

impl Drop for AtomicBitmap {
    fn drop(&mut self) {
        unsafe {
            std::alloc::dealloc(self.data as *mut u8, self.layout);
        }
    }
}

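// SAFETY: the buffer is only ever accessed through atomic operations, and the
// raw pointer is owned exclusively by this struct, so sharing references
// across threads is sound.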
unsafe impl Send for AtomicBitmap {}

unsafe impl Sync for AtomicBitmap {}
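
// A minimal usage sketch, not part of the original file: it exercises `set`,
// `get`, and `to_indices_true` from several threads. It assumes the sibling
// `consts` module defines `DIV_SHIFT`/`MASK` for the platform word size.
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;

    #[test]
    fn concurrent_disjoint_writes() {
        let bitmap = Arc::new(AtomicBitmap::new(128));
        let workers: Vec<_> = (0..4usize)
            .map(|t| {
                let bm = Arc::clone(&bitmap);
                std::thread::spawn(move || {
                    // Each thread owns a disjoint 32-bit range and sets its even bits.
                    for i in (t * 32)..(t * 32 + 32) {
                        bm.set(i, i % 2 == 0, Mode::Strict);
                    }
                })
            })
            .collect();
        for w in workers {
            w.join().unwrap();
        }
        // After all writers join, even bits are set and odd bits are clear.
        assert_eq!(bitmap.get(0, Mode::Strict), Some(true));
        assert_eq!(bitmap.get(1, Mode::Strict), Some(false));
        assert_eq!(bitmap.to_indices_true(Mode::Relaxed).len(), 64);
        // Out-of-bounds lookups return None rather than panicking.
        assert_eq!(bitmap.get(4096, Mode::Relaxed), None);
    }
}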