eta_algorithms/data_structs/bitmap/mod.rs

use std::alloc::Layout;
use std::ptr;

use crate::data_structs::bitmap::consts::{BIT_END_OFFSET, BIT_MASK, DIV_SHIFT};
use crate::data_structs::bitmap::handle::Handle;

pub mod atomic_bitmap;
pub mod handle;

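// Word-size-dependent constants: DIV_SHIFT converts a bit index into a chunk
// index (`index >> DIV_SHIFT`), BIT_END_OFFSET extracts the bit position
// within a chunk (`index & BIT_END_OFFSET`), and BIT_MASK is a chunk with
// every bit set.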
#[cfg(target_pointer_width = "64")]
pub(crate) mod consts {
    pub(crate) const DIV_SHIFT: usize = 6;
    pub(crate) const BIT_END_OFFSET: usize = 63;
    pub(crate) const BIT_MASK: usize = 0xFFFFFFFFFFFFFFFF;
}

#[cfg(target_pointer_width = "32")]
pub(crate) mod consts {
    pub(crate) const DIV_SHIFT: usize = 5;
    pub(crate) const BIT_END_OFFSET: usize = 31;
    pub(crate) const BIT_MASK: usize = 0xFFFFFFFF;
}

#[cfg(target_pointer_width = "16")]
pub(crate) mod consts {
    pub(crate) const DIV_SHIFT: usize = 4;
    pub(crate) const BIT_END_OFFSET: usize = 15;
    pub(crate) const BIT_MASK: usize = 0xFFFF;
}

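/// A fixed-size bitmap backed by a manually allocated array of `usize`
/// chunks. `bit_capacity` is the number of addressable bits; `capacity` is
/// the number of `usize` chunks backing them.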
pub struct Bitmap {
    data: *mut usize,
    bit_capacity: usize,
    capacity: usize,
    layout: Layout,
}

impl Bitmap {
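    /// Creates a bitmap able to hold `bit_count` bits, with every bit
    /// initially cleared.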
    pub fn new(bit_count: usize) -> Self {
        // `+ 1` rounds the chunk count up so every bit fits in an allocated chunk.
        let size = (bit_count >> DIV_SHIFT) + 1;
        let layout = Layout::array::<usize>(size).expect("Failed to create layout");
        let data = unsafe { std::alloc::alloc(layout) as *mut usize };
        if data.is_null() {
            // Abort via the standard handler rather than writing through null.
            std::alloc::handle_alloc_error(layout);
        }
        // Zero-initialize every chunk so all bits start cleared.
        unsafe { ptr::write_bytes(data, 0, size) };
        Bitmap {
            data,
            capacity: size,
            bit_capacity: bit_count,
            layout,
        }
    }

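    /// Returns the index of the first clear bit at or after `bit_index`, or
    /// `None` if every bit from `bit_index` up to `bit_capacity` is set.
    ///
    /// Panics if `bit_index >= bit_capacity`.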
    pub fn first_zero(&self, bit_index: usize) -> Option<usize> {
        if bit_index >= self.bit_capacity {
            panic!("Bit index out of bounds");
        }

        let index = unsafe { self.first_zero_unchecked(bit_index) };
        if index >= self.bit_capacity {
            None
        } else {
            Some(index)
        }
    }
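    /// Returns the index of the first set bit at or after `bit_index`, or
    /// `None` if no bit in that range is set.
    ///
    /// Panics if `bit_index >= bit_capacity`.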
    pub fn first_one(&self, bit_index: usize) -> Option<usize> {
        if bit_index >= self.bit_capacity {
            panic!("Bit index out of bounds");
        }

        let index = unsafe { self.first_one_unchecked(bit_index) };
        if index >= self.bit_capacity {
            None
        } else {
            Some(index)
        }
    }
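    /// Returns the index of the first clear bit at or after `bit_index`
    /// without bounds checking. The result may be `>= bit_capacity`; the
    /// checked wrapper maps that case to `None`.
    ///
    /// # Safety
    /// `bit_index` must be less than `bit_capacity`.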
    pub unsafe fn first_zero_unchecked(&self, bit_index: usize) -> usize {
        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & BIT_END_OFFSET;

        let last_chunk = self.data.add(self.capacity - 1);
        let mut data_ptr = self.data.add(offset);
        let mut counter = offset * (BIT_END_OFFSET + 1);
        // Treat the bits below `bit_offset` as ones so they are skipped.
        let mut data = *data_ptr | ((1 << bit_offset) - 1);

        // Skip over chunks that are completely set, stopping at the last chunk.
        while data & BIT_MASK == BIT_MASK {
            if data_ptr != last_chunk {
                data_ptr = data_ptr.add(1);
                data = *data_ptr;
                counter += BIT_END_OFFSET + 1;
                continue;
            }
            break;
        }
        counter += data.trailing_ones() as usize;
        counter
    }

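    /// Returns the index of the first set bit at or after `bit_index`
    /// without bounds checking. The result may be `>= bit_capacity`; the
    /// checked wrapper maps that case to `None`.
    ///
    /// # Safety
    /// `bit_index` must be less than `bit_capacity`.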
    pub unsafe fn first_one_unchecked(&self, bit_index: usize) -> usize {
        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & BIT_END_OFFSET;

        let last_chunk = self.data.add(self.capacity - 1);
        let mut data_ptr = self.data.add(offset);
        let mut counter = offset * (BIT_END_OFFSET + 1);
        // Clear the bits below `bit_offset` so they are skipped.
        let mut data = *data_ptr & !((1 << bit_offset) - 1);

        // Skip over chunks that are completely clear, stopping at the last chunk.
        while !data & BIT_MASK == BIT_MASK {
            if data_ptr != last_chunk {
                data_ptr = data_ptr.add(1);
                data = *data_ptr;
                counter += BIT_END_OFFSET + 1;
                continue;
            }
            break;
        }
        counter += data.trailing_zeros() as usize;
        counter
    }
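    /// Returns `true` only if, for every handle, all bits selected by its
    /// `bit_mask` are set within its `chunk`.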
    pub fn check_batch(&self, handles: &[Handle]) -> bool {
        for handle in handles {
            let val = unsafe { *self.data.add(handle.chunk as usize) };
            if (val & handle.bit_mask) != handle.bit_mask {
                return false;
            }
        }
        true
    }

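    /// Collects the indices of all set bits, in ascending order.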
    pub fn to_indices_true(&self) -> Vec<usize> {
        let mut indices = Vec::new();
        for i in 0..self.bit_capacity {
            if unsafe { self.get_unchecked(i) } {
                indices.push(i);
            }
        }
        indices
    }
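    /// Collects the indices of all clear bits, in ascending order.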
    pub fn to_indices_false(&self) -> Vec<usize> {
        let mut indices = Vec::new();
        for i in 0..self.bit_capacity {
            if unsafe { !self.get_unchecked(i) } {
                indices.push(i);
            }
        }
        indices
    }

    #[inline(always)]
    pub fn bit_capacity(&self) -> usize {
        self.bit_capacity
    }
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.capacity
    }
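    /// Sets or clears the bit at `bit_index`. Panics if `bit_index` is out
    /// of bounds.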
    #[inline(always)]
    pub fn set(&mut self, bit_index: usize, value: bool) {
        if bit_index >= self.bit_capacity {
            panic!("Bit index out of bounds");
        }

        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & BIT_END_OFFSET;
        unsafe {
            let ptr = self.data.add(offset);
            *ptr = (*ptr & !(1 << bit_offset)) | ((value as usize) << bit_offset);
        }
    }
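    /// Returns the bit at `bit_index`, or `None` if it is out of bounds.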
    #[inline(always)]
    pub fn get(&self, bit_index: usize) -> Option<bool> {
        if bit_index >= self.bit_capacity {
            return None;
        }

        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & BIT_END_OFFSET;
        unsafe {
            let ptr = self.data.add(offset);
            Some((*ptr & (1 << bit_offset)) != 0)
        }
    }
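    /// Sets or clears the bit at `bit_index` without bounds checking.
    ///
    /// # Safety
    /// `bit_index` must be less than `bit_capacity`.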
    #[inline(always)]
    pub unsafe fn set_unchecked(&mut self, bit_index: usize, value: bool) {
        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & BIT_END_OFFSET;
        unsafe {
            let ptr = self.data.add(offset);
            *ptr = (*ptr & !(1 << bit_offset)) | ((value as usize) << bit_offset);
        }
    }
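    /// Reads the bit at `bit_index` without bounds checking.
    ///
    /// # Safety
    /// `bit_index` must be less than `bit_capacity`.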
    #[inline(always)]
    pub unsafe fn get_unchecked(&self, bit_index: usize) -> bool {
        let offset = bit_index >> DIV_SHIFT;
        let bit_offset = bit_index & BIT_END_OFFSET;
        unsafe {
            let ptr = self.data.add(offset);
            (*ptr & (1 << bit_offset)) != 0
        }
    }
}

impl Drop for Bitmap {
    fn drop(&mut self) {
        unsafe {
            std::alloc::dealloc(self.data as *mut u8, self.layout);
        }
    }
}
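
// A minimal usage sketch of the API defined above; the test name and the
// chosen indices are illustrative, not taken from the original crate.
#[cfg(test)]
mod tests {
    use super::Bitmap;

    #[test]
    fn basic_set_get_and_scans() {
        let mut bitmap = Bitmap::new(128);
        assert_eq!(bitmap.bit_capacity(), 128);

        bitmap.set(3, true);
        bitmap.set(70, true);

        assert_eq!(bitmap.get(3), Some(true));
        assert_eq!(bitmap.get(4), Some(false));
        assert_eq!(bitmap.get(128), None);

        // The first set bit at or after 0 is 3; the first clear bit is 0.
        assert_eq!(bitmap.first_one(0), Some(3));
        assert_eq!(bitmap.first_zero(0), Some(0));
        // Scanning from index 4 skips ahead to the next set bit at 70.
        assert_eq!(bitmap.first_one(4), Some(70));

        assert_eq!(bitmap.to_indices_true(), vec![3, 70]);
    }
}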