vm_memory/bitmap/backend/
atomic_bitmap.rs1use std::num::NonZeroUsize;
7use std::sync::atomic::{AtomicU64, Ordering};
8
9use crate::bitmap::{Bitmap, NewBitmap, RefSlice, WithBitmapSlice};
10
/// Bitmap backed by a vector of `AtomicU64` words, tracking one dirty bit per
/// page of a memory region. All bit operations are atomic per word, so the
/// bitmap can be updated concurrently through a shared reference.
#[derive(Debug)]
pub struct AtomicBitmap {
    // Backing words; bit `n` lives in word `n / 64` at position `n % 64`.
    map: Vec<AtomicU64>,
    // Number of valid bits (i.e. pages tracked).
    size: usize,
    // Size in bytes of the memory region this bitmap covers.
    byte_size: usize,
    // Bytes covered by a single bit.
    page_size: NonZeroUsize,
}

#[allow(clippy::len_without_is_empty)]
impl AtomicBitmap {
    /// Create a bitmap of `byte_size` bytes, one bit per `page_size`-byte page.
    ///
    /// A partial trailing page still gets its own bit (sizes round up).
    pub fn new(byte_size: usize, page_size: NonZeroUsize) -> Self {
        let num_pages = byte_size.div_ceil(page_size.get());
        // Number of u64 words required to hold `num_pages` bits.
        let map_size = num_pages.div_ceil(u64::BITS as usize);
        let map: Vec<AtomicU64> = (0..map_size).map(|_| AtomicU64::new(0)).collect();

        AtomicBitmap {
            map,
            size: num_pages,
            byte_size,
            page_size,
        }
    }

    /// Word index and in-word mask addressing bit `index`.
    fn word_and_mask(index: usize) -> (usize, u64) {
        (index >> 6, 1 << (index & 63))
    }

    /// Grow the bitmap to cover `additional_size` more bytes.
    ///
    /// Newly added bits start out clear; existing bits are preserved.
    pub fn enlarge(&mut self, additional_size: usize) {
        self.byte_size += additional_size;
        self.size = self.byte_size.div_ceil(self.page_size.get());
        let map_size = self.size.div_ceil(u64::BITS as usize);
        // `AtomicU64::default()` is zero, so fresh words come up clear.
        self.map.resize_with(map_size, Default::default);
    }

    /// Whether bit `index` is set; out-of-range indices read as unset.
    pub fn is_bit_set(&self, index: usize) -> bool {
        if index < self.size {
            let (word, mask) = Self::word_and_mask(index);
            (self.map[word].load(Ordering::Acquire) & mask) != 0
        } else {
            false
        }
    }

    /// Whether the bit covering byte address `addr` is set.
    pub fn is_addr_set(&self, addr: usize) -> bool {
        self.is_bit_set(addr / self.page_size)
    }

    /// Set every bit covering the byte range `[start_addr, start_addr + len)`.
    pub fn set_addr_range(&self, start_addr: usize, len: usize) {
        self.set_reset_addr_range(start_addr, len, true);
    }

    /// Set (`set == true`) or clear the bits covering the byte range
    /// `[start_addr, start_addr + len)`. Bits past the end of the bitmap are
    /// silently ignored.
    fn set_reset_addr_range(&self, start_addr: usize, len: usize, set: bool) {
        if len == 0 {
            // An empty range touches no pages.
            return;
        }

        let first_bit = start_addr / self.page_size;
        // `len >= 1` here, so `len - 1` cannot underflow; saturate so an
        // address-space overflow clamps to the last representable page.
        let last_bit = start_addr.saturating_add(len - 1) / self.page_size;
        for n in first_bit..=last_bit {
            if n >= self.size {
                // The remainder of the range lies beyond the bitmap.
                break;
            }
            let (word, mask) = Self::word_and_mask(n);
            if set {
                self.map[word].fetch_or(mask, Ordering::SeqCst);
            } else {
                self.map[word].fetch_and(!mask, Ordering::SeqCst);
            }
        }
    }

    /// Clear every bit covering the byte range `[start_addr, start_addr + len)`.
    pub fn reset_addr_range(&self, start_addr: usize, len: usize) {
        self.set_reset_addr_range(start_addr, len, false);
    }

    /// Set bit `index`; out-of-range indices are ignored.
    pub fn set_bit(&self, index: usize) {
        if index >= self.size {
            return;
        }
        let (word, mask) = Self::word_and_mask(index);
        self.map[word].fetch_or(mask, Ordering::SeqCst);
    }

    /// Clear bit `index`; out-of-range indices are ignored.
    pub fn reset_bit(&self, index: usize) {
        if index >= self.size {
            return;
        }
        let (word, mask) = Self::word_and_mask(index);
        self.map[word].fetch_and(!mask, Ordering::SeqCst);
    }

    /// Number of bits (pages) tracked by this bitmap.
    pub fn len(&self) -> usize {
        self.size
    }

    /// Size in bytes of the memory region covered by this bitmap.
    pub fn byte_size(&self) -> usize {
        self.byte_size
    }

    /// Snapshot the bitmap contents as raw words and clear it.
    ///
    /// Each word is read-and-cleared in a single atomic RMW, so no concurrent
    /// update to that word is lost; consistency is per word, not global.
    pub fn get_and_reset(&self) -> Vec<u64> {
        self.map
            .iter()
            // `swap(0)` expresses read-then-clear directly (vs. `fetch_and(0)`).
            .map(|word| word.swap(0, Ordering::SeqCst))
            .collect()
    }

    /// Clear every bit in the bitmap.
    pub fn reset(&self) {
        for word in self.map.iter() {
            word.store(0, Ordering::Release);
        }
    }
}
148
149impl Clone for AtomicBitmap {
150 fn clone(&self) -> Self {
151 let map = self
152 .map
153 .iter()
154 .map(|i| i.load(Ordering::Acquire))
155 .map(AtomicU64::new)
156 .collect();
157 AtomicBitmap {
158 map,
159 size: self.size,
160 byte_size: self.byte_size,
161 page_size: self.page_size,
162 }
163 }
164}
165
// Slices of an `AtomicBitmap` are `RefSlice`s borrowing the bitmap itself.
impl<'a> WithBitmapSlice<'a> for AtomicBitmap {
    type S = RefSlice<'a, Self>;
}
169
// `Bitmap` trait implementation: thin delegation to the inherent methods,
// mapping byte offsets/lengths to page bits.
impl Bitmap for AtomicBitmap {
    // Mark the byte range `[offset, offset + len)` as dirty.
    fn mark_dirty(&self, offset: usize, len: usize) {
        self.set_addr_range(offset, len)
    }

    // Whether the page containing byte `offset` is marked dirty.
    fn dirty_at(&self, offset: usize) -> bool {
        self.is_addr_set(offset)
    }

    // A slice view of this bitmap starting at byte `offset`.
    fn slice_at(&self, offset: usize) -> <Self as WithBitmapSlice<'_>>::S {
        RefSlice::new(self, offset)
    }
}
183
impl Default for AtomicBitmap {
    // Empty bitmap (zero bytes covered) with a 4 KiB page size.
    fn default() -> Self {
        AtomicBitmap::new(0, const { NonZeroUsize::new(0x1000).unwrap() })
    }
}
190
impl NewBitmap for AtomicBitmap {
    /// Builds a bitmap covering `len` bytes, using the host's page size.
    ///
    /// Panics if the queried page size cannot be converted to a non-zero
    /// `usize` (e.g. if `sysconf` reports an error as a negative value).
    fn with_len(len: usize) -> Self {
        #[cfg(target_family = "unix")]
        // SAFETY: `sysconf` has no memory-safety preconditions; it is passed a
        // valid configuration name constant.
        let page_size = unsafe { libc::sysconf(libc::_SC_PAGE_SIZE) };

        #[cfg(target_family = "windows")]
        let page_size = {
            use winapi::um::sysinfoapi::GetSystemInfo;
            let mut sysinfo = std::mem::MaybeUninit::zeroed();
            // SAFETY: `GetSystemInfo` writes a complete `SYSTEM_INFO` into the
            // provided pointer, which is valid for writes.
            unsafe { GetSystemInfo(sysinfo.as_mut_ptr()) };
            // SAFETY: the struct was fully initialized by `GetSystemInfo` above.
            unsafe { sysinfo.assume_init().dwPageSize }
        };

        AtomicBitmap::new(
            len,
            // Both conversions panic on failure; see the doc comment above.
            NonZeroUsize::try_from(usize::try_from(page_size).unwrap()).unwrap(),
        )
    }
}
216
#[cfg(test)]
mod tests {
    use super::*;

    use crate::bitmap::tests::test_bitmap;

    // Small page size keeps bit indices in these tests easy to reason about.
    // (The old `#[allow(clippy::undocumented_unsafe_blocks)]` was stale: the
    // initializer no longer contains an `unsafe` block.)
    const DEFAULT_PAGE_SIZE: NonZeroUsize = NonZeroUsize::new(128).unwrap();

    #[test]
    fn test_bitmap_basic() {
        // A partial trailing page still gets its own bit.
        let a = AtomicBitmap::new(1025, DEFAULT_PAGE_SIZE);
        assert_eq!(a.len(), 9);

        let b = AtomicBitmap::new(1024, DEFAULT_PAGE_SIZE);
        assert_eq!(b.len(), 8);
        // [128, 257) touches pages 1 and 2.
        b.set_addr_range(128, 129);
        assert!(!b.is_addr_set(0));
        assert!(b.is_addr_set(128));
        assert!(b.is_addr_set(256));
        assert!(!b.is_addr_set(384));

        // A clone snapshots the current contents.
        #[allow(clippy::redundant_clone)]
        let copy_b = b.clone();
        assert!(copy_b.is_addr_set(256));
        assert!(!copy_b.is_addr_set(384));

        b.reset();
        assert!(!b.is_addr_set(128));
        assert!(!b.is_addr_set(256));
        assert!(!b.is_addr_set(384));

        b.set_addr_range(128, 129);
        let v = b.get_and_reset();

        // `get_and_reset` leaves the bitmap clear...
        assert!(!b.is_addr_set(128));
        assert!(!b.is_addr_set(256));
        assert!(!b.is_addr_set(384));

        // ...and returns the previous contents (bits 1 and 2 in word 0).
        assert_eq!(v.len(), 1);
        assert_eq!(v[0], 0b110);
    }

    #[test]
    fn test_bitmap_reset() {
        let b = AtomicBitmap::new(1024, DEFAULT_PAGE_SIZE);
        assert_eq!(b.len(), 8);
        b.set_addr_range(128, 129);
        assert!(!b.is_addr_set(0));
        assert!(b.is_addr_set(128));
        assert!(b.is_addr_set(256));
        assert!(!b.is_addr_set(384));

        // Resetting the same range clears exactly the bits it set.
        b.reset_addr_range(128, 129);
        assert!(!b.is_addr_set(0));
        assert!(!b.is_addr_set(128));
        assert!(!b.is_addr_set(256));
        assert!(!b.is_addr_set(384));
    }

    #[test]
    fn test_bitmap_out_of_range() {
        // One-byte pages: bit index == byte address.
        let b = AtomicBitmap::new(1024, NonZeroUsize::MIN);
        // The range extends past the end of the bitmap; the excess is ignored.
        b.set_addr_range(768, 512);
        assert!(b.is_addr_set(768));
        assert!(!b.is_addr_set(1024));
        assert!(!b.is_addr_set(1152));
    }

    #[test]
    fn test_bitmap_impl() {
        let b = AtomicBitmap::new(0x800, DEFAULT_PAGE_SIZE);
        test_bitmap(&b);
    }

    #[test]
    fn test_bitmap_enlarge() {
        let mut b = AtomicBitmap::new(8 * 1024, DEFAULT_PAGE_SIZE);
        assert_eq!(b.len(), 64);
        b.set_addr_range(128, 129);
        assert!(!b.is_addr_set(0));
        assert!(b.is_addr_set(128));
        assert!(b.is_addr_set(256));
        assert!(!b.is_addr_set(384));

        b.reset_addr_range(128, 129);
        assert!(!b.is_addr_set(0));
        assert!(!b.is_addr_set(128));
        assert!(!b.is_addr_set(256));
        assert!(!b.is_addr_set(384));
        b.set_addr_range(128, 129);
        b.enlarge(8 * 1024);
        // Every newly added bit (64..128) must start out clear; the old loop
        // began at 65 and never checked the first new bit.
        for i in 64..128 {
            assert!(!b.is_bit_set(i));
        }
        assert_eq!(b.len(), 128);
        // Bits set before enlarging are preserved.
        assert!(!b.is_addr_set(0));
        assert!(b.is_addr_set(128));
        assert!(b.is_addr_set(256));
        assert!(!b.is_addr_set(384));

        b.set_bit(55);
        assert!(b.is_bit_set(55));
        for i in 65..128 {
            b.set_bit(i);
        }
        for i in 65..128 {
            assert!(b.is_bit_set(i));
        }
        // A reset range covering the whole enlarged region clears everything.
        b.reset_addr_range(0, 16 * 1024);
        for i in 0..128 {
            assert!(!b.is_bit_set(i));
        }
    }
}