//! Types for tracking which areas of guest memory have been dirtied by writes.

#[cfg(feature = "backend-bitmap")]
mod backend;

use std::fmt::Debug;

use crate::{GuestMemoryBackend, GuestMemoryRegion};

#[cfg(feature = "backend-bitmap")]
pub use backend::{ArcSlice, AtomicBitmap, RefSlice};
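
/// Trait implemented by types that can provide a `BitmapSlice` view of themselves for a
/// given lifetime `'a`.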
pub trait WithBitmapSlice<'a> {
    /// Type of the bitmap slice.
    type S: BitmapSlice;
}
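
/// A cloneable, debug-printable `Bitmap` whose slices have the same type as the bitmap
/// itself.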
pub trait BitmapSlice: Bitmap + Clone + Debug + for<'a> WithBitmapSlice<'a, S = Self> {}
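
/// Common bitmap operations for dirty tracking.
///
/// All methods take `&self`, so implementations that record state are expected to rely on
/// interior mutability.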
pub trait Bitmap: for<'a> WithBitmapSlice<'a> {
    /// Mark the memory range specified by the given `offset` and `len` as dirtied.
    fn mark_dirty(&self, offset: usize, len: usize);

    /// Check whether the specified `offset` is marked as dirty.
    fn dirty_at(&self, offset: usize) -> bool;

    /// Return a `<Self as WithBitmapSlice>::S` slice of the current bitmap, starting at
    /// the specified `offset`.
    fn slice_at(&self, offset: usize) -> <Self as WithBitmapSlice<'_>>::S;
}
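
/// A `Bitmap` that can be created with a default value or for a specified length.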
pub trait NewBitmap: Bitmap + Default {
    /// Create a new bitmap able to track a memory range of the given length (in bytes).
    fn with_len(len: usize) -> Self;
}
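
// `()` provides a no-op `Bitmap` implementation: marking does nothing and no offset ever
// reads back as dirty.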
impl WithBitmapSlice<'_> for () {
    type S = Self;
}

impl BitmapSlice for () {}

impl Bitmap for () {
    fn mark_dirty(&self, _offset: usize, _len: usize) {}

    fn dirty_at(&self, _offset: usize) -> bool {
        false
    }

    fn slice_at(&self, _offset: usize) -> Self {}
}

impl NewBitmap for () {
    fn with_len(_len: usize) -> Self {}
}
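
// Wrapping a bitmap in `Option` forwards every operation to the inner value when it is
// `Some`, and degenerates to the no-op behaviour when it is `None`.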
impl<'a, B> WithBitmapSlice<'a> for Option<B>
where
    B: WithBitmapSlice<'a>,
{
    type S = Option<B::S>;
}

impl<B: BitmapSlice> BitmapSlice for Option<B> {}

impl<B: Bitmap> Bitmap for Option<B> {
    fn mark_dirty(&self, offset: usize, len: usize) {
        if let Some(inner) = self {
            inner.mark_dirty(offset, len)
        }
    }

    fn dirty_at(&self, offset: usize) -> bool {
        if let Some(inner) = self {
            return inner.dirty_at(offset);
        }
        false
    }

    fn slice_at(&self, offset: usize) -> Option<<B as WithBitmapSlice<'_>>::S> {
        if let Some(inner) = self {
            return Some(inner.slice_at(offset));
        }
        None
    }
}
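
/// Convenience alias for the bitmap slice type associated with `B` for lifetime `'a`.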
pub type BS<'a, B> = <B as WithBitmapSlice<'a>>::S;
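
/// Convenience alias for the bitmap slice type associated with the regions of the memory
/// backend `M`.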
pub type MS<'a, M> = BS<'a, <<M as GuestMemoryBackend>::R as GuestMemoryRegion>::B>;
#[cfg(test)]
#[cfg(feature = "backend-bitmap")]
pub(crate) mod tests {
    use super::*;

    use std::mem::size_of_val;
    use std::sync::atomic::Ordering;

    use crate::{Bytes, VolatileMemory};
    #[cfg(feature = "backend-mmap")]
    use crate::{GuestAddress, MemoryRegionAddress};
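
    /// Return `true` if no offset in `[start, start + len)` is marked dirty in `b`.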
    pub fn range_is_clean<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| !b.dirty_at(offset))
    }
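
    /// Return `true` if every offset in `[start, start + len)` is marked dirty in `b`.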
    pub fn range_is_dirty<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| b.dirty_at(offset))
    }

    /// Verify that the range is clean when `clean` is `true`, or dirty when it is `false`.
    pub fn check_range<B: Bitmap>(b: &B, start: usize, len: usize, clean: bool) -> bool {
        if clean {
            range_is_clean(b, start, len)
        } else {
            range_is_dirty(b, start, len)
        }
    }
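
    /// Exercise the basic `Bitmap` interface: the bitmap (and a slice taken from it) starts
    /// out clean, and a `mark_dirty` call is visible through both the bitmap and the slice.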
    pub fn test_bitmap<B: Bitmap>(b: &B) {
        let len = 0x800;
        let dirty_offset = 0x400;
        let dirty_len = 0x100;

        let s = b.slice_at(dirty_offset);

        assert!(range_is_clean(b, 0, len));
        assert!(range_is_clean(&s, 0, dirty_len));

        b.mark_dirty(dirty_offset, dirty_len);
        assert!(range_is_dirty(b, dirty_offset, dirty_len));
        assert!(range_is_dirty(&s, 0, dirty_len));
    }
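
    /// Check that the `write`, `write_slice`, `write_obj`, and `store` methods of the
    /// `Bytes` interface dirty the expected ranges. `check_range_fn` verifies the dirty
    /// state of a range, `address_fn` turns an offset into an address, and `step` is the
    /// distance between the offsets used for consecutive writes.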
    pub fn test_bytes<F, G, M, A>(bytes: &M, check_range_fn: F, address_fn: G, step: usize)
    where
        F: Fn(&M, usize, usize, bool) -> bool,
        G: Fn(usize) -> A,
        M: Bytes<A>,
        <M as Bytes<A>>::E: Debug,
    {
        const BUF_SIZE: usize = 1024;
        let buf = vec![1u8; BUF_SIZE];
        let mut dirty_offset = 0x1000;

        let val = 1u64;

        // `write` should dirty the written range.
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        assert_eq!(
            bytes
                .write(buf.as_slice(), address_fn(dirty_offset))
                .unwrap(),
            BUF_SIZE
        );
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));

        // `write_slice` should dirty the written range.
        dirty_offset += step;
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        bytes
            .write_slice(buf.as_slice(), address_fn(dirty_offset))
            .unwrap();
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));

        // `write_obj` should dirty the written range.
        dirty_offset += step;
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        bytes.write_obj(val, address_fn(dirty_offset)).unwrap();
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));

        // `store` should dirty the written range.
        dirty_offset += step;
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        bytes
            .store(val, address_fn(dirty_offset), Ordering::Relaxed)
            .unwrap();
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));
    }
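
    /// Check that writes through a `GuestMemoryRegion` dirty both the region's bitmap and
    /// the bitmap of a slice obtained from the region.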
    #[cfg(feature = "backend-mmap")]
    fn test_guest_memory_region<R: GuestMemoryRegion>(region: &R) {
        let dirty_addr = MemoryRegionAddress(0x0);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let slice = region.get_slice(dirty_addr, dirty_len).unwrap();

        assert!(range_is_clean(&region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        region.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            &region.bitmap(),
            dirty_addr.0 as usize,
            dirty_len
        ));

        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_bytes(
            region,
            |r: &R, start: usize, len: usize, clean: bool| {
                check_range(&r.bitmap(), start, len, clean)
            },
            |offset| MemoryRegionAddress(offset as u64),
            0x1000,
        );
    }
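
    /// Check that writes through a `GuestMemoryBackend` dirty the bitmaps of the containing
    /// region and of the slices covering the written range. The closure `f` builds a fresh
    /// memory object for each sub-test.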
    #[cfg(feature = "backend-mmap")]
    pub fn test_guest_memory_and_region<M, F>(f: F)
    where
        M: GuestMemoryBackend,
        F: Fn() -> M,
    {
        let m = f();
        let dirty_addr = GuestAddress(0x1000);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let (region, region_addr) = m.to_region_addr(dirty_addr).unwrap();
        let mut slices = m.get_slices(dirty_addr, dirty_len);
        let slice = slices.next().unwrap().unwrap();
        assert!(slices.next().is_none());

        assert!(range_is_clean(&region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        m.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            &region.bitmap(),
            region_addr.0 as usize,
            dirty_len
        ));

        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_guest_memory_region(f().find_region(GuestAddress(0)).unwrap());

        let check_range_closure = |m: &M, start: usize, len: usize, clean: bool| -> bool {
            m.get_slices(GuestAddress(start as u64), len).all(|r| {
                let slice = r.unwrap();
                check_range(slice.bitmap(), 0, slice.len(), clean)
            })
        };

        test_bytes(
            &f(),
            check_range_closure,
            |offset| GuestAddress(offset as u64),
            0x1000,
        );
    }
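
    /// Check that writes performed through `VolatileMemory` accessors (`write_obj` on a
    /// slice, `get_ref`, and `get_array_ref`) dirty the corresponding bitmap ranges.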
    pub fn test_volatile_memory<M: VolatileMemory>(m: &M) {
        assert!(m.len() >= 0x8000);

        let dirty_offset = 0x1000;
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let get_ref_offset = 0x2000;
        let array_ref_offset = 0x3000;

        let s1 = m.as_volatile_slice();
        let s2 = m.get_slice(dirty_offset, dirty_len).unwrap();

        assert!(range_is_clean(s1.bitmap(), 0, s1.len()));
        assert!(range_is_clean(s2.bitmap(), 0, s2.len()));

        s1.write_obj(val, dirty_offset).unwrap();

        assert!(range_is_dirty(s1.bitmap(), dirty_offset, dirty_len));
        assert!(range_is_dirty(s2.bitmap(), 0, dirty_len));

        let v_ref = m.get_ref::<u64>(get_ref_offset).unwrap();
        assert!(range_is_clean(s1.bitmap(), get_ref_offset, dirty_len));
        v_ref.store(val);
        assert!(range_is_dirty(s1.bitmap(), get_ref_offset, dirty_len));

        let arr_ref = m.get_array_ref::<u64>(array_ref_offset, 1).unwrap();
        assert!(range_is_clean(s1.bitmap(), array_ref_offset, dirty_len));
        arr_ref.store(0, val);
        assert!(range_is_dirty(s1.bitmap(), array_ref_offset, dirty_len));
    }
}