//! Abstractions for tracking the areas of guest memory that have been dirtied by writes.

#[cfg(any(test, feature = "backend-bitmap"))]
mod backend;

use std::fmt::Debug;

use crate::{GuestMemory, GuestMemoryRegion};

#[cfg(any(test, feature = "backend-bitmap"))]
pub use backend::{ArcSlice, AtomicBitmap, RefSlice};

/// Trait implemented by types that support creating `BitmapSlice` objects.
pub trait WithBitmapSlice<'a> {
    /// Type of the bitmap slice.
    type S: BitmapSlice;
}

/// A bitmap slice: a `Bitmap` that is also `Clone` and `Debug`, and whose own slices
/// have the same type as itself.
pub trait BitmapSlice: Bitmap + Clone + Debug + for<'a> WithBitmapSlice<'a, S = Self> {}

/// Common bitmap operations used for dirty tracking.
pub trait Bitmap: for<'a> WithBitmapSlice<'a> {
    /// Mark the memory range specified by the given `offset` and `len` as dirtied.
    fn mark_dirty(&self, offset: usize, len: usize);

    /// Check whether the specified `offset` is marked as dirty.
    fn dirty_at(&self, offset: usize) -> bool;

    /// Return a bitmap slice of `self` starting at the specified `offset`.
    fn slice_at(&self, offset: usize) -> <Self as WithBitmapSlice>::S;
}

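// The `()` implementations below provide a no-op bitmap for configurations that do not
// need dirty tracking: `mark_dirty` is ignored and every offset is reported as clean.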
impl WithBitmapSlice<'_> for () {
    type S = Self;
}

impl BitmapSlice for () {}

impl Bitmap for () {
    fn mark_dirty(&self, _offset: usize, _len: usize) {}

    fn dirty_at(&self, _offset: usize) -> bool {
        false
    }

    fn slice_at(&self, _offset: usize) -> Self {}
}

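// `Option<B>` delegates to the inner bitmap when it is `Some`, and behaves like the
// no-op bitmap (every offset clean) when it is `None`.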
impl<'a, B> WithBitmapSlice<'a> for Option<B>
where
    B: WithBitmapSlice<'a>,
{
    type S = Option<B::S>;
}

impl<B: BitmapSlice> BitmapSlice for Option<B> {}

impl<B: Bitmap> Bitmap for Option<B> {
    fn mark_dirty(&self, offset: usize, len: usize) {
        if let Some(inner) = self {
            inner.mark_dirty(offset, len)
        }
    }

    fn dirty_at(&self, offset: usize) -> bool {
        if let Some(inner) = self {
            return inner.dirty_at(offset);
        }
        false
    }

    fn slice_at(&self, offset: usize) -> Option<<B as WithBitmapSlice>::S> {
        if let Some(inner) = self {
            return Some(inner.slice_at(offset));
        }
        None
    }
}

/// Helper type alias for the bitmap slice produced by `B` for the lifetime `'a`.
pub type BS<'a, B> = <B as WithBitmapSlice<'a>>::S;

/// Helper type alias for the bitmap slice associated with the memory regions of `M`.
pub type MS<'a, M> = BS<'a, <<M as GuestMemory>::R as GuestMemoryRegion>::B>;

#[cfg(test)]
pub(crate) mod tests {
    use super::*;

    use std::io::Cursor;
    use std::marker::PhantomData;
    use std::mem::size_of_val;
    use std::result::Result;
    use std::sync::atomic::Ordering;

    use crate::{Bytes, VolatileMemory};
    #[cfg(feature = "backend-mmap")]
    use crate::{GuestAddress, MemoryRegionAddress};

    pub fn range_is_clean<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| !b.dirty_at(offset))
    }

    pub fn range_is_dirty<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| b.dirty_at(offset))
    }

    pub fn check_range<B: Bitmap>(b: &B, start: usize, len: usize, clean: bool) -> bool {
        if clean {
            range_is_clean(b, start, len)
        } else {
            range_is_dirty(b, start, len)
        }
    }

    pub fn test_bitmap<B: Bitmap>(b: &B) {
        let len = 0x800;
        let dirty_offset = 0x400;
        let dirty_len = 0x100;

        // A slice shares state with the bitmap it was taken from, so marking the parent
        // dirty must also be visible through the slice.
        let s = b.slice_at(dirty_offset);

        assert!(range_is_clean(b, 0, len));
        assert!(range_is_clean(&s, 0, dirty_len));

        b.mark_dirty(dirty_offset, dirty_len);
        assert!(range_is_dirty(b, dirty_offset, dirty_len));
        assert!(range_is_dirty(&s, 0, dirty_len));
    }

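    // A minimal sketch exercising only the no-op `()` bitmap and the `Option<B>` wrapper
    // defined above; it uses nothing outside this module.
    #[test]
    fn test_noop_and_option_bitmap() {
        // `()` ignores `mark_dirty` and never reports an offset as dirty.
        ().mark_dirty(0, 0x100);
        assert!(!().dirty_at(0));
        assert!(range_is_clean(&(), 0, 0x100));

        // `None` behaves like the no-op bitmap, while `Some(inner)` delegates to `inner`.
        let b: Option<()> = None;
        b.mark_dirty(0, 0x100);
        assert!(range_is_clean(&b, 0, 0x100));

        let b = Some(());
        b.mark_dirty(0, 0x100);
        assert!(!b.dirty_at(0));
    }
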
    #[derive(Debug)]
    pub enum TestAccessError {
        RangeCleanCheck,
        RangeDirtyCheck,
    }

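    // Helper that bundles the range-check and address-translation closures used by
    // `test_bytes` when exercising the various `Bytes` methods.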
    struct BytesHelper<F, G, M> {
        check_range_fn: F,
        address_fn: G,
        phantom: PhantomData<*const M>,
    }

    impl<F, G, M, A> BytesHelper<F, G, M>
    where
        F: Fn(&M, usize, usize, bool) -> bool,
        G: Fn(usize) -> A,
        M: Bytes<A>,
    {
        fn check_range(&self, m: &M, start: usize, len: usize, clean: bool) -> bool {
            (self.check_range_fn)(m, start, len, clean)
        }

        fn address(&self, offset: usize) -> A {
            (self.address_fn)(offset)
        }

        // Checks that the range is clean, runs `op`, then checks that the same range is dirty.
        fn test_access<Op>(
            &self,
            bytes: &M,
            dirty_offset: usize,
            dirty_len: usize,
            op: Op,
        ) -> Result<(), TestAccessError>
        where
            Op: Fn(&M, A),
        {
            if !self.check_range(bytes, dirty_offset, dirty_len, true) {
                return Err(TestAccessError::RangeCleanCheck);
            }

            op(bytes, self.address(dirty_offset));

            if !self.check_range(bytes, dirty_offset, dirty_len, false) {
                return Err(TestAccessError::RangeDirtyCheck);
            }

            Ok(())
        }
    }

    pub fn test_bytes<F, G, M, A>(bytes: &M, check_range_fn: F, address_fn: G, step: usize)
    where
        F: Fn(&M, usize, usize, bool) -> bool,
        G: Fn(usize) -> A,
        A: Copy,
        M: Bytes<A>,
        <M as Bytes<A>>::E: Debug,
    {
        const BUF_SIZE: usize = 1024;
        let buf = vec![1u8; BUF_SIZE];

        let val = 1u64;

        let h = BytesHelper {
            check_range_fn,
            address_fn,
            phantom: PhantomData,
        };

        let mut dirty_offset = 0x1000;

        // Check `write`.
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            assert_eq!(m.write(buf.as_slice(), addr).unwrap(), BUF_SIZE)
        })
        .unwrap();
        dirty_offset += step;

        // Check `write_slice`.
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            m.write_slice(buf.as_slice(), addr).unwrap()
        })
        .unwrap();
        dirty_offset += step;

        // Check `write_obj`.
        h.test_access(bytes, dirty_offset, size_of_val(&val), |m, addr| {
            m.write_obj(val, addr).unwrap()
        })
        .unwrap();
        dirty_offset += step;

        // Check `read_from`.
        #[allow(deprecated)]
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            assert_eq!(
                m.read_from(addr, &mut Cursor::new(&buf), BUF_SIZE).unwrap(),
                BUF_SIZE
            )
        })
        .unwrap();
        dirty_offset += step;

        // Check `read_exact_from`.
        #[allow(deprecated)]
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            m.read_exact_from(addr, &mut Cursor::new(&buf), BUF_SIZE)
                .unwrap()
        })
        .unwrap();
        dirty_offset += step;

        // Check `store`.
        h.test_access(bytes, dirty_offset, size_of_val(&val), |m, addr| {
            m.store(val, addr, Ordering::Relaxed).unwrap()
        })
        .unwrap();
    }

    #[cfg(feature = "backend-mmap")]
    fn test_guest_memory_region<R: GuestMemoryRegion>(region: &R) {
        let dirty_addr = MemoryRegionAddress(0x0);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let slice = region.get_slice(dirty_addr, dirty_len).unwrap();

        assert!(range_is_clean(region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        region.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            region.bitmap(),
            dirty_addr.0 as usize,
            dirty_len
        ));

        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_bytes(
            region,
            |r: &R, start: usize, len: usize, clean: bool| {
                check_range(r.bitmap(), start, len, clean)
            },
            |offset| MemoryRegionAddress(offset as u64),
            0x1000,
        );
    }

    #[cfg(feature = "backend-mmap")]
    pub fn test_guest_memory_and_region<M, F>(f: F)
    where
        M: GuestMemory,
        F: Fn() -> M,
    {
        let m = f();
        let dirty_addr = GuestAddress(0x1000);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let (region, region_addr) = m.to_region_addr(dirty_addr).unwrap();
        let slice = m.get_slice(dirty_addr, dirty_len).unwrap();

        assert!(range_is_clean(region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        m.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            region.bitmap(),
            region_addr.0 as usize,
            dirty_len
        ));

        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        // Run the region-level checks on a fresh (still clean) memory object.
        test_guest_memory_region(f().find_region(GuestAddress(0)).unwrap());

        // Check an arbitrary range via `try_access`, which may span multiple regions.
        let check_range_closure = |m: &M, start: usize, len: usize, clean: bool| -> bool {
            let mut check_result = true;
            m.try_access(len, GuestAddress(start as u64), |_, size, reg_addr, reg| {
                if !check_range(reg.bitmap(), reg_addr.0 as usize, size, clean) {
                    check_result = false;
                }
                Ok(size)
            })
            .unwrap();

            check_result
        };

        test_bytes(
            &f(),
            check_range_closure,
            |offset| GuestAddress(offset as u64),
            0x1000,
        );
    }

    pub fn test_volatile_memory<M: VolatileMemory>(m: &M) {
        assert!(m.len() >= 0x8000);

        let dirty_offset = 0x1000;
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let get_ref_offset = 0x2000;
        let array_ref_offset = 0x3000;

        let s1 = m.as_volatile_slice();
        let s2 = m.get_slice(dirty_offset, dirty_len).unwrap();

        assert!(range_is_clean(s1.bitmap(), 0, s1.len()));
        assert!(range_is_clean(s2.bitmap(), 0, s2.len()));

        s1.write_obj(val, dirty_offset).unwrap();

        assert!(range_is_dirty(s1.bitmap(), dirty_offset, dirty_len));
        assert!(range_is_dirty(s2.bitmap(), 0, dirty_len));

        let v_ref = m.get_ref::<u64>(get_ref_offset).unwrap();
        assert!(range_is_clean(s1.bitmap(), get_ref_offset, dirty_len));
        v_ref.store(val);
        assert!(range_is_dirty(s1.bitmap(), get_ref_offset, dirty_len));

        let arr_ref = m.get_array_ref::<u64>(array_ref_offset, 1).unwrap();
        assert!(range_is_clean(s1.bitmap(), array_ref_offset, dirty_len));
        arr_ref.store(0, val);
        assert!(range_is_dirty(s1.bitmap(), array_ref_offset, dirty_len));
    }
}