//! Types for tracking the memory ranges dirtied by writes, at a granularity
//! decided by each `Bitmap` implementation; the unit type `()` disables tracking.
#[cfg(feature = "backend-bitmap")]
mod backend;

use std::fmt::Debug;

use crate::{GuestMemoryBackend, GuestMemoryRegion};

#[cfg(feature = "backend-bitmap")]
pub use backend::{ArcSlice, AtomicBitmap, RefSlice};
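
/// Helper trait that emulates a lifetime-generic associated type: for each
/// lifetime `'a`, `S` is the bitmap slice type handed out for that lifetime.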
pub trait WithBitmapSlice<'a> {
    /// The type of the bitmap slice.
    type S: BitmapSlice;
}
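
/// A cheaply clonable view into a `Bitmap`; slicing a `BitmapSlice` yields
/// the same slice type again.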
pub trait BitmapSlice: Bitmap + Clone + Debug + for<'a> WithBitmapSlice<'a, S = Self> {}
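
/// Common abstraction for objects that track the dirty state of a memory range.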
pub trait Bitmap: for<'a> WithBitmapSlice<'a> {
    /// Mark the range specified by `offset` and `len` as dirtied.
    fn mark_dirty(&self, offset: usize, len: usize);
    /// Check whether the byte at `offset` is marked as dirty.
    fn dirty_at(&self, offset: usize) -> bool;
    /// Return a bitmap slice starting at `offset`.
    fn slice_at(&self, offset: usize) -> <Self as WithBitmapSlice<'_>>::S;
}
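
/// A `Bitmap` that can be created with a specified length.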
pub trait NewBitmap: Bitmap + Default {
    /// Create a new bitmap able to track a range of `len` bytes.
    fn with_len(len: usize) -> Self;
}
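
// The unit type implements all bitmap traits as no-ops, for use when dirty
// tracking is not required.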
impl WithBitmapSlice<'_> for () {
    type S = Self;
}

impl BitmapSlice for () {}

impl Bitmap for () {
    fn mark_dirty(&self, _offset: usize, _len: usize) {}

    fn dirty_at(&self, _offset: usize) -> bool {
        false
    }

    fn slice_at(&self, _offset: usize) -> Self {}
}

impl NewBitmap for () {
    fn with_len(_len: usize) -> Self {}
}
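
// `Option<B>` lets dirty tracking be switched on or off at runtime: `Some`
// forwards every call to the inner bitmap, while `None` behaves like the
// no-op implementation above.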
impl<'a, B> WithBitmapSlice<'a> for Option<B>
where
    B: WithBitmapSlice<'a>,
{
    type S = Option<B::S>;
}

impl<B: BitmapSlice> BitmapSlice for Option<B> {}

impl<B: Bitmap> Bitmap for Option<B> {
    fn mark_dirty(&self, offset: usize, len: usize) {
        if let Some(inner) = self {
            inner.mark_dirty(offset, len)
        }
    }

    fn dirty_at(&self, offset: usize) -> bool {
        if let Some(inner) = self {
            return inner.dirty_at(offset);
        }
        false
    }

    fn slice_at(&self, offset: usize) -> Option<<B as WithBitmapSlice<'_>>::S> {
        if let Some(inner) = self {
            return Some(inner.slice_at(offset));
        }
        None
    }
}
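
/// The bitmap slice type associated with `B` for lifetime `'a`.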
pub type BS<'a, B> = <B as WithBitmapSlice<'a>>::S;
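
/// The bitmap slice type associated with the regions of the memory backend `M`.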
pub type MS<'a, M> = BS<'a, <<M as GuestMemoryBackend>::R as GuestMemoryRegion>::B>;
#[cfg(test)]
#[cfg(feature = "backend-bitmap")]
pub(crate) mod tests {
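    //! Helpers for testing dirty tracking; shared with the bitmap backend and
    //! other crate-level tests, and compiled only with `backend-bitmap` enabled.
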
    use super::*;

    use std::mem::size_of_val;
    use std::sync::atomic::Ordering;

    use crate::{Bytes, VolatileMemory};
    #[cfg(feature = "backend-mmap")]
    use crate::{GuestAddress, MemoryRegionAddress};

    // Checks that every offset in `start..start + len` is clean.
    pub fn range_is_clean<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| !b.dirty_at(offset))
    }

    // Checks that every offset in `start..start + len` is dirty.
    pub fn range_is_dirty<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| b.dirty_at(offset))
    }

    pub fn check_range<B: Bitmap>(b: &B, start: usize, len: usize, clean: bool) -> bool {
        if clean {
            range_is_clean(b, start, len)
        } else {
            range_is_dirty(b, start, len)
        }
    }
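
    // Small illustration of the `BS<'a, B>` alias: the slice returned by
    // `slice_at` is tied to the borrow of the bitmap it came from. The no-op
    // `()` bitmap is enough to make this compile, and keeps every offset clean.
    #[test]
    fn test_bs_alias() {
        fn slice_of<'a, B: Bitmap>(b: &'a B, offset: usize) -> BS<'a, B> {
            b.slice_at(offset)
        }

        let b = ();
        let s = slice_of(&b, 0x100);
        assert!(!s.dirty_at(0));
    }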

    // Exercises the basic `Bitmap` contract: a fresh bitmap reports the whole
    // range as clean, and marking a range dirty is observable both through the
    // bitmap itself and through a slice created beforehand.
    pub fn test_bitmap<B: Bitmap>(b: &B) {
        let len = 0x800;
        let dirty_offset = 0x400;
        let dirty_len = 0x100;

        // Create the slice before dirtying, so it must observe later changes.
        let s = b.slice_at(dirty_offset);

        assert!(range_is_clean(b, 0, len));
        assert!(range_is_clean(&s, 0, dirty_len));

        b.mark_dirty(dirty_offset, dirty_len);

        assert!(range_is_dirty(b, dirty_offset, dirty_len));
        assert!(range_is_dirty(&s, 0, dirty_len));
    }
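
    // Sanity check of the no-op implementations: both `()` and a disabled
    // `Option` bitmap ignore `mark_dirty` and never report anything as dirty.
    #[test]
    fn test_noop_bitmaps() {
        let b = ();
        b.mark_dirty(0, 0x100);
        assert!(range_is_clean(&b, 0, 0x100));

        let opt: Option<()> = None;
        opt.mark_dirty(0, 0x100);
        assert!(range_is_clean(&opt, 0, 0x100));
        assert!(opt.slice_at(0).is_none());
    }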

    // Checks that every `Bytes` write method (`write`, `write_slice`,
    // `write_obj`, and the atomic `store`) dirties the range it touches, as
    // seen through `check_range_fn`. `step` gives the distance between the
    // addresses used for consecutive writes.
    pub fn test_bytes<F, G, M, A>(bytes: &M, check_range_fn: F, address_fn: G, step: usize)
    where
        F: Fn(&M, usize, usize, bool) -> bool,
        G: Fn(usize) -> A,
        M: Bytes<A>,
        <M as Bytes<A>>::E: Debug,
    {
        const BUF_SIZE: usize = 1024;
        let buf = vec![1u8; BUF_SIZE];

        let mut dirty_offset = 0x1000;
        let val = 1u64;

        // Check `write`.
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        assert_eq!(
            bytes
                .write(buf.as_slice(), address_fn(dirty_offset))
                .unwrap(),
            BUF_SIZE
        );
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));

        // Check `write_slice`.
        dirty_offset += step;
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        bytes
            .write_slice(buf.as_slice(), address_fn(dirty_offset))
            .unwrap();
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));

        // Check `write_obj`.
        dirty_offset += step;
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        bytes.write_obj(val, address_fn(dirty_offset)).unwrap();
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));

        // Check `store`.
        dirty_offset += step;
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, true));
        bytes
            .store(val, address_fn(dirty_offset), Ordering::Relaxed)
            .unwrap();
        assert!(check_range_fn(bytes, dirty_offset, BUF_SIZE, false));
    }

    // Checks that a write through a region is reflected both in the region's
    // bitmap and in a slice of the region obtained beforehand.
    #[cfg(feature = "backend-mmap")]
    fn test_guest_memory_region<R: GuestMemoryRegion>(region: &R) {
        let dirty_addr = MemoryRegionAddress(0x0);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let slice = region.get_slice(dirty_addr, dirty_len).unwrap();

        assert!(range_is_clean(&region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        region.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            &region.bitmap(),
            dirty_addr.0 as usize,
            dirty_len
        ));
        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_bytes(
            region,
            |r: &R, start: usize, len: usize, clean: bool| {
                check_range(&r.bitmap(), start, len, clean)
            },
            |offset| MemoryRegionAddress(offset as u64),
            0x1000,
        );
    }

    // Checks that writes through a `GuestMemoryBackend` object built by `f`
    // are reflected in the bitmaps of the region and slice backing the written
    // range, then reruns the region-level and `Bytes`-level checks.
    #[cfg(feature = "backend-mmap")]
    pub fn test_guest_memory_and_region<M, F>(f: F)
    where
        M: GuestMemoryBackend,
        F: Fn() -> M,
    {
        let m = f();
        let dirty_addr = GuestAddress(0x1000);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let (region, region_addr) = m.to_region_addr(dirty_addr).unwrap();

        // The written range must be covered by a single slice here.
        let mut slices = m.get_slices(dirty_addr, dirty_len);
        let slice = slices.next().unwrap().unwrap();
        assert!(slices.next().is_none());

        assert!(range_is_clean(&region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        m.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            &region.bitmap(),
            region_addr.0 as usize,
            dirty_len
        ));
        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_guest_memory_region(f().find_region(GuestAddress(0)).unwrap());

        let check_range_closure = |m: &M, start: usize, len: usize, clean: bool| -> bool {
            m.get_slices(GuestAddress(start as u64), len).all(|r| {
                let slice = r.unwrap();
                check_range(slice.bitmap(), 0, slice.len(), clean)
            })
        };

        test_bytes(
            &f(),
            check_range_closure,
            |offset| GuestAddress(offset as u64),
            0x1000,
        );
    }

    // Checks that writes through the `VolatileMemory` accessors (plain slices,
    // `get_ref`, and `get_array_ref`) are all reflected in the bitmap. The
    // memory object must be at least 0x8000 bytes long.
    pub fn test_volatile_memory<M: VolatileMemory>(m: &M) {
        assert!(m.len() >= 0x8000);

        let dirty_offset = 0x1000;
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let get_ref_offset = 0x2000;
        let array_ref_offset = 0x3000;

        let s1 = m.as_volatile_slice();
        let s2 = m.get_slice(dirty_offset, dirty_len).unwrap();

        assert!(range_is_clean(s1.bitmap(), 0, s1.len()));
        assert!(range_is_clean(s2.bitmap(), 0, s2.len()));

        s1.write_obj(val, dirty_offset).unwrap();
        assert!(range_is_dirty(s1.bitmap(), dirty_offset, dirty_len));
        assert!(range_is_dirty(s2.bitmap(), 0, dirty_len));

        let v_ref = m.get_ref::<u64>(get_ref_offset).unwrap();
        assert!(range_is_clean(s1.bitmap(), get_ref_offset, dirty_len));
        v_ref.store(val);
        assert!(range_is_dirty(s1.bitmap(), get_ref_offset, dirty_len));

        let arr_ref = m.get_array_ref::<u64>(array_ref_offset, 1).unwrap();
        assert!(range_is_clean(s1.bitmap(), array_ref_offset, dirty_len));
        arr_ref.store(0, val);
        assert!(range_is_dirty(s1.bitmap(), array_ref_offset, dirty_len));
    }
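
    // A hedged end-to-end sketch against a concrete backend. It assumes
    // `AtomicBitmap` (re-exported above under `backend-bitmap`) implements
    // `NewBitmap`, so it can be built via `with_len`; the assertions only rely
    // on the generic trait contract, not on a particular dirty granularity.
    #[test]
    fn test_atomic_bitmap_with_helpers() {
        let b = AtomicBitmap::with_len(0x2000);
        test_bitmap(&b);

        // `Some` forwards to the inner bitmap, unlike the `None` case above.
        let opt = Some(AtomicBitmap::with_len(0x2000));
        assert!(range_is_clean(&opt, 0, 0x2000));
        opt.mark_dirty(0x1000, 0x100);
        assert!(range_is_dirty(&opt, 0x1000, 0x100));
    }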
}