use proptest::prelude::*;
use std::cell::UnsafeCell;
use std::marker;
use wiggle::GuestMemory;
/// An ordered collection of [`MemArea`]s, kept sorted by `insert`.
#[derive(Debug, Clone)]
pub struct MemAreas(Vec<MemArea>);
impl MemAreas {
    /// Creates an empty collection.
    pub fn new() -> Self {
        MemAreas(Vec::new())
    }

    /// Inserts `a`, keeping the vector sorted.
    ///
    /// `binary_search` yields a usable insertion index in both the `Ok`
    /// (equal element exists) and `Err` (no equal element) cases, so the
    /// two arms collapse into one.
    pub fn insert(&mut self, a: MemArea) {
        let idx = match self.0.binary_search(&a) {
            Ok(i) | Err(i) => i,
        };
        self.0.insert(idx, a);
    }

    /// Iterates over the areas in sorted order.
    pub fn iter(&self) -> impl Iterator<Item = &MemArea> {
        self.0.iter()
    }
}

impl Default for MemAreas {
    fn default() -> Self {
        Self::new()
    }
}
impl<R> From<R> for MemAreas
where
    R: AsRef<[MemArea]>,
{
    /// Builds a sorted `MemAreas` from any slice-like sequence of areas.
    fn from(ms: R) -> MemAreas {
        let mut out = MemAreas::new();
        // `MemArea` is `Copy`; `.iter().copied()` replaces the
        // `into_iter()`-on-a-reference plus manual deref of the original.
        for m in ms.as_ref().iter().copied() {
            out.insert(m);
        }
        out
    }
}
/// `From` is preferred over a hand-written `Into`; the standard blanket
/// impl still provides `MemAreas: Into<Vec<MemArea>>`, so existing
/// `.into()` callers are unaffected.
impl From<MemAreas> for Vec<MemArea> {
    fn from(areas: MemAreas) -> Vec<MemArea> {
        // `areas` is consumed by value, so the inner vector can be moved
        // out directly — the original's `.clone()` was a redundant copy.
        areas.0
    }
}
/// A page-aligned, 4096-byte guest-memory backing for tests.
///
/// `UnsafeCell` provides the interior mutability needed for
/// `GuestMemory::base` to hand out a `*mut u8` from a `&self` receiver.
#[repr(align(4096))]
pub struct HostMemory {
    buffer: UnsafeCell<[u8; 4096]>,
}
impl HostMemory {
    /// Creates a zero-filled, page-aligned host memory.
    pub fn new() -> Self {
        HostMemory {
            buffer: UnsafeCell::new([0; 4096]),
        }
    }

    /// Strategy producing a `MemArea` of length `align` whose start is a
    /// multiple of `align` and which fits in the 4096-byte memory.
    ///
    /// `align` must be nonzero — the modulo below would otherwise panic
    /// with a division by zero.
    pub fn mem_area_strat(align: u32) -> BoxedStrategy<MemArea> {
        prop::num::u32::ANY
            .prop_filter_map("needs to fit in memory", move |p| {
                // Round down to the requested alignment, then wrap into
                // the 4096-byte address space.
                let p_aligned = p - (p % align);
                let ptr = p_aligned % 4096;
                // NOTE(review): `<` (not `<=`) also rejects an area ending
                // exactly at 4096; kept as-is to preserve the original,
                // deliberately conservative distribution.
                if ptr + align < 4096 {
                    Some(MemArea { ptr, len: align })
                } else {
                    None
                }
            })
            .boxed()
    }

    /// Returns the complement of `regions` within `[0, 4096)`.
    ///
    /// `regions` is sorted by start address (a `MemAreas` invariant), so a
    /// single sweep with a running `start` cursor finds every gap between
    /// consecutive regions.
    pub fn invert(regions: &MemAreas) -> MemAreas {
        let mut out = MemAreas::new();
        let mut start = 0;
        for r in regions.iter() {
            // Gap between the previous region's end and this region's start.
            let len = r.ptr - start;
            if len > 0 {
                // Reuse `len` rather than recomputing `r.ptr - start`.
                out.insert(MemArea { ptr: start, len });
            }
            start = r.ptr + r.len;
        }
        // Trailing gap up to the end of memory.
        if start < 4096 {
            out.insert(MemArea {
                ptr: start,
                len: 4096 - start,
            });
        }
        out
    }

    /// Strategy selecting a `size`-byte area that does not overlap any
    /// area in `exclude`.
    pub fn byte_slice_strat(size: u32, exclude: &MemAreas) -> BoxedStrategy<MemArea> {
        // Enumerate every candidate placement inside the free regions.
        let available: Vec<MemArea> = Self::invert(exclude)
            .iter()
            .flat_map(|a| a.inside(size))
            .collect();
        Just(available)
            .prop_filter("available memory for allocation", |a| !a.is_empty())
            .prop_flat_map(|a| prop::sample::select(a))
            .boxed()
    }
}

impl Default for HostMemory {
    fn default() -> Self {
        Self::new()
    }
}
// SAFETY: `HostMemory` owns its buffer for as long as `&self` is alive, the
// `UnsafeCell` makes producing a `*mut u8` from a shared reference legal,
// and the returned (pointer, length) pair describes exactly the 4096-byte
// array — never out-of-bounds memory.
unsafe impl GuestMemory for HostMemory {
    /// Returns the base pointer and byte length of the guest memory.
    fn base(&self) -> (*mut u8, u32) {
        unsafe {
            let ptr = self.buffer.get();
            ((*ptr).as_mut_ptr(), (*ptr).len() as u32)
        }
    }
}
/// A half-open region `[ptr, ptr + len)` of guest memory.
///
/// The derived ordering is lexicographic on `(ptr, len)`, which is what
/// keeps `MemAreas` sorted by start address.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct MemArea {
    // Start offset into the 4096-byte guest memory.
    pub ptr: u32,
    // Length of the region in bytes.
    pub len: u32,
}
impl MemArea {
    /// Reports whether `self` and `b` share at least one byte.
    ///
    /// Two half-open intervals intersect iff the larger start is below the
    /// smaller end. This replaces the original per-byte scan (O(len) over
    /// both ranges) with an O(1) comparison; empty (`len == 0`) areas
    /// overlap nothing, exactly as before.
    pub fn overlapping(&self, b: Self) -> bool {
        let start = self.ptr.max(b.ptr);
        let end = (self.ptr + self.len).min(b.ptr + b.len);
        start < end
    }

    /// Returns true when no two distinct areas in `areas` overlap.
    pub fn non_overlapping_set<M>(areas: M) -> bool
    where
        M: Into<MemAreas>,
    {
        let areas = areas.into();
        // `overlapping` is symmetric, so each unordered pair is checked
        // once instead of twice as in the original double loop.
        for (aix, a) in areas.iter().enumerate() {
            for b in areas.iter().skip(aix + 1) {
                if a.overlapping(*b) {
                    return false;
                }
            }
        }
        true
    }

    /// Iterates over every `len`-byte area starting inside `self`, at
    /// offsets `0..self.len - len` (the final position, ending exactly at
    /// `self`'s end, is excluded — preserved from the original).
    fn inside(&self, len: u32) -> impl Iterator<Item = MemArea> {
        // `saturating_sub` keeps the range empty when `len > self.len`;
        // the original achieved the same with a signed i64 detour.
        let count = self.len.saturating_sub(len);
        let start = self.ptr;
        (0..count).map(move |v| MemArea {
            ptr: start + v,
            len,
        })
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// The `#[repr(align(4096))]` guarantee must hold for both a
    /// stack-allocated and a heap-allocated `HostMemory`.
    #[test]
    fn hostmemory_is_aligned() {
        let h = HostMemory::new();
        assert_eq!(h.base().0 as usize % 4096, 0);
        let h = Box::new(h);
        assert_eq!(h.base().0 as usize % 4096, 0);
    }

    /// Spot-checks `HostMemory::invert` against hand-computed complements
    /// of the 4096-byte space.
    #[test]
    fn invert() {
        // Inverting `input` must yield exactly `expected`.
        fn invert_equality(input: &[MemArea], expected: &[MemArea]) {
            let input: MemAreas = input.into();
            let inverted: Vec<MemArea> = HostMemory::invert(&input).into();
            assert_eq!(expected, inverted.as_slice());
        }
        // Empty set: the whole memory is free.
        invert_equality(&[], &[MemArea { ptr: 0, len: 4096 }]);
        // Region at the very start.
        invert_equality(
            &[MemArea { ptr: 0, len: 1 }],
            &[MemArea { ptr: 1, len: 4095 }],
        );
        // Interior region: gaps on both sides.
        invert_equality(
            &[MemArea { ptr: 1, len: 1 }],
            &[MemArea { ptr: 0, len: 1 }, MemArea { ptr: 2, len: 4094 }],
        );
        // Region running to the end of memory.
        invert_equality(
            &[MemArea { ptr: 1, len: 4095 }],
            &[MemArea { ptr: 0, len: 1 }],
        );
        // Regions covering all of memory: nothing is free.
        invert_equality(
            &[MemArea { ptr: 0, len: 1 }, MemArea { ptr: 1, len: 4095 }],
            &[],
        );
        // Two disjoint regions: three gaps.
        invert_equality(
            &[MemArea { ptr: 1, len: 2 }, MemArea { ptr: 4, len: 1 }],
            &[
                MemArea { ptr: 0, len: 1 },
                MemArea { ptr: 3, len: 1 },
                MemArea { ptr: 5, len: 4091 },
            ],
        );
    }

    /// Strategy producing three pairwise-disjoint slices of sizes `s1`,
    /// `s2`, `s3`: each chosen slice is excluded from the next draw.
    fn set_of_slices_strat(
        s1: u32,
        s2: u32,
        s3: u32,
    ) -> BoxedStrategy<(MemArea, MemArea, MemArea)> {
        HostMemory::byte_slice_strat(s1, &MemAreas::new())
            .prop_flat_map(move |a1| {
                (
                    Just(a1),
                    HostMemory::byte_slice_strat(s2, &MemAreas::from(&[a1])),
                )
            })
            .prop_flat_map(move |(a1, a2)| {
                (
                    Just(a1),
                    Just(a2),
                    HostMemory::byte_slice_strat(s3, &MemAreas::from(&[a1, a2])),
                )
            })
            .boxed()
    }

    /// `MemArea::inside` must yield at least one candidate for a
    /// comfortably large enclosing area.
    #[test]
    fn trivial_inside() {
        let a = MemArea { ptr: 24, len: 4072 };
        let interior = a.inside(24).collect::<Vec<_>>();
        assert!(interior.len() > 0);
    }

    proptest! {
        // Every area yielded by `inside` lies within `r` and stays
        // disjoint from `r`'s complement.
        #[test]
        fn inside(r in HostMemory::mem_area_strat(123)) {
            let set_of_r = MemAreas::from(&[r]);
            let exterior = HostMemory::invert(&set_of_r);
            let interior = r.inside(22);
            for i in interior {
                assert!(r.overlapping(i));
                assert!(i.ptr >= r.ptr);
                assert!(r.ptr + r.len >= i.ptr + i.len);
                let mut all = exterior.clone();
                all.insert(i);
                assert!(MemArea::non_overlapping_set(all));
            }
        }
        // Slices drawn by `set_of_slices_strat` are pairwise disjoint.
        #[test]
        fn byte_slices((s1, s2, s3) in set_of_slices_strat(12, 34, 56)) {
            let all = MemAreas::from(&[s1, s2, s3]);
            assert!(MemArea::non_overlapping_set(all));
        }
    }
}
use std::cell::RefCell;
use wiggle::GuestError;
/// Minimal test context: records guest errors for later inspection.
pub struct WasiCtx<'a> {
    // Errors pushed by `impl_errno!`'s `from_error`.
    pub guest_errors: RefCell<Vec<GuestError>>,
    // Carries the `'a` lifetime without storing an actual reference.
    lifetime: marker::PhantomData<&'a ()>,
}
impl<'a> WasiCtx<'a> {
pub fn new() -> Self {
Self {
guest_errors: RefCell::new(vec![]),
lifetime: marker::PhantomData,
}
}
}
/// Implements `wiggle::GuestErrorType` for an errno enum that has `Ok` and
/// `InvalidArg` variants, logging each guest error into the `WasiCtx`.
#[macro_export]
macro_rules! impl_errno {
    ( $errno:ty ) => {
        impl<'a> wiggle::GuestErrorType<'a> for $errno {
            type Context = WasiCtx<'a>;
            fn success() -> $errno {
                <$errno>::Ok
            }
            fn from_error(e: GuestError, ctx: &WasiCtx) -> $errno {
                eprintln!("GUEST ERROR: {:?}", e);
                ctx.guest_errors.borrow_mut().push(e);
                // Use the macro parameter rather than the hard-coded
                // `types::Errno::InvalidArg` of the original: the old form
                // only type-checked when `$errno` *was* `types::Errno`, so
                // this is strictly more general and backward compatible.
                <$errno>::InvalidArg
            }
        }
    };
}