use std::cmp::min;
use std::marker::PhantomData;
use std::mem::size_of;
use std::ptr::copy;
use std::ptr::read_volatile;
use std::ptr::write_bytes;
use std::ptr::write_volatile;
use std::result;
use std::slice;
use remain::sorted;
use thiserror::Error;
use crate::sys::IoBufMut;
use crate::DataInit;

/// Errors for volatile memory operations.
#[sorted]
#[derive(Error, Eq, PartialEq, Debug)]
pub enum VolatileMemoryError {
    /// `addr` is out of bounds of the volatile memory.
    #[error("address 0x{addr:x} is out of bounds")]
    OutOfBounds { addr: usize },
    /// Taking a slice at `base` with `offset` would overflow `usize`.
    #[error("address 0x{base:x} offset by 0x{offset:x} would overflow")]
    Overflow { base: usize, offset: usize },
}

/// Result of volatile memory operations.
pub type VolatileMemoryResult<T> = result::Result<T, VolatileMemoryError>;

use crate::VolatileMemoryError as Error;
type Result<T> = VolatileMemoryResult<T>;

/// Returns `base + offset` if the sum does not overflow.
pub fn calc_offset(base: usize, offset: usize) -> Result<usize> {
    match base.checked_add(offset) {
        None => Err(Error::Overflow { base, offset }),
        Some(m) => Ok(m),
    }
}
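
// A minimal usage sketch for `calc_offset` (illustrative only; the concrete
// values are arbitrary examples, not from the original module):
//
//     assert_eq!(calc_offset(0x1000, 0x20).unwrap(), 0x1020);
//     assert!(calc_offset(usize::MAX, 1).is_err()); // the sum would wrap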

/// Types that support raw, volatile access to their memory.
pub trait VolatileMemory {
    /// Gets a slice of memory at `offset` that is `count` bytes in length and
    /// supports volatile access.
    fn get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice>;

    /// Gets a `VolatileRef` of type `T` at `offset`.
    fn get_ref<T: DataInit>(&self, offset: usize) -> Result<VolatileRef<T>> {
        let slice = self.get_slice(offset, size_of::<T>())?;
        Ok(VolatileRef {
            addr: slice.as_mut_ptr() as *mut T,
            phantom: PhantomData,
        })
    }
}
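
// Usage sketch (illustrative): implementors of `VolatileMemory` get typed
// access for free through the provided `get_ref`, which is built on top of
// `get_slice`:
//
//     let mut bytes = [0u8; 4];
//     let slice = VolatileSlice::new(&mut bytes[..]);
//     let r = slice.get_ref::<u32>(0).unwrap();
//     r.store(0x1234_5678u32);
//     assert_eq!(r.load(), 0x1234_5678);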

/// A slice of raw memory that supports volatile access.
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct VolatileSlice<'a>(IoBufMut<'a>);

impl<'a> VolatileSlice<'a> {
    /// Creates a slice of raw memory that must support volatile access.
    pub fn new(buf: &mut [u8]) -> VolatileSlice {
        VolatileSlice(IoBufMut::new(buf))
    }

    /// Creates a `VolatileSlice` from a pointer and a length.
    ///
    /// # Safety
    /// `addr` must be valid for reads and writes of `len` bytes for the
    /// entire lifetime `'a`.
    pub unsafe fn from_raw_parts(addr: *mut u8, len: usize) -> VolatileSlice<'a> {
        VolatileSlice(IoBufMut::from_raw_parts(addr, len))
    }

    /// Gets a const pointer to this slice's memory.
    pub fn as_ptr(&self) -> *const u8 {
        self.0.as_ptr()
    }

    /// Gets a mutable pointer to this slice's memory.
    pub fn as_mut_ptr(&self) -> *mut u8 {
        self.0.as_mut_ptr()
    }

    /// Gets the size of this slice.
    pub fn size(&self) -> usize {
        self.0.len()
    }

    /// Advances the starting position of this slice by `count` bytes,
    /// shrinking its length accordingly.
    pub fn advance(&mut self, count: usize) {
        self.0.advance(count)
    }

    /// Shortens the slice to `len` bytes; has no effect if `len` is greater
    /// than the current length.
    pub fn truncate(&mut self, len: usize) {
        self.0.truncate(len)
    }

    /// Returns this `VolatileSlice` as an `IoBufMut`.
    pub fn as_iobuf(&self) -> &IoBufMut {
        &self.0
    }

    /// Converts a slice of `VolatileSlice`s into a slice of `IoBufMut`s.
    #[allow(clippy::wrong_self_convention)]
    pub fn as_iobufs<'mem, 'slice>(
        iovs: &'slice [VolatileSlice<'mem>],
    ) -> &'slice [IoBufMut<'mem>] {
        // Safe because `VolatileSlice` is `#[repr(transparent)]` over
        // `IoBufMut`, so the two slice types have identical layout.
        unsafe { slice::from_raw_parts(iovs.as_ptr() as *const IoBufMut, iovs.len()) }
    }

    /// Creates a copy of this slice with the address increased by `count`
    /// bytes and the size reduced by `count` bytes.
    pub fn offset(self, count: usize) -> Result<VolatileSlice<'a>> {
        let new_addr = (self.as_mut_ptr() as usize).checked_add(count).ok_or(
            VolatileMemoryError::Overflow {
                base: self.as_mut_ptr() as usize,
                offset: count,
            },
        )?;
        let new_size = self
            .size()
            .checked_sub(count)
            .ok_or(VolatileMemoryError::OutOfBounds { addr: new_addr })?;
        // Safe because the new slice has the same lifetime and points to a
        // subset of the memory of the original slice.
        unsafe { Ok(VolatileSlice::from_raw_parts(new_addr as *mut u8, new_size)) }
    }

    /// Returns a subslice of this slice starting at `offset` and `count`
    /// bytes in length.
    pub fn sub_slice(self, offset: usize, count: usize) -> Result<VolatileSlice<'a>> {
        let mem_end = calc_offset(offset, count)?;
        if mem_end > self.size() {
            return Err(Error::OutOfBounds { addr: mem_end });
        }
        let new_addr = (self.as_mut_ptr() as usize).checked_add(offset).ok_or(
            VolatileMemoryError::Overflow {
                base: self.as_mut_ptr() as usize,
                offset,
            },
        )?;
        // Safe because we have verified that the new slice is a subset of the
        // original slice.
        Ok(unsafe { VolatileSlice::from_raw_parts(new_addr as *mut u8, count) })
    }

    /// Sets each byte of this slice to `value`.
    pub fn write_bytes(&self, value: u8) {
        // Safe because the pointer and length are derived from this slice and
        // are therefore valid for writes of `size()` bytes.
        unsafe {
            write_bytes(self.as_mut_ptr(), value, self.size());
        }
    }

    /// Copies `self.size()` or `buf.len()` times the size of `T` bytes,
    /// whichever is smaller, from this slice to `buf`.
    pub fn copy_to<T>(&self, buf: &mut [T])
    where
        T: DataInit,
    {
        let mut addr = self.as_mut_ptr() as *const u8;
        for v in buf.iter_mut().take(self.size() / size_of::<T>()) {
            // Safe because the loop is bounded by both `buf` and the number
            // of whole `T`s that fit in this slice.
            unsafe {
                *v = read_volatile(addr as *const T);
                addr = addr.add(size_of::<T>());
            }
        }
    }

    /// Copies `self.size()` or `slice.size()` bytes, whichever is smaller,
    /// from this slice to `slice`.
    pub fn copy_to_volatile_slice(&self, slice: VolatileSlice) {
        // Safe because the copy length is clamped to the smaller of the two
        // slices, so both pointers are valid for that many bytes.
        unsafe {
            copy(
                self.as_mut_ptr() as *const u8,
                slice.as_mut_ptr(),
                min(self.size(), slice.size()),
            );
        }
    }

    /// Copies `self.size()` or `buf.len()` times the size of `T` bytes,
    /// whichever is smaller, from `buf` to this slice.
    pub fn copy_from<T>(&self, buf: &[T])
    where
        T: DataInit,
    {
        let mut addr = self.as_mut_ptr();
        for &v in buf.iter().take(self.size() / size_of::<T>()) {
            // Safe because the loop is bounded by both `buf` and the number
            // of whole `T`s that fit in this slice.
            unsafe {
                write_volatile(addr as *mut T, v);
                addr = addr.add(size_of::<T>());
            }
        }
    }
}

impl<'a> VolatileMemory for VolatileSlice<'a> {
    fn get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice> {
        self.sub_slice(offset, count)
    }
}

/// A memory location that supports volatile access of a `T`.
#[derive(Debug)]
pub struct VolatileRef<'a, T: DataInit>
where
    T: 'a,
{
    addr: *mut T,
    phantom: PhantomData<&'a T>,
}

impl<'a, T: DataInit> VolatileRef<'a, T> {
    /// Creates a reference to raw memory that must support volatile access of
    /// `T`-sized chunks.
    ///
    /// # Safety
    /// `addr` must be valid for reads and writes of `size_of::<T>()` bytes
    /// for the entire lifetime `'a`.
    pub unsafe fn new(addr: *mut T) -> VolatileRef<'a, T> {
        VolatileRef {
            addr,
            phantom: PhantomData,
        }
    }

    /// Gets the mutable pointer backing this reference.
    pub fn as_mut_ptr(&self) -> *mut T {
        self.addr
    }

    /// Gets the size of the referenced type, which is always `size_of::<T>()`.
    pub fn size(&self) -> usize {
        size_of::<T>()
    }

    /// Does a volatile write of the value `v` to the address of this ref.
    #[inline(always)]
    pub fn store(&self, v: T) {
        // Safe because the constructor guarantees `addr` is valid for writes.
        unsafe { write_volatile(self.addr, v) };
    }

    /// Does a volatile read of the value at the address of this ref.
    #[inline(always)]
    pub fn load(&self) -> T {
        // Safe because the constructor guarantees `addr` is valid for reads.
        unsafe { read_volatile(self.addr) }
    }

    /// Converts this `T` reference to a raw slice with the same size and
    /// address.
    pub fn to_slice(&self) -> VolatileSlice<'a> {
        // Safe because this ref and the resulting slice cover the same memory
        // with the same lifetime.
        unsafe { VolatileSlice::from_raw_parts(self.as_mut_ptr() as *mut u8, self.size()) }
    }
}

#[cfg(test)]
mod tests {
    use std::sync::Arc;
    use std::sync::Barrier;
    use std::thread::spawn;

    use super::*;

    #[derive(Clone)]
    struct VecMem {
        mem: Arc<Vec<u8>>,
    }

    impl VecMem {
        fn new(size: usize) -> VecMem {
            let mut mem = Vec::new();
            mem.resize(size, 0);
            VecMem { mem: Arc::new(mem) }
        }
    }

    impl VolatileMemory for VecMem {
        fn get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice> {
            let mem_end = calc_offset(offset, count)?;
            if mem_end > self.mem.len() {
                return Err(Error::OutOfBounds { addr: mem_end });
            }
            let new_addr = (self.mem.as_ptr() as usize).checked_add(offset).ok_or(
                VolatileMemoryError::Overflow {
                    base: self.mem.as_ptr() as usize,
                    offset,
                },
            )?;
            Ok(unsafe { VolatileSlice::from_raw_parts(new_addr as *mut u8, count) })
        }
    }

    #[test]
    fn ref_store() {
        let mut a = [0u8; 1];
        let a_ref = VolatileSlice::new(&mut a[..]);
        let v_ref = a_ref.get_ref(0).unwrap();
        v_ref.store(2u8);
        assert_eq!(a[0], 2);
    }
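
    // Illustrative addition (not in the original suite): a round trip through
    // copy_from and copy_to, which copy whole `T`-sized elements.
    #[test]
    fn slice_copy_roundtrip() {
        let mut mem = [0u8; 4];
        let slice = VolatileSlice::new(&mut mem[..]);
        slice.copy_from(&[1u8, 2, 3, 4]);
        let mut out = [0u8; 4];
        slice.copy_to(&mut out[..]);
        assert_eq!(out, [1, 2, 3, 4]);
    }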

    #[test]
    fn ref_load() {
        let mut a = [5u8; 1];
        {
            let a_ref = VolatileSlice::new(&mut a[..]);
            let c = {
                let v_ref = a_ref.get_ref::<u8>(0).unwrap();
                assert_eq!(v_ref.load(), 5u8);
                v_ref
            };
            // Check that we can take a v_ref out of the scope we made it in.
            c.load();
        }
    }
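
    // Illustrative addition (sketch): write_bytes sets every byte of the
    // slice to the given value.
    #[test]
    fn slice_write_bytes() {
        let mut mem = [0u8; 8];
        let slice = VolatileSlice::new(&mut mem[..]);
        slice.write_bytes(0xAA);
        let mut out = [0u8; 8];
        slice.copy_to(&mut out[..]);
        assert!(out.iter().all(|&b| b == 0xAA));
    }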

    #[test]
    fn ref_to_slice() {
        let mut a = [1u8; 5];
        let a_ref = VolatileSlice::new(&mut a[..]);
        let v_ref = a_ref.get_ref(1).unwrap();
        v_ref.store(0x12345678u32);
        let ref_slice = v_ref.to_slice();
        assert_eq!(v_ref.as_mut_ptr() as usize, ref_slice.as_mut_ptr() as usize);
        assert_eq!(v_ref.size(), ref_slice.size());
    }
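
    // Illustrative addition (sketch): copy_to_volatile_slice copies only
    // min(src.size(), dst.size()) bytes, so the shorter side bounds the copy.
    #[test]
    fn slice_copy_to_volatile_slice() {
        let mut src_mem = [0x55u8; 4];
        let mut dst_mem = [0u8; 2];
        let src = VolatileSlice::new(&mut src_mem[..]);
        let dst = VolatileSlice::new(&mut dst_mem[..]);
        src.copy_to_volatile_slice(dst);
        let mut out = [0u8; 2];
        dst.copy_to(&mut out[..]);
        assert_eq!(out, [0x55, 0x55]);
    }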

    #[test]
    fn observe_mutate() {
        let a = VecMem::new(1);
        let a_clone = a.clone();
        let v_ref = a.get_ref::<u8>(0).unwrap();
        v_ref.store(99);

        let start_barrier = Arc::new(Barrier::new(2));
        let thread_start_barrier = start_barrier.clone();
        let end_barrier = Arc::new(Barrier::new(2));
        let thread_end_barrier = end_barrier.clone();
        spawn(move || {
            // Wait until the main thread has observed the initial value
            // before overwriting it.
            thread_start_barrier.wait();
            let clone_v_ref = a_clone.get_ref::<u8>(0).unwrap();
            clone_v_ref.store(0);
            thread_end_barrier.wait();
        });

        // The spawned thread has not been released yet, so the original value
        // must still be visible.
        assert_eq!(v_ref.load(), 99);
        start_barrier.wait();
        end_barrier.wait();
        // After both barriers, the other thread's volatile store must be
        // observed by this volatile load.
        assert_eq!(v_ref.load(), 0);
    }

    #[test]
    fn slice_size() {
        let a = VecMem::new(100);
        let s = a.get_slice(0, 27).unwrap();
        assert_eq!(s.size(), 27);

        let s = a.get_slice(34, 27).unwrap();
        assert_eq!(s.size(), 27);

        let s = s.get_slice(20, 5).unwrap();
        assert_eq!(s.size(), 5);
    }
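
    // Illustrative addition (sketch): offset() bumps the start address and
    // shrinks the size by the same count; going past the end is an error.
    #[test]
    fn slice_offset() {
        let a = VecMem::new(10);
        let s = a.get_slice(0, 10).unwrap();
        let t = s.offset(4).unwrap();
        assert_eq!(t.size(), 6);
        assert_eq!(t.as_ptr() as usize, s.as_ptr() as usize + 4);
        assert!(s.offset(11).is_err());
    }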

    #[test]
    fn slice_overflow_error() {
        use std::usize::MAX;
        let a = VecMem::new(1);
        let res = a.get_slice(MAX, 1).unwrap_err();
        assert_eq!(
            res,
            Error::Overflow {
                base: MAX,
                offset: 1,
            }
        );
    }

    #[test]
    fn slice_oob_error() {
        let a = VecMem::new(100);
        a.get_slice(50, 50).unwrap();
        let res = a.get_slice(55, 50).unwrap_err();
        assert_eq!(res, Error::OutOfBounds { addr: 105 });
    }
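
    // Illustrative addition (sketch): advance() and truncate() narrow a slice
    // in place. This assumes the IoBufMut semantics documented above: advance
    // moves the start forward and truncate never grows the buffer.
    #[test]
    fn slice_advance_truncate() {
        let a = VecMem::new(10);
        let mut s = a.get_slice(0, 10).unwrap();
        s.advance(3);
        assert_eq!(s.size(), 7);
        s.truncate(2);
        assert_eq!(s.size(), 2);
    }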

    #[test]
    fn ref_overflow_error() {
        use std::usize::MAX;
        let a = VecMem::new(1);
        let res = a.get_ref::<u8>(MAX).unwrap_err();
        assert_eq!(
            res,
            Error::Overflow {
                base: MAX,
                offset: 1,
            }
        );
    }

    #[test]
    fn ref_oob_error() {
        let a = VecMem::new(100);
        a.get_ref::<u8>(99).unwrap();
        let res = a.get_ref::<u16>(99).unwrap_err();
        assert_eq!(res, Error::OutOfBounds { addr: 101 });
    }

    #[test]
    fn ref_oob_too_large() {
        let a = VecMem::new(3);
        let res = a.get_ref::<u32>(0).unwrap_err();
        assert_eq!(res, Error::OutOfBounds { addr: 4 });
    }
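
    // Illustrative addition (sketch): as_iobufs reinterprets a slice of
    // VolatileSlice as IoBufMut in place, relying on #[repr(transparent)].
    #[test]
    fn slice_as_iobufs() {
        let mut a = [0u8; 4];
        let mut b = [0u8; 8];
        let slices = [VolatileSlice::new(&mut a[..]), VolatileSlice::new(&mut b[..])];
        let iobufs = VolatileSlice::as_iobufs(&slices);
        assert_eq!(iobufs.len(), 2);
        assert_eq!(iobufs[0].len(), 4);
        assert_eq!(iobufs[1].len(), 8);
    }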
}