// cyclone_v/memory.rs

1pub mod dev_mem;
2
3#[cfg(feature = "std")]
4pub use dev_mem::*;
5use std::ops::{Bound, RangeBounds};
6
7pub mod buffer;
8#[cfg(feature = "std")]
9pub use buffer::*;
10
/// Resolves `range` against a mapping of `max` bytes and returns the
/// clamped `(start, len)` pair.
///
/// Both `start` and `start + len` are guaranteed to be `<= max`. Degenerate
/// inputs are handled instead of panicking: an inverted range (`5..2`)
/// yields a zero length, and bounds at `usize::MAX` saturate rather than
/// overflow. This matters because the `unsafe` slice construction in
/// `MemoryMapper::as_range` relies on these values being in bounds.
fn clamp_range(range: impl RangeBounds<usize>, max: usize) -> (usize, usize) {
    let start = match range.start_bound() {
        Bound::Included(&s) => s,
        // `Excluded(usize::MAX)` would overflow a plain `+ 1`.
        Bound::Excluded(&s) => s.saturating_add(1),
        Bound::Unbounded => 0,
    }
    .min(max); // `usize` is already >= 0, so only the upper clamp is needed.
    let end = match range.end_bound() {
        // `Included(usize::MAX)` would overflow a plain `+ 1`.
        Bound::Included(&e) => e.saturating_add(1),
        Bound::Excluded(&e) => e,
        Bound::Unbounded => max,
    }
    .min(max);

    // saturating_sub: an inverted range (end < start) becomes an empty span
    // instead of underflowing (panic in debug, huge length in release).
    (start, end.saturating_sub(start))
}
27
/// Maps a memory region to a pointer.
///
/// Implementors expose a contiguous region of `len()` bytes reachable from
/// `as_ptr()` / `as_mut_ptr()`; the default `as_range` helpers carve byte
/// slices out of that region.
pub trait MemoryMapper {
    /// Create a Mapper that maps the given physical address and size.
    fn create(address: usize, size: usize) -> Result<Self, &'static str>
    where
        Self: Sized;

    /// Returns the maximum length that can be addressed. The end of this
    /// memory mapped range is `address + len()`.
    fn len(&self) -> usize;

    /// Returns true if the mapping is empty.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns a pointer to the mapped memory region.
    ///
    /// NOTE(review): the default range methods assume this pointer is valid
    /// for `len()` bytes — that contract should be stated on implementors.
    fn as_ptr<T>(&self) -> *const T;

    /// Returns a mutable pointer to the mapped memory region.
    fn as_mut_ptr<T>(&mut self) -> *mut T;

    /// Creates an inner range of bytes. The offsets are relative to the base
    /// of the mapped memory, e.g. `as_range(0..4)` will return the first 4
    /// bytes of the mapped memory (a memory mapping to address 0x12340000 will
    /// map 0x12340000..0x12340004).
    fn as_range(&self, range: impl RangeBounds<usize>) -> &[u8] {
        let (start, len) = clamp_range(range, self.len());
        // SAFETY: relies on `clamp_range` keeping `start` and `start + len`
        // within `self.len()`, and on the implementor's pointer being valid
        // for `len()` bytes. NOTE(review): an inverted range (end < start)
        // in `clamp_range` can underflow the length — verify that path.
        unsafe { std::slice::from_raw_parts(self.as_ptr::<u8>().add(start), len) }
    }

    /// Creates an inner mutable range of bytes.
    fn as_mut_range(&mut self, range: impl RangeBounds<usize>) -> &mut [u8] {
        let (start, len) = clamp_range(range, self.len());
        // SAFETY: same bounds argument as `as_range`, via the mutable pointer;
        // `&mut self` ensures no aliasing through this trait's own methods.
        unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr::<u8>().add(start), len) }
    }
}
65
/// A [`MemoryMapper`] backed by a caller-provided byte slice rather than a
/// physical-address mapping (used by the in-file test; presumably also for
/// in-process buffers).
pub struct RegionMemoryMapper<'a> {
    // Borrowed region from which all pointers and sub-ranges are derived.
    region: &'a mut [u8],
}
69
70impl<'a> RegionMemoryMapper<'a> {
71    pub fn new(region: &'a mut [u8]) -> Self {
72        Self { region }
73    }
74}
75
76impl<'a> MemoryMapper for RegionMemoryMapper<'a> {
77    fn create(_address: usize, _size: usize) -> Result<Self, &'static str> {
78        unimplemented!()
79    }
80
81    fn len(&self) -> usize {
82        self.region.len()
83    }
84
85    fn as_ptr<T>(&self) -> *const T {
86        self.region.as_ptr() as *const T
87    }
88
89    fn as_mut_ptr<T>(&mut self) -> *mut T {
90        self.region.as_mut_ptr() as *mut T
91    }
92}
93
#[test]
fn range_works() {
    let expected = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
    let mut backing = expected;
    let mut mapper = RegionMemoryMapper::new(&mut backing);

    // Unbounded and oversized ranges are clamped to the mapped length.
    assert_eq!(mapper.as_range(..), &expected);
    assert_eq!(mapper.as_range(0..99), &expected);
    // Interior ranges line up with plain slice indexing.
    assert_eq!(mapper.as_range(5..8), &expected[5..8]);

    // Writes through a mutable sub-range are visible through the full range.
    mapper.as_mut_range(5..8).copy_from_slice(&[0, 0, 0]);
    assert_eq!(mapper.as_range(..), &[0, 1, 2, 3, 4, 0, 0, 0, 8, 9]);
}