use super::super::{Error, Register, RISCV_MAX_MEMORY, RISCV_PAGESIZE};
use super::{fill_page_data, get_page_indices, memset, set_dirty, Memory};

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use bytes::Bytes;
use std::io::{Cursor, Seek, SeekFrom};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};

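/// Flat memory model: one contiguous byte buffer covering the whole address
/// space, one flag byte per `RISCV_PAGESIZE` page, and the load reservation
/// address exposed through `lr`/`set_lr`.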
pub struct FlatMemory<R> {
    data: Vec<u8>,
    flags: Vec<u8>,
    memory_size: usize,
    riscv_pages: usize,
    load_reservation_address: R,
    _inner: PhantomData<R>,
}

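// Deref/DerefMut expose the underlying byte buffer, so `self.len()` reports
// the buffer size and `store_bytes`/`store_byte` can slice into memory
// directly.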
impl<R> Deref for FlatMemory<R> {
    type Target = Vec<u8>;

    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

impl<R> DerefMut for FlatMemory<R> {
    fn deref_mut(&mut self) -> &mut Vec<u8> {
        &mut self.data
    }
}

impl<R: Register> Memory for FlatMemory<R> {
    type REG = R;

    fn new() -> Self {
        Self::new_with_memory(RISCV_MAX_MEMORY)
    }

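    // Builds a flat memory of `memory_size` bytes. Panics if the size exceeds
    // `RISCV_MAX_MEMORY` or is not a multiple of `RISCV_PAGESIZE`. The load
    // reservation address starts at `u64::MAX` as a sentinel value.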
    fn new_with_memory(memory_size: usize) -> Self {
        assert!(memory_size <= RISCV_MAX_MEMORY);
        assert!(memory_size % RISCV_PAGESIZE == 0);
        Self {
            data: vec![0; memory_size],
            flags: vec![0; memory_size / RISCV_PAGESIZE],
            memory_size,
            riscv_pages: memory_size / RISCV_PAGESIZE,
            load_reservation_address: R::from_u64(u64::MAX),
            _inner: PhantomData,
        }
    }

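    // Page initialization delegates to `fill_page_data`; the flat model keeps
    // no per-page permissions, so the `_flags` argument is ignored.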
    fn init_pages(
        &mut self,
        addr: u64,
        size: u64,
        _flags: u8,
        source: Option<Bytes>,
        offset_from_addr: u64,
    ) -> Result<(), Error> {
        fill_page_data(self, addr, size, source, offset_from_addr)
    }

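    // Per-page flag accessors: any page index at or beyond `riscv_pages`
    // yields `Error::MemOutOfBound`.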
    fn fetch_flag(&mut self, page: u64) -> Result<u8, Error> {
        if page < self.riscv_pages as u64 {
            Ok(self.flags[page as usize])
        } else {
            Err(Error::MemOutOfBound)
        }
    }

    fn set_flag(&mut self, page: u64, flag: u8) -> Result<(), Error> {
        if page < self.riscv_pages as u64 {
            self.flags[page as usize] |= flag;
            Ok(())
        } else {
            Err(Error::MemOutOfBound)
        }
    }

    fn clear_flag(&mut self, page: u64, flag: u8) -> Result<(), Error> {
        if page < self.riscv_pages as u64 {
            self.flags[page as usize] &= !flag;
            Ok(())
        } else {
            Err(Error::MemOutOfBound)
        }
    }

    fn memory_size(&self) -> usize {
        self.memory_size
    }

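    // Instruction fetch helpers reuse the ordinary little-endian loads and
    // narrow the result to the fetched width.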
    fn execute_load16(&mut self, addr: u64) -> Result<u16, Error> {
        self.load16(&Self::REG::from_u64(addr)).map(|v| v.to_u16())
    }

    fn execute_load32(&mut self, addr: u64) -> Result<u32, Error> {
        self.load32(&Self::REG::from_u64(addr)).map(|v| v.to_u32())
    }

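    // Loads: the end address is computed with checked arithmetic and compared
    // against the buffer length, then the value is read little-endian through
    // a `Cursor`.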
    fn load8(&mut self, addr: &Self::REG) -> Result<Self::REG, Error> {
        let addr = addr.to_u64();
        if addr.checked_add(1).ok_or(Error::MemOutOfBound)? > self.len() as u64 {
            return Err(Error::MemOutOfBound);
        }
        let mut reader = Cursor::new(&self.data);
        reader.seek(SeekFrom::Start(addr))?;
        let v = reader.read_u8()?;
        Ok(Self::REG::from_u8(v))
    }

    fn load16(&mut self, addr: &Self::REG) -> Result<Self::REG, Error> {
        let addr = addr.to_u64();
        if addr.checked_add(2).ok_or(Error::MemOutOfBound)? > self.len() as u64 {
            return Err(Error::MemOutOfBound);
        }
        let mut reader = Cursor::new(&self.data);
        reader.seek(SeekFrom::Start(addr))?;
        let v = reader.read_u16::<LittleEndian>()?;
        Ok(Self::REG::from_u16(v))
    }

    fn load32(&mut self, addr: &Self::REG) -> Result<Self::REG, Error> {
        let addr = addr.to_u64();
        if addr.checked_add(4).ok_or(Error::MemOutOfBound)? > self.len() as u64 {
            return Err(Error::MemOutOfBound);
        }
        let mut reader = Cursor::new(&self.data);
        reader.seek(SeekFrom::Start(addr))?;
        let v = reader.read_u32::<LittleEndian>()?;
        Ok(Self::REG::from_u32(v))
    }

    fn load64(&mut self, addr: &Self::REG) -> Result<Self::REG, Error> {
        let addr = addr.to_u64();
        if addr.checked_add(8).ok_or(Error::MemOutOfBound)? > self.len() as u64 {
            return Err(Error::MemOutOfBound);
        }
        let mut reader = Cursor::new(&self.data);
        reader.seek(SeekFrom::Start(addr))?;
        let v = reader.read_u64::<LittleEndian>()?;
        Ok(Self::REG::from_u64(v))
    }

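    // Stores: mark the touched pages dirty via `set_dirty` before writing the
    // value little-endian at `addr`.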
    fn store8(&mut self, addr: &Self::REG, value: &Self::REG) -> Result<(), Error> {
        let addr = addr.to_u64();
        let page_indices = get_page_indices(addr, 1)?;
        set_dirty(self, &page_indices)?;
        let mut writer = Cursor::new(&mut self.data);
        writer.seek(SeekFrom::Start(addr))?;
        writer.write_u8(value.to_u8())?;
        Ok(())
    }

    fn store16(&mut self, addr: &Self::REG, value: &Self::REG) -> Result<(), Error> {
        let addr = addr.to_u64();
        let page_indices = get_page_indices(addr, 2)?;
        set_dirty(self, &page_indices)?;
        let mut writer = Cursor::new(&mut self.data);
        writer.seek(SeekFrom::Start(addr))?;
        writer.write_u16::<LittleEndian>(value.to_u16())?;
        Ok(())
    }

    fn store32(&mut self, addr: &Self::REG, value: &Self::REG) -> Result<(), Error> {
        let addr = addr.to_u64();
        let page_indices = get_page_indices(addr, 4)?;
        set_dirty(self, &page_indices)?;
        let mut writer = Cursor::new(&mut self.data);
        writer.seek(SeekFrom::Start(addr))?;
        writer.write_u32::<LittleEndian>(value.to_u32())?;
        Ok(())
    }

    fn store64(&mut self, addr: &Self::REG, value: &Self::REG) -> Result<(), Error> {
        let addr = addr.to_u64();
        let page_indices = get_page_indices(addr, 8)?;
        set_dirty(self, &page_indices)?;
        let mut writer = Cursor::new(&mut self.data);
        writer.seek(SeekFrom::Start(addr))?;
        writer.write_u64::<LittleEndian>(value.to_u64())?;
        Ok(())
    }

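    // Bulk writes: `store_bytes` copies a slice, `store_byte` fills a range
    // with a single byte; both mark the affected pages dirty first.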
    fn store_bytes(&mut self, addr: u64, value: &[u8]) -> Result<(), Error> {
        let size = value.len() as u64;
        if size == 0 {
            return Ok(());
        }
        let page_indices = get_page_indices(addr, size)?;
        set_dirty(self, &page_indices)?;
        let slice = &mut self[addr as usize..(addr + size) as usize];
        slice.copy_from_slice(value);
        Ok(())
    }

    fn store_byte(&mut self, addr: u64, size: u64, value: u8) -> Result<(), Error> {
        if size == 0 {
            return Ok(());
        }
        let page_indices = get_page_indices(addr, size)?;
        set_dirty(self, &page_indices)?;
        memset(&mut self[addr as usize..(addr + size) as usize], value);
        Ok(())
    }

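    // Copies `size` bytes starting at `addr` into a freshly allocated `Bytes`.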
    fn load_bytes(&mut self, addr: u64, size: u64) -> Result<Bytes, Error> {
        if size == 0 {
            return Ok(Bytes::new());
        }
        if addr.checked_add(size).ok_or(Error::MemOutOfBound)? > self.memory_size() as u64 {
            return Err(Error::MemOutOfBound);
        }
        Ok(Bytes::from(
            self[addr as usize..(addr + size) as usize].to_vec(),
        ))
    }

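    // Accessors for the load reservation address (LR/SC).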
    fn lr(&self) -> &Self::REG {
        &self.load_reservation_address
    }

    fn set_lr(&mut self, value: &Self::REG) {
        self.load_reservation_address = value.clone();
    }
}
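
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal round-trip sketch of the flat memory API. It assumes `u64`
    // implements `Register` elsewhere in this crate; if that assumption does
    // not hold, substitute any concrete register type when instantiating
    // `FlatMemory`.
    #[test]
    fn store_then_load_roundtrip() {
        let mut memory = FlatMemory::<u64>::new_with_memory(RISCV_PAGESIZE * 4);

        // Word store followed by a word load at the same address.
        memory.store32(&0x100u64, &0xdead_beefu64).unwrap();
        assert_eq!(memory.load32(&0x100u64).unwrap(), 0xdead_beef);

        // Bulk write followed by a bulk read of the same region.
        memory.store_bytes(0x200, b"hello").unwrap();
        assert_eq!(&memory.load_bytes(0x200, 5).unwrap()[..], b"hello");

        // Reads past the configured memory size are rejected.
        let end = memory.memory_size() as u64;
        assert!(memory.load8(&end).is_err());
    }
}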