1use crate::error::Error;
2
3use scroll::Pread;
4
/// The given data buffer is invalid for {function_name}: its length must be a multiple of {alignment} bytes.
#[derive(Debug, thiserror::Error, docsplay::Display)]
pub struct InvalidDataLengthError {
    /// Name of the memory-access function that rejected the buffer.
    pub function_name: &'static str,
    /// Required length multiple, in bytes.
    pub alignment: usize,
}
13impl InvalidDataLengthError {
14 pub fn new(function_name: &'static str, alignment: usize) -> Self {
15 Self {
16 function_name,
17 alignment,
18 }
19 }
20}
21
/// The memory access to address {address:#x} is not aligned to the required {alignment} bytes.
#[derive(Debug, thiserror::Error, docsplay::Display)]
pub struct MemoryNotAlignedError {
    /// The address of the attempted access.
    pub address: u64,
    /// The alignment (in bytes) the access would need to satisfy.
    pub alignment: usize,
}
30
/// A width-aware interface for reading and writing target memory.
///
/// Implementors provide the raw width-specific accessors (`read_64`/`read_32`/
/// `read_16`/`read_8` and their write counterparts plus
/// [`supports_native_64bit_access`](MemoryInterface::supports_native_64bit_access),
/// [`supports_8bit_transfers`](MemoryInterface::supports_8bit_transfers) and
/// [`flush`](MemoryInterface::flush)); all word-sized and byte-buffer
/// conveniences are supplied as default methods on top of those.
///
/// All multi-byte values are exchanged in little-endian byte order (see the
/// `to_le_bytes`/`pread_with(.., scroll::LE)` conversions in the default
/// methods).
pub trait MemoryInterface<ERR = Error>
where
    ERR: std::error::Error + From<InvalidDataLengthError> + From<MemoryNotAlignedError>,
{
    /// Returns whether this interface can perform native 64-bit-wide accesses.
    ///
    /// Used by [`read`](MemoryInterface::read) to decide whether a plain
    /// byte-wise read can be used instead of buffering through 32-bit words.
    fn supports_native_64bit_access(&mut self) -> bool;

    /// Reads a single 64-bit word from `address` via [`read_64`](MemoryInterface::read_64).
    fn read_word_64(&mut self, address: u64) -> Result<u64, ERR> {
        let mut word = 0;
        self.read_64(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Reads a single 32-bit word from `address` via [`read_32`](MemoryInterface::read_32).
    fn read_word_32(&mut self, address: u64) -> Result<u32, ERR> {
        let mut word = 0;
        self.read_32(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Reads a single 16-bit word from `address` via [`read_16`](MemoryInterface::read_16).
    fn read_word_16(&mut self, address: u64) -> Result<u16, ERR> {
        let mut word = 0;
        self.read_16(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Reads a single byte from `address` via [`read_8`](MemoryInterface::read_8).
    fn read_word_8(&mut self, address: u64) -> Result<u8, ERR> {
        let mut word = 0;
        self.read_8(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Reads `data.len()` 64-bit words starting at `address` into `data`.
    fn read_64(&mut self, address: u64, data: &mut [u64]) -> Result<(), ERR>;

    /// Reads `data.len()` 32-bit words starting at `address` into `data`.
    fn read_32(&mut self, address: u64, data: &mut [u32]) -> Result<(), ERR>;

    /// Reads `data.len()` 16-bit words starting at `address` into `data`.
    fn read_16(&mut self, address: u64, data: &mut [u16]) -> Result<(), ERR>;

    /// Reads `data.len()` bytes starting at `address` into `data`.
    fn read_8(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR>;

    /// Reads a byte buffer from `address` using 64-bit accesses.
    ///
    /// The words are written into `data` in little-endian byte order.
    ///
    /// # Errors
    ///
    /// Returns an [`InvalidDataLengthError`] if `data.len()` is not a
    /// multiple of 8.
    fn read_mem_64bit(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(8) {
            return Err(InvalidDataLengthError::new("read_mem_64bit", 8).into());
        }
        let mut buffer = vec![0u64; data.len() / 8];
        self.read_64(address, &mut buffer)?;
        for (bytes, value) in data.chunks_exact_mut(8).zip(buffer.iter()) {
            bytes.copy_from_slice(&u64::to_le_bytes(*value));
        }
        Ok(())
    }

    /// Reads a byte buffer from `address` using 32-bit accesses.
    ///
    /// The words are written into `data` in little-endian byte order.
    ///
    /// # Errors
    ///
    /// Returns an [`InvalidDataLengthError`] if `data.len()` is not a
    /// multiple of 4.
    fn read_mem_32bit(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(4) {
            return Err(InvalidDataLengthError::new("read_mem_32bit", 4).into());
        }
        let mut buffer = vec![0u32; data.len() / 4];
        self.read_32(address, &mut buffer)?;
        for (bytes, value) in data.chunks_exact_mut(4).zip(buffer.iter()) {
            bytes.copy_from_slice(&u32::to_le_bytes(*value));
        }
        Ok(())
    }

    /// Reads bytes from `address` with no alignment or length restrictions
    /// on the caller's side.
    ///
    /// Chooses the cheapest strategy available:
    /// - native 64-bit interfaces get the byte buffer passed straight to
    ///   [`read_8`](MemoryInterface::read_8) (no intermediate buffer);
    /// - a 4-byte-aligned address with a 4-byte-multiple length goes through
    ///   [`read_mem_32bit`](MemoryInterface::read_mem_32bit) directly;
    /// - otherwise the access is widened to the surrounding word-aligned
    ///   span, read into a scratch buffer, and the requested bytes are
    ///   copied out of it. Note this over-reads up to 3 bytes on either
    ///   side of the requested range.
    fn read(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR> {
        if self.supports_native_64bit_access() {
            self.read_8(address, data)?;
        } else if address.is_multiple_of(4) && data.len().is_multiple_of(4) {
            self.read_mem_32bit(address, data)?;
        } else {
            // Number of bytes between the previous word boundary and `address`.
            let start_extra_count = (address % 4) as usize;
            // Scratch buffer rounded up to whole 32-bit words covering the request.
            let mut buffer = vec![0u8; (start_extra_count + data.len()).div_ceil(4) * 4];
            self.read_mem_32bit(address - start_extra_count as u64, &mut buffer)?;
            data.copy_from_slice(&buffer[start_extra_count..start_extra_count + data.len()]);
        }
        Ok(())
    }

    /// Writes a single 64-bit word to `address` via [`write_64`](MemoryInterface::write_64).
    fn write_word_64(&mut self, address: u64, data: u64) -> Result<(), ERR> {
        self.write_64(address, std::slice::from_ref(&data))
    }

    /// Writes a single 32-bit word to `address` via [`write_32`](MemoryInterface::write_32).
    fn write_word_32(&mut self, address: u64, data: u32) -> Result<(), ERR> {
        self.write_32(address, std::slice::from_ref(&data))
    }

    /// Writes a single 16-bit word to `address` via [`write_16`](MemoryInterface::write_16).
    fn write_word_16(&mut self, address: u64, data: u16) -> Result<(), ERR> {
        self.write_16(address, std::slice::from_ref(&data))
    }

    /// Writes a single byte to `address` via [`write_8`](MemoryInterface::write_8).
    fn write_word_8(&mut self, address: u64, data: u8) -> Result<(), ERR> {
        self.write_8(address, std::slice::from_ref(&data))
    }

    /// Writes the 64-bit words in `data` starting at `address`.
    fn write_64(&mut self, address: u64, data: &[u64]) -> Result<(), ERR>;

    /// Writes the 32-bit words in `data` starting at `address`.
    fn write_32(&mut self, address: u64, data: &[u32]) -> Result<(), ERR>;

    /// Writes the 16-bit words in `data` starting at `address`.
    fn write_16(&mut self, address: u64, data: &[u16]) -> Result<(), ERR>;

    /// Writes the bytes in `data` starting at `address`.
    fn write_8(&mut self, address: u64, data: &[u8]) -> Result<(), ERR>;

    /// Writes a byte buffer to `address` using 64-bit accesses.
    ///
    /// Bytes are interpreted as little-endian 64-bit words.
    ///
    /// # Errors
    ///
    /// Returns an [`InvalidDataLengthError`] if `data.len()` is not a
    /// multiple of 8.
    fn write_mem_64bit(&mut self, address: u64, data: &[u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(8) {
            return Err(InvalidDataLengthError::new("write_mem_64bit", 8).into());
        }
        let mut buffer = vec![0u64; data.len() / 8];
        for (bytes, value) in data.chunks_exact(8).zip(buffer.iter_mut()) {
            // chunks_exact(8) guarantees 8 bytes, so reading a u64 cannot fail.
            *value = bytes
                .pread_with(0, scroll::LE)
                .expect("an u64 - this is a bug, please report it");
        }

        self.write_64(address, &buffer)?;
        Ok(())
    }

    /// Writes a byte buffer to `address` using 32-bit accesses.
    ///
    /// Bytes are interpreted as little-endian 32-bit words.
    ///
    /// # Errors
    ///
    /// Returns an [`InvalidDataLengthError`] if `data.len()` is not a
    /// multiple of 4.
    fn write_mem_32bit(&mut self, address: u64, data: &[u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(4) {
            return Err(InvalidDataLengthError::new("write_mem_32bit", 4).into());
        }
        let mut buffer = vec![0u32; data.len() / 4];
        for (bytes, value) in data.chunks_exact(4).zip(buffer.iter_mut()) {
            // chunks_exact(4) guarantees 4 bytes, so reading a u32 cannot fail.
            *value = bytes
                .pread_with(0, scroll::LE)
                .expect("an u32 - this is a bug, please report it");
        }

        self.write_32(address, &buffer)?;
        Ok(())
    }

    /// Writes bytes to `address` with no alignment or length restrictions
    /// on the caller's side.
    ///
    /// The buffer is split into three parts: an unaligned head (written
    /// byte-wise), a word-aligned middle (written as little-endian 32-bit
    /// words), and an unaligned tail (written byte-wise).
    ///
    /// # Errors
    ///
    /// Returns a [`MemoryNotAlignedError`] when the access would need
    /// byte-wise head/tail writes but
    /// [`supports_8bit_transfers`](MemoryInterface::supports_8bit_transfers)
    /// reports `false`.
    fn write(&mut self, mut address: u64, mut data: &[u8]) -> Result<(), ERR> {
        let len = data.len();
        // Bytes before the first 4-byte boundary (capped at the buffer length).
        let start_extra_count = ((4 - (address % 4) as usize) % 4).min(len);
        // Bytes after the last full 32-bit word.
        let end_extra_count = (len - start_extra_count) % 4;
        let inbetween_count = len - start_extra_count - end_extra_count;
        assert!(start_extra_count < 4);
        assert!(end_extra_count < 4);
        assert!(inbetween_count.is_multiple_of(4));

        if start_extra_count != 0 || end_extra_count != 0 {
            if !self.supports_8bit_transfers()? {
                return Err(MemoryNotAlignedError {
                    address,
                    alignment: 4,
                }
                .into());
            }
        }

        if start_extra_count != 0 {
            // Unaligned head: write byte-wise up to the word boundary.
            self.write_8(address, &data[..start_extra_count])?;

            address += start_extra_count as u64;
            data = &data[start_extra_count..];
        }

        if inbetween_count > 0 {
            // Aligned middle: repack the bytes as little-endian 32-bit words.
            let mut buffer = vec![0u32; inbetween_count / 4];
            for (bytes, value) in data.chunks_exact(4).zip(buffer.iter_mut()) {
                *value = u32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]);
            }
            self.write_32(address, &buffer)?;

            address += inbetween_count as u64;
            data = &data[inbetween_count..];
        }

        if end_extra_count > 0 {
            // Unaligned tail: write the remaining bytes byte-wise.
            self.write_8(address, &data[..end_extra_count])?;
        }

        Ok(())
    }

    /// Returns whether this interface supports byte-wise (8-bit) transfers,
    /// which [`write`](MemoryInterface::write) needs for unaligned accesses.
    fn supports_8bit_transfers(&self) -> Result<bool, ERR>;

    /// Flushes any buffered operations to the target.
    fn flush(&mut self) -> Result<(), ERR>;
}
333
334pub(crate) fn valid_32bit_address(address: u64) -> Result<u32, Error> {
338 let address: u32 = address
339 .try_into()
340 .map_err(|_| Error::Other(format!("Address {address:#08x} out of range")))?;
341
342 Ok(address)
343}
344
/// Provides access to an underlying [`MemoryInterface`].
///
/// Implementing this trait yields a blanket `MemoryInterface<Error>`
/// implementation (see below) that delegates every call to the inner
/// interface and converts its errors.
pub trait CoreMemoryInterface {
    /// The error type of the underlying memory interface; must be convertible
    /// into the crate-level [`Error`] for the blanket impl to apply.
    type ErrorType: std::error::Error + From<InvalidDataLengthError> + From<MemoryNotAlignedError>;

    /// Returns a shared reference to the underlying memory interface.
    fn memory(&self) -> &dyn MemoryInterface<Self::ErrorType>;

    /// Returns an exclusive reference to the underlying memory interface.
    fn memory_mut(&mut self) -> &mut dyn MemoryInterface<Self::ErrorType>;
}
355
356impl<T> MemoryInterface<Error> for T
357where
358 T: CoreMemoryInterface,
359 Error: From<<T as CoreMemoryInterface>::ErrorType>,
360{
361 fn supports_native_64bit_access(&mut self) -> bool {
362 self.memory_mut().supports_native_64bit_access()
363 }
364
365 fn read_word_64(&mut self, address: u64) -> Result<u64, Error> {
366 self.memory_mut().read_word_64(address).map_err(Error::from)
367 }
368
369 fn read_word_32(&mut self, address: u64) -> Result<u32, Error> {
370 self.memory_mut().read_word_32(address).map_err(Error::from)
371 }
372
373 fn read_word_16(&mut self, address: u64) -> Result<u16, Error> {
374 self.memory_mut().read_word_16(address).map_err(Error::from)
375 }
376
377 fn read_word_8(&mut self, address: u64) -> Result<u8, Error> {
378 self.memory_mut().read_word_8(address).map_err(Error::from)
379 }
380
381 fn read_64(&mut self, address: u64, data: &mut [u64]) -> Result<(), Error> {
382 self.memory_mut()
383 .read_64(address, data)
384 .map_err(Error::from)
385 }
386
387 fn read_32(&mut self, address: u64, data: &mut [u32]) -> Result<(), Error> {
388 self.memory_mut()
389 .read_32(address, data)
390 .map_err(Error::from)
391 }
392
393 fn read_16(&mut self, address: u64, data: &mut [u16]) -> Result<(), Error> {
394 self.memory_mut()
395 .read_16(address, data)
396 .map_err(Error::from)
397 }
398
399 fn read_8(&mut self, address: u64, data: &mut [u8]) -> Result<(), Error> {
400 self.memory_mut().read_8(address, data).map_err(Error::from)
401 }
402
403 fn read(&mut self, address: u64, data: &mut [u8]) -> Result<(), Error> {
404 self.memory_mut().read(address, data).map_err(Error::from)
405 }
406
407 fn write_word_64(&mut self, address: u64, data: u64) -> Result<(), Error> {
408 self.memory_mut()
409 .write_word_64(address, data)
410 .map_err(Error::from)
411 }
412
413 fn write_word_32(&mut self, address: u64, data: u32) -> Result<(), Error> {
414 self.memory_mut()
415 .write_word_32(address, data)
416 .map_err(Error::from)
417 }
418
419 fn write_word_16(&mut self, address: u64, data: u16) -> Result<(), Error> {
420 self.memory_mut()
421 .write_word_16(address, data)
422 .map_err(Error::from)
423 }
424
425 fn write_word_8(&mut self, address: u64, data: u8) -> Result<(), Error> {
426 self.memory_mut()
427 .write_word_8(address, data)
428 .map_err(Error::from)
429 }
430
431 fn write_64(&mut self, address: u64, data: &[u64]) -> Result<(), Error> {
432 self.memory_mut()
433 .write_64(address, data)
434 .map_err(Error::from)
435 }
436
437 fn write_32(&mut self, address: u64, data: &[u32]) -> Result<(), Error> {
438 self.memory_mut()
439 .write_32(address, data)
440 .map_err(Error::from)
441 }
442
443 fn write_16(&mut self, address: u64, data: &[u16]) -> Result<(), Error> {
444 self.memory_mut()
445 .write_16(address, data)
446 .map_err(Error::from)
447 }
448
449 fn write_8(&mut self, address: u64, data: &[u8]) -> Result<(), Error> {
450 self.memory_mut()
451 .write_8(address, data)
452 .map_err(Error::from)
453 }
454
455 fn write(&mut self, address: u64, data: &[u8]) -> Result<(), Error> {
456 self.memory_mut().write(address, data).map_err(Error::from)
457 }
458
459 fn supports_8bit_transfers(&self) -> Result<bool, Error> {
460 self.memory().supports_8bit_transfers().map_err(Error::from)
461 }
462
463 fn flush(&mut self) -> Result<(), Error> {
464 self.memory_mut().flush().map_err(Error::from)
465 }
466}