use crate::error::Error;

use scroll::Pread;

#[derive(Debug, thiserror::Error, docsplay::Display)]
/// The data buffer passed to `{function_name}` must have a length that is a multiple of {alignment}.
pub struct InvalidDataLengthError {
    /// The name of the function that rejected the buffer.
    pub function_name: &'static str,
    /// The alignment, in bytes, that the buffer length must be a multiple of.
    pub alignment: usize,
}

impl InvalidDataLengthError {
    pub fn new(function_name: &'static str, alignment: usize) -> Self {
        Self {
            function_name,
            alignment,
        }
    }
}

#[derive(Debug, thiserror::Error, docsplay::Display)]
/// The memory access at address {address} is not aligned to {alignment} bytes.
pub struct MemoryNotAlignedError {
    /// The address of the unaligned access.
    pub address: u64,
    /// The required alignment in bytes.
    pub alignment: usize,
}

/// An interface for random-access reads and writes to the memory of a target.
///
/// `ERR` is the error type returned by the individual access methods; it must be
/// convertible from [`InvalidDataLengthError`] and [`MemoryNotAlignedError`].
pub trait MemoryInterface<ERR = Error>
where
    ERR: std::error::Error + From<InvalidDataLengthError> + From<MemoryNotAlignedError>,
{
    /// Does this interface support native 64-bit wide memory accesses?
    fn supports_native_64bit_access(&mut self) -> bool;

    /// Read a 64-bit word at `address`.
    fn read_word_64(&mut self, address: u64) -> Result<u64, ERR> {
        let mut word = 0;
        self.read_64(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Read a 32-bit word at `address`.
    fn read_word_32(&mut self, address: u64) -> Result<u32, ERR> {
        let mut word = 0;
        self.read_32(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Read a 16-bit word at `address`.
    fn read_word_16(&mut self, address: u64) -> Result<u16, ERR> {
        let mut word = 0;
        self.read_16(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Read an 8-bit word at `address`.
    fn read_word_8(&mut self, address: u64) -> Result<u8, ERR> {
        let mut word = 0;
        self.read_8(address, std::slice::from_mut(&mut word))?;
        Ok(word)
    }

    /// Read a block of 64-bit words at `address`.
    fn read_64(&mut self, address: u64, data: &mut [u64]) -> Result<(), ERR>;

    /// Read a block of 32-bit words at `address`.
    fn read_32(&mut self, address: u64, data: &mut [u32]) -> Result<(), ERR>;

    /// Read a block of 16-bit words at `address`.
    fn read_16(&mut self, address: u64, data: &mut [u16]) -> Result<(), ERR>;

    /// Read a block of 8-bit words at `address`.
    fn read_8(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR>;

    /// Read a block of 8-bit words at `address` using 64-bit accesses.
    ///
    /// The number of bytes to read must be a multiple of 8; otherwise an
    /// [`InvalidDataLengthError`] is returned.
    fn read_mem_64bit(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(8) {
            return Err(InvalidDataLengthError::new("read_mem_64bit", 8).into());
        }
        let mut buffer = vec![0u64; data.len() / 8];
        self.read_64(address, &mut buffer)?;
        for (bytes, value) in data.chunks_exact_mut(8).zip(buffer.iter()) {
            bytes.copy_from_slice(&u64::to_le_bytes(*value));
        }
        Ok(())
    }

    /// Read a block of 8-bit words at `address` using 32-bit accesses.
    ///
    /// The number of bytes to read must be a multiple of 4; otherwise an
    /// [`InvalidDataLengthError`] is returned.
    fn read_mem_32bit(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(4) {
            return Err(InvalidDataLengthError::new("read_mem_32bit", 4).into());
        }
        let mut buffer = vec![0u32; data.len() / 4];
        self.read_32(address, &mut buffer)?;
        for (bytes, value) in data.chunks_exact_mut(4).zip(buffer.iter()) {
            bytes.copy_from_slice(&u32::to_le_bytes(*value));
        }
        Ok(())
    }

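    /// Read a block of 8-bit words at `address`, without alignment requirements
    /// on either the address or the data length.
    ///
    /// A sketch of typical usage; `iface` stands in for any value implementing
    /// [`MemoryInterface`]:
    ///
    /// ```ignore
    /// let mut buf = [0u8; 16];
    /// iface.read(0x2000_0000, &mut buf)?;
    /// ```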
    fn read(&mut self, address: u64, data: &mut [u8]) -> Result<(), ERR> {
        if self.supports_native_64bit_access() {
            // Avoid the intermediate buffer and copy below if we don't need it.
            self.read_8(address, data)?;
        } else if address.is_multiple_of(4) && data.len().is_multiple_of(4) {
            // Already aligned: read directly into the caller's buffer.
            self.read_mem_32bit(address, data)?;
        } else {
            // Read a 32-bit aligned superset of the requested range into a
            // scratch buffer, then copy out the requested bytes.
            let start_extra_count = (address % 4) as usize;
            let mut buffer = vec![0u8; (start_extra_count + data.len()).div_ceil(4) * 4];
            self.read_mem_32bit(address - start_extra_count as u64, &mut buffer)?;
            data.copy_from_slice(&buffer[start_extra_count..start_extra_count + data.len()]);
        }
        Ok(())
    }

    /// Write a 64-bit word to `address`.
    fn write_word_64(&mut self, address: u64, data: u64) -> Result<(), ERR> {
        self.write_64(address, std::slice::from_ref(&data))
    }

    /// Write a 32-bit word to `address`.
    fn write_word_32(&mut self, address: u64, data: u32) -> Result<(), ERR> {
        self.write_32(address, std::slice::from_ref(&data))
    }

    /// Write a 16-bit word to `address`.
    fn write_word_16(&mut self, address: u64, data: u16) -> Result<(), ERR> {
        self.write_16(address, std::slice::from_ref(&data))
    }

    /// Write an 8-bit word to `address`.
    fn write_word_8(&mut self, address: u64, data: u8) -> Result<(), ERR> {
        self.write_8(address, std::slice::from_ref(&data))
    }

    /// Write a block of 64-bit words to `address`.
    fn write_64(&mut self, address: u64, data: &[u64]) -> Result<(), ERR>;

    /// Write a block of 32-bit words to `address`.
    fn write_32(&mut self, address: u64, data: &[u32]) -> Result<(), ERR>;

    /// Write a block of 16-bit words to `address`.
    fn write_16(&mut self, address: u64, data: &[u16]) -> Result<(), ERR>;

    /// Write a block of 8-bit words to `address`.
    fn write_8(&mut self, address: u64, data: &[u8]) -> Result<(), ERR>;

    /// Write a block of 8-bit words to `address` using 64-bit accesses.
    ///
    /// The number of bytes to write must be a multiple of 8; otherwise an
    /// [`InvalidDataLengthError`] is returned.
    fn write_mem_64bit(&mut self, address: u64, data: &[u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(8) {
            return Err(InvalidDataLengthError::new("write_mem_64bit", 8).into());
        }
        let mut buffer = vec![0u64; data.len() / 8];
        for (bytes, value) in data.chunks_exact(8).zip(buffer.iter_mut()) {
            *value = bytes
                .pread_with(0, scroll::LE)
                .expect("a u64 - this is a bug, please report it");
        }

        self.write_64(address, &buffer)?;
        Ok(())
    }

    /// Write a block of 8-bit words to `address` using 32-bit accesses.
    ///
    /// The number of bytes to write must be a multiple of 4; otherwise an
    /// [`InvalidDataLengthError`] is returned.
    fn write_mem_32bit(&mut self, address: u64, data: &[u8]) -> Result<(), ERR> {
        if !data.len().is_multiple_of(4) {
            return Err(InvalidDataLengthError::new("write_mem_32bit", 4).into());
        }
        let mut buffer = vec![0u32; data.len() / 4];
        for (bytes, value) in data.chunks_exact(4).zip(buffer.iter_mut()) {
            *value = bytes
                .pread_with(0, scroll::LE)
                .expect("a u32 - this is a bug, please report it");
        }

        self.write_32(address, &buffer)?;
        Ok(())
    }

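    /// Write a block of 8-bit words to `address`, without alignment requirements
    /// on either the address or the data length.
    ///
    /// Unaligned head and tail bytes are written with 8-bit accesses and the
    /// aligned middle with 32-bit accesses, so unaligned writes require
    /// [`Self::supports_8bit_transfers`] to return `true`.
    ///
    /// A sketch of typical usage; `iface` stands in for any value implementing
    /// [`MemoryInterface`]:
    ///
    /// ```ignore
    /// // Eight bytes at address 0x2000_0002: a two-byte 8-bit head write, one
    /// // aligned 32-bit word, then a two-byte 8-bit tail write.
    /// iface.write(0x2000_0002, &[1, 2, 3, 4, 5, 6, 7, 8])?;
    /// ```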
    fn write(&mut self, mut address: u64, mut data: &[u8]) -> Result<(), ERR> {
        let len = data.len();
        let start_extra_count = ((4 - (address % 4) as usize) % 4).min(len);
        let end_extra_count = (len - start_extra_count) % 4;
        let inbetween_count = len - start_extra_count - end_extra_count;
        assert!(start_extra_count < 4);
        assert!(end_extra_count < 4);
        assert!(inbetween_count.is_multiple_of(4));

        // If the address or length is unaligned, 8-bit transfers are needed for
        // the head and/or tail of the write.
        if start_extra_count != 0 || end_extra_count != 0 {
            if !self.supports_8bit_transfers()? {
                return Err(MemoryNotAlignedError {
                    address,
                    alignment: 4,
                }
                .into());
            }
        }

        if start_extra_count != 0 {
            // Write the unaligned head bytes.
            self.write_8(address, &data[..start_extra_count])?;

            address += start_extra_count as u64;
            data = &data[start_extra_count..];
        }

        if inbetween_count > 0 {
            // Write the aligned middle as little-endian 32-bit words.
            let mut buffer = vec![0u32; inbetween_count / 4];
            for (bytes, value) in data.chunks_exact(4).zip(buffer.iter_mut()) {
                *value = u32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]);
            }
            self.write_32(address, &buffer)?;

            address += inbetween_count as u64;
            data = &data[inbetween_count..];
        }

        if end_extra_count > 0 {
            // Write the unaligned tail bytes.
            self.write_8(address, &data[..end_extra_count])?;
        }

        Ok(())
    }

    /// Returns whether the interface supports 8-bit sized transfers.
    fn supports_8bit_transfers(&self) -> Result<bool, ERR>;

    /// Flush any outstanding operations.
    ///
    /// For performance, implementations may queue memory operations; this drains
    /// the queue and surfaces any buffered errors.
    fn flush(&mut self) -> Result<(), ERR>;

    /// Execute a single [`Operation`], dispatching to the matching read or write
    /// method and converting the error into [`crate::Error`].
    fn execute_single_memory_operation(
        &mut self,
        mut operation: Operation<'_>,
    ) -> Result<(), crate::Error>
    where
        Error: From<ERR>,
    {
        let result = match operation.operation {
            OperationKind::Read(ref mut data) => self.read(operation.address, data),
            OperationKind::Read8(ref mut data) => self.read_8(operation.address, data),
            OperationKind::Read16(ref mut data) => self.read_16(operation.address, data),
            OperationKind::Read32(ref mut data) => self.read_32(operation.address, data),
            OperationKind::Read64(ref mut data) => self.read_64(operation.address, data),
            OperationKind::Write(data) => self.write(operation.address, data),
            OperationKind::Write8(data) => self.write_8(operation.address, data),
            OperationKind::Write16(data) => self.write_16(operation.address, data),
            OperationKind::Write32(data) => self.write_32(operation.address, data),
            OperationKind::Write64(data) => self.write_64(operation.address, data),
            OperationKind::WriteWord8(data) => self.write_word_8(operation.address, data),
            OperationKind::WriteWord16(data) => self.write_word_16(operation.address, data),
            OperationKind::WriteWord32(data) => self.write_word_32(operation.address, data),
            OperationKind::WriteWord64(data) => self.write_word_64(operation.address, data),
        };
        result.map_err(Error::from)
    }

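    /// Execute a batch of memory [`Operation`]s in order, stopping at the first
    /// failure. Each executed operation has its `result` field filled in;
    /// operations after a failure are left with `result == None`.
    ///
    /// A sketch of typical usage; `iface` stands in for any value implementing
    /// [`MemoryInterface`]:
    ///
    /// ```ignore
    /// let mut read_back = [0u32; 1];
    /// let mut ops = [
    ///     Operation::new(0x2000_0000, OperationKind::Write32(&[0xDEAD_BEEF])),
    ///     Operation::new(0x2000_0000, OperationKind::Read32(&mut read_back)),
    /// ];
    /// iface.execute_memory_operations(&mut ops);
    /// for op in &ops {
    ///     if let Some(Err(error)) = &op.result {
    ///         eprintln!("operation at {:#x} failed: {error}", op.address);
    ///     }
    /// }
    /// ```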
    fn execute_memory_operations(&mut self, operations: &mut [Operation<'_>])
    where
        Error: From<ERR>,
    {
        for operation in operations {
            let result = self.execute_single_memory_operation(operation.reborrow());
            let success = result.is_ok();
            operation.result = Some(result);
            if !success {
                // Stop at the first failed operation; the remaining ones keep
                // `result == None`.
                break;
            }
        }
    }
}

/// Convert the given address into a 32-bit address, returning an error if it does
/// not fit.
pub(crate) fn valid_32bit_address(address: u64) -> Result<u32, Error> {
    let address: u32 = address
        .try_into()
        .map_err(|_| Error::Other(format!("Address {address:#08x} out of range")))?;

    Ok(address)
}

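/// A helper trait for types that expose memory access through an inner object.
/// Implementing it provides a blanket [`MemoryInterface`] implementation for
/// [`Error`] (see the impl below).
///
/// A sketch of a hypothetical implementor; `MyCore`, `MyMemory`, and `MyError`
/// are placeholder names:
///
/// ```ignore
/// struct MyCore {
///     memory: MyMemory, // some type implementing `MemoryInterface<MyError>`
/// }
///
/// impl CoreMemoryInterface for MyCore {
///     type ErrorType = MyError;
///
///     fn memory(&self) -> &dyn MemoryInterface<Self::ErrorType> {
///         &self.memory
///     }
///
///     fn memory_mut(&mut self) -> &mut dyn MemoryInterface<Self::ErrorType> {
///         &mut self.memory
///     }
/// }
///
/// // Given `Error: From<MyError>`, `MyCore` now also implements
/// // `MemoryInterface<Error>` through the blanket impl below.
/// ```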
pub trait CoreMemoryInterface {
    /// The error type returned by the underlying memory interface.
    type ErrorType: std::error::Error + From<InvalidDataLengthError> + From<MemoryNotAlignedError>;

    /// Returns a reference to the underlying memory interface.
    fn memory(&self) -> &dyn MemoryInterface<Self::ErrorType>;

    /// Returns a mutable reference to the underlying memory interface.
    fn memory_mut(&mut self) -> &mut dyn MemoryInterface<Self::ErrorType>;
}

impl<T> MemoryInterface<Error> for T
where
    T: CoreMemoryInterface,
    Error: From<<T as CoreMemoryInterface>::ErrorType>,
{
    fn supports_native_64bit_access(&mut self) -> bool {
        self.memory_mut().supports_native_64bit_access()
    }

    fn read_word_64(&mut self, address: u64) -> Result<u64, Error> {
        self.memory_mut().read_word_64(address).map_err(Error::from)
    }

    fn read_word_32(&mut self, address: u64) -> Result<u32, Error> {
        self.memory_mut().read_word_32(address).map_err(Error::from)
    }

    fn read_word_16(&mut self, address: u64) -> Result<u16, Error> {
        self.memory_mut().read_word_16(address).map_err(Error::from)
    }

    fn read_word_8(&mut self, address: u64) -> Result<u8, Error> {
        self.memory_mut().read_word_8(address).map_err(Error::from)
    }

    fn read_64(&mut self, address: u64, data: &mut [u64]) -> Result<(), Error> {
        self.memory_mut()
            .read_64(address, data)
            .map_err(Error::from)
    }

    fn read_32(&mut self, address: u64, data: &mut [u32]) -> Result<(), Error> {
        self.memory_mut()
            .read_32(address, data)
            .map_err(Error::from)
    }

    fn read_16(&mut self, address: u64, data: &mut [u16]) -> Result<(), Error> {
        self.memory_mut()
            .read_16(address, data)
            .map_err(Error::from)
    }

    fn read_8(&mut self, address: u64, data: &mut [u8]) -> Result<(), Error> {
        self.memory_mut().read_8(address, data).map_err(Error::from)
    }

    fn read(&mut self, address: u64, data: &mut [u8]) -> Result<(), Error> {
        self.memory_mut().read(address, data).map_err(Error::from)
    }

    fn write_word_64(&mut self, address: u64, data: u64) -> Result<(), Error> {
        self.memory_mut()
            .write_word_64(address, data)
            .map_err(Error::from)
    }

    fn write_word_32(&mut self, address: u64, data: u32) -> Result<(), Error> {
        self.memory_mut()
            .write_word_32(address, data)
            .map_err(Error::from)
    }

    fn write_word_16(&mut self, address: u64, data: u16) -> Result<(), Error> {
        self.memory_mut()
            .write_word_16(address, data)
            .map_err(Error::from)
    }

    fn write_word_8(&mut self, address: u64, data: u8) -> Result<(), Error> {
        self.memory_mut()
            .write_word_8(address, data)
            .map_err(Error::from)
    }

    fn write_64(&mut self, address: u64, data: &[u64]) -> Result<(), Error> {
        self.memory_mut()
            .write_64(address, data)
            .map_err(Error::from)
    }

    fn write_32(&mut self, address: u64, data: &[u32]) -> Result<(), Error> {
        self.memory_mut()
            .write_32(address, data)
            .map_err(Error::from)
    }

    fn write_16(&mut self, address: u64, data: &[u16]) -> Result<(), Error> {
        self.memory_mut()
            .write_16(address, data)
            .map_err(Error::from)
    }

    fn write_8(&mut self, address: u64, data: &[u8]) -> Result<(), Error> {
        self.memory_mut()
            .write_8(address, data)
            .map_err(Error::from)
    }

    fn write(&mut self, address: u64, data: &[u8]) -> Result<(), Error> {
        self.memory_mut().write(address, data).map_err(Error::from)
    }

    fn supports_8bit_transfers(&self) -> Result<bool, Error> {
        self.memory().supports_8bit_transfers().map_err(Error::from)
    }

    fn flush(&mut self) -> Result<(), Error> {
        self.memory_mut().flush().map_err(Error::from)
    }

    fn execute_memory_operations(&mut self, operations: &mut [Operation<'_>]) {
        self.memory_mut().execute_memory_operations(operations)
    }
}

#[derive(Debug)]
/// The kind of memory operation to perform, together with the data to write or
/// the buffer to read into.
pub enum OperationKind<'a> {
    Read(&'a mut [u8]),
    Read8(&'a mut [u8]),
    Read16(&'a mut [u16]),
    Read32(&'a mut [u32]),
    Read64(&'a mut [u64]),
    Write(&'a [u8]),
    Write8(&'a [u8]),
    Write16(&'a [u16]),
    Write32(&'a [u32]),
    Write64(&'a [u64]),
    WriteWord8(u8),
    WriteWord16(u16),
    WriteWord32(u32),
    WriteWord64(u64),
}

#[derive(Debug)]
/// A memory operation to be executed as part of a batch.
pub struct Operation<'a> {
    /// The address the operation targets.
    pub address: u64,

    /// The result of the operation, filled in by
    /// [`MemoryInterface::execute_memory_operations`]. `None` means the
    /// operation has not been executed (yet).
    pub result: Option<Result<(), Error>>,

    /// The kind of operation to perform.
    pub operation: OperationKind<'a>,
}

impl<'a> Operation<'a> {
    /// Create a new, not-yet-executed operation on `address`.
    pub fn new(address: u64, operation: OperationKind<'a>) -> Self {
        Operation {
            address,
            result: None,
            operation,
        }
    }

    /// Reborrow the operation so it can be passed by value to
    /// [`MemoryInterface::execute_single_memory_operation`] while the caller
    /// keeps ownership of the original.
    pub(crate) fn reborrow(&mut self) -> Operation<'_> {
        Operation {
            address: self.address,
            result: self.result.take(),
            operation: match self.operation {
                OperationKind::Read(ref mut data) => OperationKind::Read(data),
                OperationKind::Read8(ref mut data) => OperationKind::Read8(data),
                OperationKind::Read16(ref mut data) => OperationKind::Read16(data),
                OperationKind::Read32(ref mut data) => OperationKind::Read32(data),
                OperationKind::Read64(ref mut data) => OperationKind::Read64(data),
                OperationKind::Write(data) => OperationKind::Write(data),
                OperationKind::Write8(data) => OperationKind::Write8(data),
                OperationKind::Write16(data) => OperationKind::Write16(data),
                OperationKind::Write32(data) => OperationKind::Write32(data),
                OperationKind::Write64(data) => OperationKind::Write64(data),
                OperationKind::WriteWord8(data) => OperationKind::WriteWord8(data),
                OperationKind::WriteWord16(data) => OperationKind::WriteWord16(data),
                OperationKind::WriteWord32(data) => OperationKind::WriteWord32(data),
                OperationKind::WriteWord64(data) => OperationKind::WriteWord64(data),
            },
        }
    }
}