use crate::bitmap::{Bitmap, BS};
use crate::guest_memory::Result;
use crate::{
    Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemoryBackend, GuestMemoryError,
    GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
};
use std::sync::atomic::Ordering;
use std::sync::Arc;

/// Represents a continuous region of guest physical memory.
#[allow(clippy::len_without_is_empty)]
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
    /// Type used for dirty memory tracking.
    type B: Bitmap;

    /// Returns the size of the region, in bytes.
    fn len(&self) -> GuestUsize;

    /// Returns the minimum (inclusive) address managed by the region.
    fn start_addr(&self) -> GuestAddress;

    /// Returns the maximum (inclusive) address managed by the region.
    fn last_addr(&self) -> GuestAddress {
        // The last byte sits `len() - 1` bytes past the start.
        self.start_addr().unchecked_add(self.len() - 1)
    }

    /// Borrows the associated `Bitmap` object used for dirty page tracking.
    fn bitmap(&self) -> BS<'_, Self::B>;

    /// Returns the given address back if it is within this region, `None` otherwise.
    fn check_address(&self, addr: MemoryRegionAddress) -> Option<MemoryRegionAddress> {
        if self.address_in_range(addr) {
            Some(addr)
        } else {
            None
        }
    }

    /// Returns `true` if the given relative address is within this region.
    fn address_in_range(&self, addr: MemoryRegionAddress) -> bool {
        addr.raw_value() < self.len()
    }

    /// Returns `base + offset` if the sum is within this region, or `None` on
    /// overflow or if the result falls outside the region.
    fn checked_offset(
        &self,
        base: MemoryRegionAddress,
        offset: usize,
    ) -> Option<MemoryRegionAddress> {
        base.checked_add(offset as u64)
            .and_then(|addr| self.check_address(addr))
    }

    /// Tries to convert an absolute guest address into a relative region address.
    fn to_region_addr(&self, addr: GuestAddress) -> Option<MemoryRegionAddress> {
        addr.checked_offset_from(self.start_addr())
            .and_then(|offset| self.check_address(MemoryRegionAddress(offset)))
    }

    /// Returns the host virtual address corresponding to the region address.
    ///
    /// The default implementation reports that no host address is available.
    fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
        Err(GuestMemoryError::HostAddressNotAvailable)
    }

    /// Returns information about the file backing this region, if any.
    fn file_offset(&self) -> Option<&FileOffset> {
        None
    }

    /// Returns a [`VolatileSlice`] of `count` bytes starting at `offset`.
    #[allow(unused_variables)]
    fn get_slice(
        &self,
        offset: MemoryRegionAddress,
        count: usize,
    ) -> Result<VolatileSlice<'_, BS<'_, Self::B>>> {
        Err(GuestMemoryError::HostAddressNotAvailable)
    }

    /// Returns a [`VolatileSlice`] covering the entire region.
    fn as_volatile_slice(&self) -> Result<VolatileSlice<'_, BS<'_, Self::B>>> {
        self.get_slice(MemoryRegionAddress(0), self.len() as usize)
    }

    /// Shows whether the region is backed by hugetlbfs; `None` means unknown.
    #[cfg(target_os = "linux")]
    fn is_hugetlbfs(&self) -> Option<bool> {
        None
    }
}
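
// Example of the default address arithmetic above (a sketch; `region` stands
// for any implementation covering guest addresses [0x1000, 0x1400)):
//
//     assert_eq!(region.last_addr(), GuestAddress(0x13ff));
//     // Absolute guest address 0x1200 lies 0x200 bytes into the region.
//     assert_eq!(
//         region.to_region_addr(GuestAddress(0x1200)),
//         Some(MemoryRegionAddress(0x200))
//     );
//     // Offsets that would run past the end of the region are rejected.
//     assert_eq!(region.checked_offset(MemoryRegionAddress(0x3ff), 1), None);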

/// Errors that can occur when constructing or updating a [`GuestRegionCollection`].
#[derive(Debug, thiserror::Error)]
pub enum GuestRegionCollectionError {
    /// No memory region found.
    #[error("No memory region found")]
    NoMemoryRegion,
    /// Some of the memory regions intersect with each other.
    #[error("Some of the memory regions intersect with each other")]
    MemoryRegionOverlap,
    /// The provided memory regions haven't been sorted.
    #[error("The provided memory regions haven't been sorted")]
    UnsortedMemoryRegions,
}

/// A collection of [`GuestMemoryRegion`] objects, sorted by base address and
/// held behind `Arc`s so that clones share the underlying regions.
#[derive(Debug)]
pub struct GuestRegionCollection<R> {
    regions: Vec<Arc<R>>,
}

impl<R> Default for GuestRegionCollection<R> {
    fn default() -> Self {
        Self {
            regions: Vec::new(),
        }
    }
}

impl<R> Clone for GuestRegionCollection<R> {
    fn clone(&self) -> Self {
        // Shallow clone: only the `Arc` handles are duplicated.
        GuestRegionCollection {
            regions: self.regions.iter().map(Arc::clone).collect(),
        }
    }
}

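// The sharing semantics in a minimal sketch (`collection` is assumed to be any
// previously constructed `GuestRegionCollection`):
//
//     let snapshot = collection.clone();
//     // Both handles point at the same `Arc`ed regions; no region is copied.
//     assert_eq!(snapshot.num_regions(), collection.num_regions());
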
impl<R: GuestMemoryRegion> GuestRegionCollection<R> {
    /// Creates an empty `GuestRegionCollection` instance.
    pub fn new() -> Self {
        Self::default()
    }

    /// Creates a new `GuestRegionCollection` from a vector of regions.
    ///
    /// The vector must be non-empty and sorted by base address, and the
    /// regions must not overlap each other.
    pub fn from_regions(
        mut regions: Vec<R>,
    ) -> std::result::Result<Self, GuestRegionCollectionError> {
        Self::from_arc_regions(regions.drain(..).map(Arc::new).collect())
    }

    /// Creates a new `GuestRegionCollection` from a vector of `Arc`ed regions,
    /// which allows sharing regions with other collections.
    ///
    /// The vector must be non-empty and sorted by base address, and the
    /// regions must not overlap each other.
    pub fn from_arc_regions(
        regions: Vec<Arc<R>>,
    ) -> std::result::Result<Self, GuestRegionCollectionError> {
        if regions.is_empty() {
            return Err(GuestRegionCollectionError::NoMemoryRegion);
        }

        // Validate ordering and overlap for each pair of neighbours.
        for window in regions.windows(2) {
            let prev = &window[0];
            let next = &window[1];

            if prev.start_addr() > next.start_addr() {
                return Err(GuestRegionCollectionError::UnsortedMemoryRegions);
            }

            if prev.last_addr() >= next.start_addr() {
                return Err(GuestRegionCollectionError::MemoryRegionOverlap);
            }
        }

        Ok(Self { regions })
    }

    /// Returns a new collection with `region` inserted, leaving `self` unchanged.
    ///
    /// Fails if the new region overlaps an existing one.
    pub fn insert_region(
        &self,
        region: Arc<R>,
    ) -> std::result::Result<GuestRegionCollection<R>, GuestRegionCollectionError> {
        let mut regions = self.regions.clone();
        regions.push(region);
        regions.sort_by_key(|x| x.start_addr());

        // Re-validation rejects the insertion if it introduced an overlap.
        Self::from_arc_regions(regions)
    }

    /// Returns a new collection with the matching region removed, along with
    /// the removed region itself, leaving `self` unchanged.
    ///
    /// Fails unless a region starts exactly at `base` and is exactly `size` bytes long.
    pub fn remove_region(
        &self,
        base: GuestAddress,
        size: GuestUsize,
    ) -> std::result::Result<(GuestRegionCollection<R>, Arc<R>), GuestRegionCollectionError> {
        if let Ok(region_index) = self.regions.binary_search_by_key(&base, |x| x.start_addr()) {
            if self.regions.get(region_index).unwrap().len() == size {
                let mut regions = self.regions.clone();
                let region = regions.remove(region_index);
                return Ok((Self { regions }, region));
            }
        }

        Err(GuestRegionCollectionError::NoMemoryRegion)
    }
}

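// The copy-on-update flow in a minimal sketch (`gm` is an existing collection;
// `region`, `removed_base`, and `removed_len` are assumed placeholders):
//
//     let bigger = gm.insert_region(Arc::new(region))?;
//     let (smaller, removed) = bigger.remove_region(removed_base, removed_len)?;
//     // `gm` itself is unchanged by both calls.
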
impl<R: GuestMemoryRegion> GuestMemoryBackend for GuestRegionCollection<R> {
    type R = R;

    fn num_regions(&self) -> usize {
        self.regions.len()
    }

    fn find_region(&self, addr: GuestAddress) -> Option<&R> {
        // Binary search on the sorted base addresses: an exact hit is the
        // region itself; otherwise `addr` may still fall inside the region
        // that starts immediately before the insertion point.
        let index = match self.regions.binary_search_by_key(&addr, |x| x.start_addr()) {
            Ok(x) => Some(x),
            Err(x) if (x > 0 && addr <= self.regions[x - 1].last_addr()) => Some(x - 1),
            _ => None,
        };
        index.map(|x| self.regions[x].as_ref())
    }

    fn iter(&self) -> impl Iterator<Item = &Self::R> {
        self.regions.iter().map(AsRef::as_ref)
    }
}

/// A marker trait that opts a [`GuestMemoryRegion`] into the blanket [`Bytes`]
/// implementation below, which routes all accesses through
/// [`GuestMemoryRegion::as_volatile_slice`].
pub trait GuestMemoryRegionBytes: GuestMemoryRegion {}

impl<R: GuestMemoryRegionBytes> Bytes<MemoryRegionAddress> for R {
    type E = GuestMemoryError;

    // Every method below delegates to the region's volatile slice, so
    // implementing `get_slice` is enough to obtain the whole `Bytes` API.

    fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()?
            .write(buf, maddr)
            .map_err(Into::into)
    }

    fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()?
            .read(buf, maddr)
            .map_err(Into::into)
    }

    fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()?
            .write_slice(buf, maddr)
            .map_err(Into::into)
    }

    fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()?
            .read_slice(buf, maddr)
            .map_err(Into::into)
    }

    fn read_volatile_from<F>(
        &self,
        addr: MemoryRegionAddress,
        src: &mut F,
        count: usize,
    ) -> Result<usize>
    where
        F: ReadVolatile,
    {
        self.as_volatile_slice()?
            .read_volatile_from(addr.0 as usize, src, count)
            .map_err(Into::into)
    }

    fn read_exact_volatile_from<F>(
        &self,
        addr: MemoryRegionAddress,
        src: &mut F,
        count: usize,
    ) -> Result<()>
    where
        F: ReadVolatile,
    {
        self.as_volatile_slice()?
            .read_exact_volatile_from(addr.0 as usize, src, count)
            .map_err(Into::into)
    }

    fn write_volatile_to<F>(
        &self,
        addr: MemoryRegionAddress,
        dst: &mut F,
        count: usize,
    ) -> Result<usize>
    where
        F: WriteVolatile,
    {
        self.as_volatile_slice()?
            .write_volatile_to(addr.0 as usize, dst, count)
            .map_err(Into::into)
    }

    fn write_all_volatile_to<F>(
        &self,
        addr: MemoryRegionAddress,
        dst: &mut F,
        count: usize,
    ) -> Result<()>
    where
        F: WriteVolatile,
    {
        self.as_volatile_slice()?
            .write_all_volatile_to(addr.0 as usize, dst, count)
            .map_err(Into::into)
    }

    fn store<T: AtomicAccess>(
        &self,
        val: T,
        addr: MemoryRegionAddress,
        order: Ordering,
    ) -> Result<()> {
        self.as_volatile_slice().and_then(|s| {
            s.store(val, addr.raw_value() as usize, order)
                .map_err(Into::into)
        })
    }

    fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
        self.as_volatile_slice()
            .and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
    }
}
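
// Opting in, as a minimal sketch (the region type `MyRegion` and its working
// `get_slice` implementation are assumed):
//
//     impl GuestMemoryRegionBytes for MyRegion {}
//
//     let n = my_region.write(&[1u8, 2, 3], MemoryRegionAddress(0x100))?;
//     let v: u32 = my_region.load(MemoryRegionAddress(0x100), Ordering::Relaxed)?;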

#[cfg(test)]
pub(crate) mod tests {
    use crate::region::{GuestMemoryRegionBytes, GuestRegionCollectionError};
    use crate::{
        Address, GuestAddress, GuestMemoryBackend, GuestMemoryRegion, GuestRegionCollection,
        GuestUsize,
    };
    use matches::assert_matches;
    use std::sync::Arc;

    /// A dummy region that carries only its start address and length, for
    /// exercising `GuestRegionCollection` without real backing memory.
    #[derive(Debug, PartialEq, Eq)]
    pub(crate) struct MockRegion {
        pub(crate) start: GuestAddress,
        pub(crate) len: GuestUsize,
    }

    impl GuestMemoryRegion for MockRegion {
        type B = ();

        fn len(&self) -> GuestUsize {
            self.len
        }

        fn start_addr(&self) -> GuestAddress {
            self.start
        }

        fn bitmap(&self) {}
    }

    impl GuestMemoryRegionBytes for MockRegion {}

    pub(crate) type Collection = GuestRegionCollection<MockRegion>;

    /// Checks that a successfully constructed collection matches the expected
    /// `(start, len)` summaries, including `last_addr` and region lookup.
    fn check_guest_memory_mmap(
        maybe_guest_mem: Result<Collection, GuestRegionCollectionError>,
        expected_regions_summary: &[(GuestAddress, u64)],
    ) {
        assert!(maybe_guest_mem.is_ok());

        let guest_mem = maybe_guest_mem.unwrap();
        assert_eq!(guest_mem.num_regions(), expected_regions_summary.len());
        let maybe_last_mem_reg = expected_regions_summary.last();
        if let Some((region_addr, region_size)) = maybe_last_mem_reg {
            let mut last_addr = region_addr.unchecked_add(*region_size);
            if last_addr.raw_value() != 0 {
                last_addr = last_addr.unchecked_sub(1);
            }
            assert_eq!(guest_mem.last_addr(), last_addr);
        }
        for ((region_addr, region_size), mmap) in
            expected_regions_summary.iter().zip(guest_mem.iter())
        {
            assert_eq!(region_addr, &mmap.start);
            assert_eq!(region_size, &mmap.len);

            assert!(guest_mem.find_region(*region_addr).is_some());
        }
    }

    pub(crate) fn new_guest_memory_collection_from_regions(
        regions_summary: &[(GuestAddress, u64)],
    ) -> Result<Collection, GuestRegionCollectionError> {
        Collection::from_regions(
            regions_summary
                .iter()
                .map(|&(start, len)| MockRegion { start, len })
                .collect(),
        )
    }

    fn new_guest_memory_collection_from_arc_regions(
        regions_summary: &[(GuestAddress, u64)],
    ) -> Result<Collection, GuestRegionCollectionError> {
        Collection::from_arc_regions(
            regions_summary
                .iter()
                .map(|&(start, len)| Arc::new(MockRegion { start, len }))
                .collect(),
        )
    }

    #[test]
    fn test_no_memory_region() {
        let regions_summary = [];

        assert_matches!(
            new_guest_memory_collection_from_regions(&regions_summary).unwrap_err(),
            GuestRegionCollectionError::NoMemoryRegion
        );
        assert_matches!(
            new_guest_memory_collection_from_arc_regions(&regions_summary).unwrap_err(),
            GuestRegionCollectionError::NoMemoryRegion
        );
    }

    #[test]
    fn test_overlapping_memory_regions() {
        let regions_summary = [(GuestAddress(0), 100), (GuestAddress(99), 100)];

        assert_matches!(
            new_guest_memory_collection_from_regions(&regions_summary).unwrap_err(),
            GuestRegionCollectionError::MemoryRegionOverlap
        );
        assert_matches!(
            new_guest_memory_collection_from_arc_regions(&regions_summary).unwrap_err(),
            GuestRegionCollectionError::MemoryRegionOverlap
        );
    }

    #[test]
    fn test_unsorted_memory_regions() {
        let regions_summary = [(GuestAddress(100), 100), (GuestAddress(0), 100)];

        assert_matches!(
            new_guest_memory_collection_from_regions(&regions_summary).unwrap_err(),
            GuestRegionCollectionError::UnsortedMemoryRegions
        );
        assert_matches!(
            new_guest_memory_collection_from_arc_regions(&regions_summary).unwrap_err(),
            GuestRegionCollectionError::UnsortedMemoryRegions
        );
    }

    #[test]
    fn test_valid_memory_regions() {
        let regions_summary = [(GuestAddress(0), 100), (GuestAddress(100), 100)];

        let guest_mem = Collection::new();
        assert_eq!(guest_mem.num_regions(), 0);

        check_guest_memory_mmap(
            new_guest_memory_collection_from_regions(&regions_summary),
            &regions_summary,
        );

        check_guest_memory_mmap(
            new_guest_memory_collection_from_arc_regions(&regions_summary),
            &regions_summary,
        );
    }

    #[test]
    fn test_mmap_insert_region() {
        let region_size = 0x1000;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x10_0000), region_size),
        ];
        let mem_orig = new_guest_memory_collection_from_regions(&regions).unwrap();
        let mut gm = mem_orig.clone();
        assert_eq!(mem_orig.num_regions(), 2);

        let new_regions = [
            (GuestAddress(0x8000), 0x1000),
            (GuestAddress(0x4000), 0x1000),
            (GuestAddress(0xc000), 0x1000),
        ];

        for (start, len) in new_regions {
            gm = gm
                .insert_region(Arc::new(MockRegion { start, len }))
                .unwrap();
        }

        // Inserting a region that overlaps an existing one must fail.
        gm.insert_region(Arc::new(MockRegion {
            start: GuestAddress(0xc000),
            len: 0x1000,
        }))
        .unwrap_err();

        assert_eq!(mem_orig.num_regions(), 2);
        assert_eq!(gm.num_regions(), 5);

        let regions = gm.iter().collect::<Vec<_>>();

        assert_eq!(regions[0].start_addr(), GuestAddress(0x0000));
        assert_eq!(regions[1].start_addr(), GuestAddress(0x4000));
        assert_eq!(regions[2].start_addr(), GuestAddress(0x8000));
        assert_eq!(regions[3].start_addr(), GuestAddress(0xc000));
        assert_eq!(regions[4].start_addr(), GuestAddress(0x10_0000));
    }

    #[test]
    fn test_mmap_remove_region() {
        let region_size = 0x1000;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x10_0000), region_size),
        ];
        let mem_orig = new_guest_memory_collection_from_regions(&regions).unwrap();
        let gm = mem_orig.clone();
        assert_eq!(mem_orig.num_regions(), 2);

        // Removal fails when the size does not match or no region starts at the address.
        gm.remove_region(GuestAddress(0), 128).unwrap_err();
        gm.remove_region(GuestAddress(0x4000), 128).unwrap_err();
        let (gm, region) = gm.remove_region(GuestAddress(0x10_0000), 0x1000).unwrap();

        assert_eq!(mem_orig.num_regions(), 2);
        assert_eq!(gm.num_regions(), 1);

        assert_eq!(gm.iter().next().unwrap().start_addr(), GuestAddress(0x0000));
        assert_eq!(region.start_addr(), GuestAddress(0x10_0000));
    }

    #[test]
    fn test_iter() {
        let region_size = 0x400;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x1000), region_size),
        ];
        let mut iterated_regions = Vec::new();
        let gm = new_guest_memory_collection_from_regions(&regions).unwrap();

        for region in gm.iter() {
            assert_eq!(region.len(), region_size as GuestUsize);
        }

        for region in gm.iter() {
            iterated_regions.push((region.start_addr(), region.len()));
        }
        assert_eq!(regions, iterated_regions);

        assert!(regions
            .iter()
            .map(|x| (x.0, x.1))
            .eq(iterated_regions.iter().copied()));

        let mmap_regions = gm.iter().collect::<Vec<_>>();

        assert_eq!(mmap_regions[0].start, regions[0].0);
        assert_eq!(mmap_regions[1].start, regions[1].0);
    }
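
    // A sketch of `find_region` lookup semantics (the two-region layout with a
    // gap is chosen just for this test): exact base addresses and interior
    // addresses resolve to a region; addresses in gaps or past the end do not.
    #[test]
    fn test_find_region_lookup() {
        let gm = new_guest_memory_collection_from_regions(&[
            (GuestAddress(0x0), 0x400),
            (GuestAddress(0x1000), 0x400),
        ])
        .unwrap();

        // An exact base address resolves to its region.
        let region = gm.find_region(GuestAddress(0x1000)).unwrap();
        assert_eq!(region.start_addr(), GuestAddress(0x1000));
        // An interior address resolves to the containing region.
        let region = gm.find_region(GuestAddress(0x1200)).unwrap();
        assert_eq!(region.start_addr(), GuestAddress(0x1000));
        // Addresses in the gap between regions, or past the end, resolve to nothing.
        assert!(gm.find_region(GuestAddress(0x800)).is_none());
        assert!(gm.find_region(GuestAddress(0x1400)).is_none());
    }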

    #[test]
    fn test_address_in_range() {
        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            new_guest_memory_collection_from_regions(&[(start_addr1, 0x400), (start_addr2, 0x400)])
                .unwrap();

        assert!(guest_mem.address_in_range(GuestAddress(0x200)));
        assert!(!guest_mem.address_in_range(GuestAddress(0x600)));
        assert!(guest_mem.address_in_range(GuestAddress(0xa00)));
        assert!(!guest_mem.address_in_range(GuestAddress(0xc00)));
    }

    #[test]
    fn test_check_address() {
        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            new_guest_memory_collection_from_regions(&[(start_addr1, 0x400), (start_addr2, 0x400)])
                .unwrap();

        assert_eq!(
            guest_mem.check_address(GuestAddress(0x200)),
            Some(GuestAddress(0x200))
        );
        assert_eq!(guest_mem.check_address(GuestAddress(0x600)), None);
        assert_eq!(
            guest_mem.check_address(GuestAddress(0xa00)),
            Some(GuestAddress(0xa00))
        );
        assert_eq!(guest_mem.check_address(GuestAddress(0xc00)), None);
    }

    #[test]
    fn test_checked_offset() {
        let start_addr1 = GuestAddress(0);
        let start_addr2 = GuestAddress(0x800);
        let start_addr3 = GuestAddress(0xc00);
        let guest_mem = new_guest_memory_collection_from_regions(&[
            (start_addr1, 0x400),
            (start_addr2, 0x400),
            (start_addr3, 0x400),
        ])
        .unwrap();

        assert_eq!(
            guest_mem.checked_offset(start_addr1, 0x200),
            Some(GuestAddress(0x200))
        );
        assert_eq!(
            guest_mem.checked_offset(start_addr1, 0xa00),
            Some(GuestAddress(0xa00))
        );
        assert_eq!(
            guest_mem.checked_offset(start_addr2, 0x7ff),
            Some(GuestAddress(0xfff))
        );
        assert_eq!(guest_mem.checked_offset(start_addr2, 0xc00), None);
        assert_eq!(guest_mem.checked_offset(start_addr1, usize::MAX), None);

        assert_eq!(guest_mem.checked_offset(start_addr1, 0x400), None);
        assert_eq!(
            guest_mem.checked_offset(start_addr1, 0x400 - 1),
            Some(GuestAddress(0x400 - 1))
        );
    }
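
    // A sketch exercising the `GuestMemoryRegion` address arithmetic directly
    // on `MockRegion`; the expected values follow from a [0x800, 0xc00) layout
    // chosen just for this test.
    #[test]
    fn test_region_address_arithmetic() {
        use crate::MemoryRegionAddress;

        let region = MockRegion {
            start: GuestAddress(0x800),
            len: 0x400,
        };

        assert_eq!(region.last_addr(), GuestAddress(0xbff));

        // An absolute guest address inside the region maps to its offset.
        assert_eq!(
            region.to_region_addr(GuestAddress(0xa00)),
            Some(MemoryRegionAddress(0x200))
        );
        // Addresses before the start or past the end of the region do not map.
        assert_eq!(region.to_region_addr(GuestAddress(0x400)), None);
        assert_eq!(region.to_region_addr(GuestAddress(0xc00)), None);

        // Relative addresses are validated against the region length.
        assert!(region.address_in_range(MemoryRegionAddress(0x3ff)));
        assert!(!region.address_in_range(MemoryRegionAddress(0x400)));
    }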
}