1use std::io::{Read, Write};
5use std::sync::atomic::Ordering;
6use std::sync::Arc;
7
8use vm_memory::bitmap::{Bitmap, BS};
9use vm_memory::guest_memory::GuestMemoryIterator;
10use vm_memory::mmap::{Error, NewBitmap};
11use vm_memory::{
12 guest_memory, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryRegion,
13 GuestRegionMmap, GuestUsize, MemoryRegionAddress, VolatileSlice,
14};
15
16use crate::GuestRegionRaw;
17
/// A guest memory region backed either by a memory-mapped area or by a raw
/// host memory range, dispatching all operations to the active backend.
#[derive(Debug)]
pub enum GuestRegionHybrid<B = ()> {
    /// Region backed by a memory-mapped area.
    Mmap(GuestRegionMmap<B>),
    /// Region backed by a raw host address range.
    Raw(GuestRegionRaw<B>),
}
26
27impl<B: Bitmap> GuestRegionHybrid<B> {
28 pub fn from_mmap_region(region: GuestRegionMmap<B>) -> Self {
30 GuestRegionHybrid::Mmap(region)
31 }
32
33 pub fn from_raw_region(region: GuestRegionRaw<B>) -> Self {
35 GuestRegionHybrid::Raw(region)
36 }
37}
38
39impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionHybrid<B> {
40 type E = guest_memory::Error;
41
42 fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
43 match self {
44 GuestRegionHybrid::Mmap(region) => region.write(buf, addr),
45 GuestRegionHybrid::Raw(region) => region.write(buf, addr),
46 }
47 }
48
49 fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
50 match self {
51 GuestRegionHybrid::Mmap(region) => region.read(buf, addr),
52 GuestRegionHybrid::Raw(region) => region.read(buf, addr),
53 }
54 }
55
56 fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
57 match self {
58 GuestRegionHybrid::Mmap(region) => region.write_slice(buf, addr),
59 GuestRegionHybrid::Raw(region) => region.write_slice(buf, addr),
60 }
61 }
62
63 fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
64 match self {
65 GuestRegionHybrid::Mmap(region) => region.read_slice(buf, addr),
66 GuestRegionHybrid::Raw(region) => region.read_slice(buf, addr),
67 }
68 }
69
70 fn read_from<F>(
71 &self,
72 addr: MemoryRegionAddress,
73 src: &mut F,
74 count: usize,
75 ) -> guest_memory::Result<usize>
76 where
77 F: Read,
78 {
79 match self {
80 GuestRegionHybrid::Mmap(region) => region.read_from(addr, src, count),
81 GuestRegionHybrid::Raw(region) => region.read_from(addr, src, count),
82 }
83 }
84
85 fn read_exact_from<F>(
86 &self,
87 addr: MemoryRegionAddress,
88 src: &mut F,
89 count: usize,
90 ) -> guest_memory::Result<()>
91 where
92 F: Read,
93 {
94 match self {
95 GuestRegionHybrid::Mmap(region) => region.read_exact_from(addr, src, count),
96 GuestRegionHybrid::Raw(region) => region.read_exact_from(addr, src, count),
97 }
98 }
99
100 fn write_to<F>(
101 &self,
102 addr: MemoryRegionAddress,
103 dst: &mut F,
104 count: usize,
105 ) -> guest_memory::Result<usize>
106 where
107 F: Write,
108 {
109 match self {
110 GuestRegionHybrid::Mmap(region) => region.write_to(addr, dst, count),
111 GuestRegionHybrid::Raw(region) => region.write_to(addr, dst, count),
112 }
113 }
114
115 fn write_all_to<F>(
116 &self,
117 addr: MemoryRegionAddress,
118 dst: &mut F,
119 count: usize,
120 ) -> guest_memory::Result<()>
121 where
122 F: Write,
123 {
124 match self {
125 GuestRegionHybrid::Mmap(region) => region.write_all_to(addr, dst, count),
126 GuestRegionHybrid::Raw(region) => region.write_all_to(addr, dst, count),
127 }
128 }
129
130 fn store<T: AtomicAccess>(
131 &self,
132 val: T,
133 addr: MemoryRegionAddress,
134 order: Ordering,
135 ) -> guest_memory::Result<()> {
136 match self {
137 GuestRegionHybrid::Mmap(region) => region.store(val, addr, order),
138 GuestRegionHybrid::Raw(region) => region.store(val, addr, order),
139 }
140 }
141
142 fn load<T: AtomicAccess>(
143 &self,
144 addr: MemoryRegionAddress,
145 order: Ordering,
146 ) -> guest_memory::Result<T> {
147 match self {
148 GuestRegionHybrid::Mmap(region) => region.load(addr, order),
149 GuestRegionHybrid::Raw(region) => region.load(addr, order),
150 }
151 }
152}
153
154impl<B: Bitmap> GuestMemoryRegion for GuestRegionHybrid<B> {
155 type B = B;
156
157 fn len(&self) -> GuestUsize {
158 match self {
159 GuestRegionHybrid::Mmap(region) => region.len(),
160 GuestRegionHybrid::Raw(region) => region.len(),
161 }
162 }
163
164 fn start_addr(&self) -> GuestAddress {
165 match self {
166 GuestRegionHybrid::Mmap(region) => region.start_addr(),
167 GuestRegionHybrid::Raw(region) => region.start_addr(),
168 }
169 }
170
171 fn bitmap(&self) -> &Self::B {
172 match self {
173 GuestRegionHybrid::Mmap(region) => region.bitmap(),
174 GuestRegionHybrid::Raw(region) => region.bitmap(),
175 }
176 }
177
178 fn get_host_address(&self, addr: MemoryRegionAddress) -> guest_memory::Result<*mut u8> {
179 match self {
180 GuestRegionHybrid::Mmap(region) => region.get_host_address(addr),
181 GuestRegionHybrid::Raw(region) => region.get_host_address(addr),
182 }
183 }
184
185 fn file_offset(&self) -> Option<&FileOffset> {
186 match self {
187 GuestRegionHybrid::Mmap(region) => region.file_offset(),
188 GuestRegionHybrid::Raw(region) => region.file_offset(),
189 }
190 }
191
192 unsafe fn as_slice(&self) -> Option<&[u8]> {
193 match self {
194 GuestRegionHybrid::Mmap(region) => region.as_slice(),
195 GuestRegionHybrid::Raw(region) => region.as_slice(),
196 }
197 }
198
199 unsafe fn as_mut_slice(&self) -> Option<&mut [u8]> {
200 match self {
201 GuestRegionHybrid::Mmap(region) => region.as_mut_slice(),
202 GuestRegionHybrid::Raw(region) => region.as_mut_slice(),
203 }
204 }
205
206 fn get_slice(
207 &self,
208 offset: MemoryRegionAddress,
209 count: usize,
210 ) -> guest_memory::Result<VolatileSlice<BS<B>>> {
211 match self {
212 GuestRegionHybrid::Mmap(region) => region.get_slice(offset, count),
213 GuestRegionHybrid::Raw(region) => region.get_slice(offset, count),
214 }
215 }
216
217 #[cfg(target_os = "linux")]
218 fn is_hugetlbfs(&self) -> Option<bool> {
219 match self {
220 GuestRegionHybrid::Mmap(region) => region.is_hugetlbfs(),
221 GuestRegionHybrid::Raw(region) => region.is_hugetlbfs(),
222 }
223 }
224}
225
/// A collection of hybrid guest memory regions, kept sorted by start address
/// and guaranteed non-overlapping by the constructors.
#[derive(Clone, Debug, Default)]
pub struct GuestMemoryHybrid<B = ()> {
    /// Sorted, non-overlapping regions backing the guest address space.
    pub(crate) regions: Vec<Arc<GuestRegionHybrid<B>>>,
}
235
236impl<B: NewBitmap> GuestMemoryHybrid<B> {
237 pub fn new() -> Self {
239 Self::default()
240 }
241}
242
243impl<B: Bitmap> GuestMemoryHybrid<B> {
244 pub fn from_regions(mut regions: Vec<GuestRegionHybrid<B>>) -> Result<Self, Error> {
252 Self::from_arc_regions(regions.drain(..).map(Arc::new).collect())
253 }
254
255 pub fn from_arc_regions(regions: Vec<Arc<GuestRegionHybrid<B>>>) -> Result<Self, Error> {
268 if regions.is_empty() {
269 return Err(Error::NoMemoryRegion);
270 }
271
272 for window in regions.windows(2) {
273 let prev = &window[0];
274 let next = &window[1];
275
276 if prev.start_addr() > next.start_addr() {
277 return Err(Error::UnsortedMemoryRegions);
278 }
279
280 if prev.last_addr() >= next.start_addr() {
281 return Err(Error::MemoryRegionOverlap);
282 }
283 }
284
285 Ok(Self { regions })
286 }
287
288 pub fn insert_region(
293 &self,
294 region: Arc<GuestRegionHybrid<B>>,
295 ) -> Result<GuestMemoryHybrid<B>, Error> {
296 let mut regions = self.regions.clone();
297 regions.push(region);
298 regions.sort_by_key(|x| x.start_addr());
299
300 Self::from_arc_regions(regions)
301 }
302
303 pub fn remove_region(
310 &self,
311 base: GuestAddress,
312 size: GuestUsize,
313 ) -> Result<(GuestMemoryHybrid<B>, Arc<GuestRegionHybrid<B>>), Error> {
314 if let Ok(region_index) = self.regions.binary_search_by_key(&base, |x| x.start_addr()) {
315 if self.regions.get(region_index).unwrap().len() as GuestUsize == size {
316 let mut regions = self.regions.clone();
317 let region = regions.remove(region_index);
318 return Ok((Self { regions }, region));
319 }
320 }
321
322 Err(Error::InvalidGuestRegion)
323 }
324}
325
326pub struct Iter<'a, B>(std::slice::Iter<'a, Arc<GuestRegionHybrid<B>>>);
330
331impl<'a, B> Iterator for Iter<'a, B> {
332 type Item = &'a GuestRegionHybrid<B>;
333
334 fn next(&mut self) -> Option<Self::Item> {
335 self.0.next().map(AsRef::as_ref)
336 }
337}
338
// Ties `GuestMemoryHybrid` to its region iterator type for the
// `GuestMemory` trait machinery.
impl<'a, B: 'a> GuestMemoryIterator<'a, GuestRegionHybrid<B>> for GuestMemoryHybrid<B> {
    type Iter = Iter<'a, B>;
}
342
343impl<B: Bitmap + 'static> GuestMemory for GuestMemoryHybrid<B> {
344 type R = GuestRegionHybrid<B>;
345
346 type I = Self;
347
348 fn num_regions(&self) -> usize {
349 self.regions.len()
350 }
351
352 fn find_region(&self, addr: GuestAddress) -> Option<&GuestRegionHybrid<B>> {
353 let index = match self.regions.binary_search_by_key(&addr, |x| x.start_addr()) {
354 Ok(x) => Some(x),
355 Err(x) if (x > 0 && addr <= self.regions[x - 1].last_addr()) => Some(x - 1),
357 _ => None,
358 };
359 index.map(|x| self.regions[x].as_ref())
360 }
361
362 fn iter(&self) -> Iter<B> {
363 Iter(self.regions.iter())
364 }
365}
366
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Seek;
    use vm_memory::{GuestMemoryError, MmapRegion};
    use vmm_sys_util::tempfile::TempFile;

    // Hybrid regions built from either backend report the expected
    // start address and length.
    #[test]
    fn test_region_new() {
        let start_addr = GuestAddress(0x0);

        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x400).unwrap(), start_addr).unwrap();
        let guest_region = GuestRegionHybrid::from_mmap_region(mmap_reg);

        assert_eq!(guest_region.start_addr(), start_addr);
        assert_eq!(guest_region.len(), 0x400);

        let mut buf = [0u8; 1024];
        let raw_region =
            unsafe { GuestRegionRaw::<()>::new(start_addr, &mut buf as *mut _, 0x800) };
        let guest_region = GuestRegionHybrid::from_raw_region(raw_region);

        assert_eq!(guest_region.start_addr(), start_addr);
        assert_eq!(guest_region.len(), 0x800);
    }

    // write()/read() round-trip on an mmap-backed region, and both fail
    // with InvalidBackendAddress for an out-of-range address.
    #[test]
    fn test_write_and_read_on_mmap_region() {
        let start_addr = GuestAddress(0x0);
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_region = GuestRegionHybrid::from_mmap_region(mmap_reg);
        let buf_to_write = [0xF0u8; 0x400];
        let write_addr = MemoryRegionAddress(0x400);

        let number_of_bytes_write = guest_region.write(&buf_to_write, write_addr).unwrap();
        assert_eq!(number_of_bytes_write, 0x400);
        let mut buf_read = [0u8; 0x400];
        let number_of_bytes_read = guest_region.read(&mut buf_read, write_addr).unwrap();
        assert_eq!(number_of_bytes_read, 0x400);
        assert_eq!(buf_read, [0xF0u8; 0x400]);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .write(&buf_to_write, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        assert!(matches!(
            guest_region
                .read(&mut buf_read, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // Same write()/read() round-trip and error cases for a raw-backed
    // region.
    #[test]
    fn test_write_and_read_on_raw_region() {
        let start_addr = GuestAddress(0x0);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_region = GuestRegionHybrid::from_raw_region(raw_region);
        let buf_to_write = [0xF0u8; 0x400];
        let write_addr = MemoryRegionAddress(0x400);

        let number_of_bytes_write = guest_region.write(&buf_to_write, write_addr).unwrap();
        assert_eq!(number_of_bytes_write, 0x400);
        let mut buf_read = [0u8; 0x400];
        let number_of_bytes_read = guest_region.read(&mut buf_read, write_addr).unwrap();
        assert_eq!(number_of_bytes_read, 0x400);
        assert_eq!(buf_read, [0xF0u8; 0x400]);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .write(&buf_to_write, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        assert!(matches!(
            guest_region
                .read(&mut buf_read, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // write_slice()/read_slice() on an mmap-backed region: success,
    // InvalidBackendAddress for out-of-range, and PartialBuffer when the
    // slice does not fully fit in the region.
    #[test]
    fn test_write_slice_and_read_slice_on_mmap_region() {
        let start_addr = GuestAddress(0x0);
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_region = GuestRegionHybrid::from_mmap_region(mmap_reg);
        let buf_to_write = [0xF0u8; 0x400];
        let write_addr = MemoryRegionAddress(0x400);

        guest_region.write_slice(&buf_to_write, write_addr).unwrap();
        let mut buf_read = [0x0u8; 0x400];
        guest_region.read_slice(&mut buf_read, write_addr).unwrap();
        assert_eq!(buf_read, [0xF0u8; 0x400]);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .write_slice(&buf_to_write, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let insufficient_addr = MemoryRegionAddress(0x600);
        assert_eq!(
            format!(
                "{:?}",
                guest_region
                    .write_slice(&buf_to_write, insufficient_addr)
                    .err()
                    .unwrap()
            ),
            format!(
                "PartialBuffer {{ expected: {:?}, completed: {:?} }}",
                buf_to_write.len(),
                guest_region.len() as usize - 0x600_usize
            )
        );

        let invalid_addr = MemoryRegionAddress(0x900);
        let mut buf_read = [0x0u8; 0x400];
        assert!(matches!(
            guest_region
                .read_slice(&mut buf_read, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let insufficient_addr = MemoryRegionAddress(0x600);
        let mut buf_read = [0x0u8; 0x400];
        assert_eq!(
            format!(
                "{:?}",
                guest_region
                    .read_slice(&mut buf_read, insufficient_addr)
                    .err()
                    .unwrap()
            ),
            format!(
                "PartialBuffer {{ expected: {:?}, completed: {:?} }}",
                buf_to_write.len(),
                guest_region.len() as usize - 0x600_usize
            )
        );
        // Only the first 0x200 bytes were copied before the partial read
        // stopped at the end of the region.
        assert_eq!(
            {
                let mut buf = [0x0u8; 0x400];
                for cell in buf.iter_mut().take(0x200) {
                    *cell = 0xF0;
                }
                buf
            },
            buf_read
        );
    }

    // Same write_slice()/read_slice() coverage for a raw-backed region.
    #[test]
    fn test_write_and_read_slice_on_raw_region() {
        let start_addr = GuestAddress(0x0);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_region = GuestRegionHybrid::from_raw_region(raw_region);
        let buf_to_write = [0xF0u8; 0x400];
        let write_addr = MemoryRegionAddress(0x400);

        guest_region.write_slice(&buf_to_write, write_addr).unwrap();
        let mut buf_read = [0x0u8; 0x400];
        guest_region.read_slice(&mut buf_read, write_addr).unwrap();
        assert_eq!(buf_read, [0xF0u8; 0x400]);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .write_slice(&buf_to_write, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let insufficient_addr = MemoryRegionAddress(0x600);
        assert_eq!(
            format!(
                "{:?}",
                guest_region
                    .write_slice(&buf_to_write, insufficient_addr)
                    .err()
                    .unwrap()
            ),
            format!(
                "PartialBuffer {{ expected: {:?}, completed: {:?} }}",
                buf_to_write.len(),
                guest_region.len() as usize - 0x600_usize
            )
        );

        let invalid_addr = MemoryRegionAddress(0x900);
        let mut buf_read = [0x0u8; 0x400];
        assert!(matches!(
            guest_region
                .read_slice(&mut buf_read, invalid_addr)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let insufficient_addr = MemoryRegionAddress(0x600);
        let mut buf_read = [0x0u8; 0x400];
        assert_eq!(
            format!(
                "{:?}",
                guest_region
                    .read_slice(&mut buf_read, insufficient_addr)
                    .err()
                    .unwrap()
            ),
            format!(
                "PartialBuffer {{ expected: {:?}, completed: {:?} }}",
                buf_to_write.len(),
                guest_region.len() as usize - 0x600_usize
            )
        );
        // Only the first 0x200 bytes were copied before the partial read
        // stopped at the end of the region.
        assert_eq!(
            {
                let mut buf = [0x0u8; 0x400];
                for cell in buf.iter_mut().take(0x200) {
                    *cell = 0xF0;
                }
                buf
            },
            buf_read
        );
    }

    // read_from()/write_to() against temp files on an mmap-backed region:
    // file contents round-trip through guest memory, and out-of-range
    // addresses fail with InvalidBackendAddress.
    #[test]
    fn test_read_from_and_write_to_on_mmap_region() {
        let start_addr = GuestAddress(0x0);
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_region = GuestRegionHybrid::from_mmap_region(mmap_reg);
        let write_addr = MemoryRegionAddress(0x400);
        let original_content = b"hello world";
        let size_of_file = original_content.len();

        let mut file_to_write_mmap_region = TempFile::new().unwrap().into_file();
        file_to_write_mmap_region
            .set_len(size_of_file as u64)
            .unwrap();
        file_to_write_mmap_region
            .write_all(original_content)
            .unwrap();
        file_to_write_mmap_region.rewind().unwrap();
        guest_region
            .read_from(write_addr, &mut file_to_write_mmap_region, size_of_file)
            .unwrap();
        let mut file_read_from_mmap_region = TempFile::new().unwrap().into_file();
        file_read_from_mmap_region
            .set_len(size_of_file as u64)
            .unwrap();
        guest_region
            .write_all_to(write_addr, &mut file_read_from_mmap_region, size_of_file)
            .unwrap();
        file_read_from_mmap_region.rewind().unwrap();
        let mut content = String::new();
        file_read_from_mmap_region
            .read_to_string(&mut content)
            .unwrap();
        assert_eq!(content.as_bytes(), original_content);
        assert_eq!(
            file_read_from_mmap_region.metadata().unwrap().len(),
            size_of_file as u64
        );

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .read_from(invalid_addr, &mut file_to_write_mmap_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .write_to(invalid_addr, &mut file_read_from_mmap_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // Same read_from()/write_to() coverage for a raw-backed region.
    #[test]
    fn test_read_from_and_write_to_on_raw_region() {
        let start_addr = GuestAddress(0x0);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_region = GuestRegionHybrid::from_raw_region(raw_region);
        let write_addr = MemoryRegionAddress(0x400);
        let original_content = b"hello world";
        let size_of_file = original_content.len();

        let mut file_to_write_mmap_region = TempFile::new().unwrap().into_file();
        file_to_write_mmap_region
            .set_len(size_of_file as u64)
            .unwrap();
        file_to_write_mmap_region
            .write_all(original_content)
            .unwrap();
        file_to_write_mmap_region.rewind().unwrap();
        guest_region
            .read_from(write_addr, &mut file_to_write_mmap_region, size_of_file)
            .unwrap();
        let mut file_read_from_mmap_region = TempFile::new().unwrap().into_file();
        file_read_from_mmap_region
            .set_len(size_of_file as u64)
            .unwrap();
        guest_region
            .write_all_to(write_addr, &mut file_read_from_mmap_region, size_of_file)
            .unwrap();
        file_read_from_mmap_region.rewind().unwrap();
        let mut content = String::new();
        file_read_from_mmap_region
            .read_to_string(&mut content)
            .unwrap();
        assert_eq!(content.as_bytes(), original_content);
        assert_eq!(
            file_read_from_mmap_region.metadata().unwrap().len(),
            size_of_file as u64
        );

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .read_from(invalid_addr, &mut file_to_write_mmap_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region
                .write_to(invalid_addr, &mut file_read_from_mmap_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // read_exact_from()/write_all_to() round-trips on both backends, plus
    // InvalidBackendAddress error paths for each.
    #[test]
    fn test_write_all_to_and_read_exact_from() {
        let start_addr = GuestAddress(0x0);
        let write_addr = MemoryRegionAddress(0x400);
        let original_content = b"hello world";
        let size_of_file = original_content.len();
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_mmap_region = GuestRegionHybrid::from_mmap_region(mmap_reg);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_raw_region = GuestRegionHybrid::from_raw_region(raw_region);

        let mut file_to_write_mmap_region = TempFile::new().unwrap().into_file();
        file_to_write_mmap_region
            .set_len(size_of_file as u64)
            .unwrap();
        file_to_write_mmap_region
            .write_all(original_content)
            .unwrap();
        file_to_write_mmap_region.rewind().unwrap();
        guest_mmap_region
            .read_exact_from(write_addr, &mut file_to_write_mmap_region, size_of_file)
            .unwrap();
        let mut file_read_from_mmap_region = TempFile::new().unwrap().into_file();
        file_read_from_mmap_region
            .set_len(size_of_file as u64)
            .unwrap();
        guest_mmap_region
            .write_all_to(write_addr, &mut file_read_from_mmap_region, size_of_file)
            .unwrap();
        file_read_from_mmap_region.rewind().unwrap();
        let mut content = String::new();
        file_read_from_mmap_region
            .read_to_string(&mut content)
            .unwrap();
        assert_eq!(content.as_bytes(), original_content);
        assert_eq!(
            file_read_from_mmap_region.metadata().unwrap().len(),
            size_of_file as u64
        );

        let mut file_to_write_raw_region = TempFile::new().unwrap().into_file();
        file_to_write_raw_region
            .set_len(size_of_file as u64)
            .unwrap();
        file_to_write_raw_region
            .write_all(original_content)
            .unwrap();
        file_to_write_raw_region.rewind().unwrap();
        guest_raw_region
            .read_exact_from(write_addr, &mut file_to_write_raw_region, size_of_file)
            .unwrap();
        let mut file_read_from_raw_region = TempFile::new().unwrap().into_file();
        file_read_from_raw_region
            .set_len(size_of_file as u64)
            .unwrap();
        guest_raw_region
            .write_all_to(write_addr, &mut file_read_from_raw_region, size_of_file)
            .unwrap();
        file_read_from_raw_region.rewind().unwrap();
        let mut content = String::new();
        file_read_from_raw_region
            .read_to_string(&mut content)
            .unwrap();
        assert_eq!(content.as_bytes(), original_content);
        assert_eq!(
            file_read_from_raw_region.metadata().unwrap().len(),
            size_of_file as u64
        );

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_mmap_region
                .read_exact_from(invalid_addr, &mut file_to_write_mmap_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_mmap_region
                .write_all_to(invalid_addr, &mut file_read_from_mmap_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_raw_region
                .read_exact_from(invalid_addr, &mut file_to_write_raw_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_raw_region
                .write_all_to(invalid_addr, &mut file_read_from_raw_region, size_of_file)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // Atomic store()/load() round-trips on both backends, plus
    // InvalidBackendAddress error paths.
    #[test]
    fn test_store_and_load() {
        let test_val = 0xFF;
        let start_addr = GuestAddress(0x0);
        let write_addr = MemoryRegionAddress(0x400);
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_mmap_region = GuestRegionHybrid::from_mmap_region(mmap_reg);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_raw_region = GuestRegionHybrid::from_raw_region(raw_region);

        guest_mmap_region
            .store(test_val, write_addr, Ordering::Relaxed)
            .unwrap();
        let val_read_from_mmap_region: u64 = guest_mmap_region
            .load(write_addr, Ordering::Relaxed)
            .unwrap();
        assert_eq!(val_read_from_mmap_region, test_val);
        guest_raw_region
            .store(test_val, write_addr, Ordering::Relaxed)
            .unwrap();
        let val_read_from_raw_region: u64 = guest_raw_region
            .load(write_addr, Ordering::Relaxed)
            .unwrap();
        assert_eq!(val_read_from_raw_region, test_val);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_mmap_region
                .store(test_val, invalid_addr, Ordering::Relaxed)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_raw_region
                .store(test_val, invalid_addr, Ordering::Relaxed)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        assert!(matches!(
            guest_mmap_region
                .load::<u64>(invalid_addr, Ordering::Relaxed)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));

        assert!(matches!(
            guest_raw_region
                .load::<u64>(invalid_addr, Ordering::Relaxed)
                .err()
                .unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // With B = (), both backends expose the same (unit) bitmap.
    #[test]
    fn test_bitmap() {
        let start_addr = GuestAddress(0x0);
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_mmap_region = GuestRegionHybrid::from_mmap_region(mmap_reg);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_raw_region = GuestRegionHybrid::from_raw_region(raw_region);

        assert_eq!(guest_mmap_region.bitmap(), guest_raw_region.bitmap());
    }

    // Host addresses on an mmap-backed region are offset-consistent, and
    // out-of-range addresses are rejected.
    #[test]
    fn test_get_host_address_on_mmap_region() {
        let start_addr = GuestAddress(0x0);
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x800).unwrap(), start_addr).unwrap();
        let guest_region = GuestRegionHybrid::from_mmap_region(mmap_reg);

        let addr_1 = guest_region
            .get_host_address(MemoryRegionAddress(0x0))
            .unwrap();
        let addr_2 = guest_region
            .get_host_address(MemoryRegionAddress(0x400))
            .unwrap();
        assert_eq!(addr_1 as u64 + 0x400, addr_2 as u64);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region.get_host_address(invalid_addr).err().unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // Same host-address checks for a raw-backed region.
    #[test]
    fn test_get_host_address_on_raw_region() {
        let start_addr = GuestAddress(0x0);
        let mut buf_of_raw_region = [0u8; 0x800];
        let raw_region = unsafe {
            GuestRegionRaw::<()>::new(start_addr, &mut buf_of_raw_region as *mut _, 0x800)
        };
        let guest_region = GuestRegionHybrid::from_raw_region(raw_region);

        let addr_1 = guest_region
            .get_host_address(MemoryRegionAddress(0x0))
            .unwrap();
        let addr_2 = guest_region
            .get_host_address(MemoryRegionAddress(0x400))
            .unwrap();
        assert_eq!(addr_1 as u64 + 0x400, addr_2 as u64);

        let invalid_addr = MemoryRegionAddress(0x900);
        assert!(matches!(
            guest_region.get_host_address(invalid_addr).err().unwrap(),
            GuestMemoryError::InvalidBackendAddress
        ));
    }

    // get_slice(): in-range, empty, and overflowing slice requests.
    #[test]
    fn test_guest_memory_mmap_get_slice() {
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x400).unwrap(), GuestAddress(0)).unwrap();
        let guest_mmap_region = GuestRegionHybrid::from_mmap_region(mmap_reg);

        let slice_addr = MemoryRegionAddress(0x100);
        let slice_size = 0x200;
        let slice = guest_mmap_region.get_slice(slice_addr, slice_size).unwrap();
        assert_eq!(slice.len(), slice_size);

        let slice_addr = MemoryRegionAddress(0x200);
        let slice_size = 0x0;
        let slice = guest_mmap_region.get_slice(slice_addr, slice_size).unwrap();
        assert!(slice.is_empty());

        let slice_addr = MemoryRegionAddress(0x300);
        let slice_size = 0x200;
        assert!(guest_mmap_region.get_slice(slice_addr, slice_size).is_err());
    }

    // from_regions(): accepts sorted regions, rejects unsorted input and
    // an empty region list.
    #[test]
    fn test_from_regions_on_guest_memory_hybrid() {
        let mut regions = Vec::<GuestRegionHybrid<()>>::new();
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x100).unwrap(), GuestAddress(0x100))
                .unwrap();
        regions.push(GuestRegionHybrid::Mmap(mmap_reg));
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x100).unwrap(), GuestAddress(0x200))
                .unwrap();
        regions.push(GuestRegionHybrid::Mmap(mmap_reg));
        let guest_region = GuestMemoryHybrid::<()>::from_regions(regions).unwrap();
        assert_eq!(guest_region.regions[0].start_addr(), GuestAddress(0x100));
        assert_eq!(guest_region.regions[1].start_addr(), GuestAddress(0x200));

        let mut regions = Vec::<GuestRegionHybrid<()>>::new();
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x400).unwrap(), GuestAddress(0x200))
                .unwrap();
        regions.push(GuestRegionHybrid::Mmap(mmap_reg));
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x400).unwrap(), GuestAddress(0x100))
                .unwrap();
        regions.push(GuestRegionHybrid::Mmap(mmap_reg));
        let guest_region = GuestMemoryHybrid::<()>::from_regions(regions);
        assert!(matches!(
            guest_region.err().unwrap(),
            Error::UnsortedMemoryRegions
        ));

        let regions = Vec::<GuestRegionHybrid<()>>::new();
        let guest_region = GuestMemoryHybrid::<()>::from_regions(regions);
        assert!(matches!(guest_region.err().unwrap(), Error::NoMemoryRegion));
    }

    // The region iterator yields regions in ascending address order.
    #[test]
    fn test_iterator_on_guest_region_hybrid() {
        let mut regions = Vec::<GuestRegionHybrid<()>>::new();
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x100).unwrap(), GuestAddress(0x100))
                .unwrap();
        regions.push(GuestRegionHybrid::Mmap(mmap_reg));
        let mmap_reg =
            GuestRegionMmap::new(MmapRegion::<()>::new(0x100).unwrap(), GuestAddress(0x200))
                .unwrap();
        regions.push(GuestRegionHybrid::Mmap(mmap_reg));
        let guest_region = GuestMemoryHybrid::<()>::from_regions(regions).unwrap();
        let mut region = guest_region.iter();

        assert_eq!(region.next().unwrap().start_addr(), GuestAddress(0x100));
        assert_eq!(region.next().unwrap().start_addr(), GuestAddress(0x200));
    }
}