use std::borrow::Borrow;
use std::error;
use std::fmt;
use std::io::{Read, Write};
#[cfg(unix)]
use std::io::{Seek, SeekFrom};
use std::ops::Deref;
use std::result;
use std::sync::atomic::Ordering;
use std::sync::Arc;

use crate::address::Address;
use crate::bitmap::{Bitmap, BS};
use crate::guest_memory::{
    self, FileOffset, GuestAddress, GuestMemory, GuestMemoryIterator, GuestMemoryRegion,
    GuestUsize, MemoryRegionAddress,
};
use crate::volatile_memory::{VolatileMemory, VolatileSlice};
use crate::{AtomicAccess, Bytes};

#[cfg(unix)]
pub use crate::mmap_unix::{Error as MmapRegionError, MmapRegion, MmapRegionBuilder};

#[cfg(windows)]
pub use crate::mmap_windows::MmapRegion;
#[cfg(windows)]
pub use std::io::Error as MmapRegionError;

/// Helper trait for constructing a `Bitmap` sized for a region of a given length.
pub trait NewBitmap: Bitmap + Default {
    /// Create a new object based on the specified length of the region it covers.
    fn with_len(len: usize) -> Self;
}

impl NewBitmap for () {
    fn with_len(_len: usize) -> Self {}
}

/// Conversion of a memory region into byte slices.
pub(crate) trait AsSlice {
    /// Returns a slice corresponding to the data in the region.
    ///
    /// # Safety
    ///
    /// Unsafe because of possible aliasing: the caller must ensure no concurrent
    /// mutable access to the same memory takes place while the slice is alive.
    unsafe fn as_slice(&self) -> &[u8];

    /// Returns a mutable slice corresponding to the data in the region.
    ///
    /// # Safety
    ///
    /// Unsafe because of possible aliasing, as for `as_slice`.
    #[allow(clippy::mut_from_ref)]
    unsafe fn as_mut_slice(&self) -> &mut [u8];
}

/// Errors that can occur when creating a memory map.
#[derive(Debug)]
pub enum Error {
    /// Adding the guest base address to the length of the underlying mapping
    /// resulted in an overflow.
    InvalidGuestRegion,
    /// Error creating an `MmapRegion` object.
    MmapRegion(MmapRegionError),
    /// No memory region found.
    NoMemoryRegion,
    /// Some of the memory regions intersect with each other.
    MemoryRegionOverlap,
    /// The provided memory regions haven't been sorted.
    UnsortedMemoryRegions,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::InvalidGuestRegion => write!(
                f,
                "Adding the guest base address to the length of the underlying mapping \
                 resulted in an overflow"
            ),
            Error::MmapRegion(e) => write!(f, "{}", e),
            Error::NoMemoryRegion => write!(f, "No memory region found"),
            Error::MemoryRegionOverlap => {
                write!(f, "Some of the memory regions intersect with each other")
            }
            Error::UnsortedMemoryRegions => {
                write!(f, "The provided memory regions haven't been sorted")
            }
        }
    }
}

impl error::Error for Error {}

/// Checks if a mapping of `size` bytes fits at the provided `file_offset`.
///
/// The mapping is valid only if the file is long enough to back all `size` bytes
/// starting at `file_offset.start()`.
#[cfg(unix)]
pub fn check_file_offset(
    file_offset: &FileOffset,
    size: usize,
) -> result::Result<(), MmapRegionError> {
    let mut file = file_offset.file();
    let start = file_offset.start();

    if let Some(end) = start.checked_add(size as u64) {
        let filesize = file
            .seek(SeekFrom::End(0))
            .map_err(MmapRegionError::SeekEnd)?;
        file.seek(SeekFrom::Start(0))
            .map_err(MmapRegionError::SeekStart)?;
        if filesize < end {
            return Err(MmapRegionError::MappingPastEof);
        }
    } else {
        return Err(MmapRegionError::InvalidOffsetLength);
    }

    Ok(())
}
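
// For instance (illustrative values only): with a 0x1000-byte file, a `FileOffset`
// starting at 0x800 can back at most a 0x800-byte mapping, so asking
// `check_file_offset` for a 0x1000-byte mapping at that offset fails with
// `MappingPastEof`, since the mapping would end at 0x1800, past the end of the file.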

/// Represents a continuous region of the guest's physical memory that is backed by a
/// mapping in the virtual address space of the calling process.
#[derive(Debug)]
pub struct GuestRegionMmap<B = ()> {
    mapping: MmapRegion<B>,
    guest_base: GuestAddress,
}

impl<B> Deref for GuestRegionMmap<B> {
    type Target = MmapRegion<B>;

    fn deref(&self) -> &MmapRegion<B> {
        &self.mapping
    }
}

impl<B: Bitmap> GuestRegionMmap<B> {
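    /// Creates a new memory-mapped memory region for the guest's physical memory.
    ///
    /// A minimal usage sketch (it assumes the crate is built with the mmap backend
    /// enabled, so that `MmapRegion` and `GuestRegionMmap` are exported from the
    /// crate root):
    ///
    /// ```
    /// # use vm_memory::{GuestAddress, GuestMemoryRegion, GuestRegionMmap, MmapRegion};
    /// let region = GuestRegionMmap::<()>::new(
    ///     MmapRegion::new(0x400).unwrap(),
    ///     GuestAddress(0x1000),
    /// )
    /// .unwrap();
    /// assert_eq!(region.start_addr(), GuestAddress(0x1000));
    /// assert_eq!(region.len(), 0x400);
    /// ```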
    pub fn new(mapping: MmapRegion<B>, guest_base: GuestAddress) -> result::Result<Self, Error> {
        if guest_base.0.checked_add(mapping.size() as u64).is_none() {
            return Err(Error::InvalidGuestRegion);
        }

        Ok(GuestRegionMmap {
            mapping,
            guest_base,
        })
    }
}

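// Every `Bytes` method below delegates to the `VolatileSlice` spanning the whole
// region. A rough usage sketch (illustrative only; `region` stands for any
// `GuestRegionMmap` value, e.g. the one from the example above):
//
//     use vm_memory::{Bytes, MemoryRegionAddress};
//
//     region.write(&[1, 2, 3, 4], MemoryRegionAddress(0x10)).unwrap();
//     let mut buf = [0u8; 4];
//     region.read(&mut buf, MemoryRegionAddress(0x10)).unwrap();
//     assert_eq!(buf, [1, 2, 3, 4]);
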
impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionMmap<B> {
    type E = guest_memory::Error;

    fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
        let maddr = addr.raw_value() as usize;
        // `as_volatile_slice` covers the entire mapping, so it should not fail for a
        // region backed by a valid mapping; the same reasoning applies to the
        // `unwrap` calls in the methods below.
        self.as_volatile_slice()
            .unwrap()
            .write(buf, maddr)
            .map_err(Into::into)
    }

    fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .read(buf, maddr)
            .map_err(Into::into)
    }

    fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .write_slice(buf, maddr)
            .map_err(Into::into)
    }

    fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .read_slice(buf, maddr)
            .map_err(Into::into)
    }

    fn read_from<F>(
        &self,
        addr: MemoryRegionAddress,
        src: &mut F,
        count: usize,
    ) -> guest_memory::Result<usize>
    where
        F: Read,
    {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .read_from::<F>(maddr, src, count)
            .map_err(Into::into)
    }

    fn read_exact_from<F>(
        &self,
        addr: MemoryRegionAddress,
        src: &mut F,
        count: usize,
    ) -> guest_memory::Result<()>
    where
        F: Read,
    {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .read_exact_from::<F>(maddr, src, count)
            .map_err(Into::into)
    }

    fn write_to<F>(
        &self,
        addr: MemoryRegionAddress,
        dst: &mut F,
        count: usize,
    ) -> guest_memory::Result<usize>
    where
        F: Write,
    {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .write_to::<F>(maddr, dst, count)
            .map_err(Into::into)
    }

    fn write_all_to<F>(
        &self,
        addr: MemoryRegionAddress,
        dst: &mut F,
        count: usize,
    ) -> guest_memory::Result<()>
    where
        F: Write,
    {
        let maddr = addr.raw_value() as usize;
        self.as_volatile_slice()
            .unwrap()
            .write_all_to::<F>(maddr, dst, count)
            .map_err(Into::into)
    }

    fn store<T: AtomicAccess>(
        &self,
        val: T,
        addr: MemoryRegionAddress,
        order: Ordering,
    ) -> guest_memory::Result<()> {
        self.as_volatile_slice().and_then(|s| {
            s.store(val, addr.raw_value() as usize, order)
                .map_err(Into::into)
        })
    }

    fn load<T: AtomicAccess>(
        &self,
        addr: MemoryRegionAddress,
        order: Ordering,
    ) -> guest_memory::Result<T> {
        self.as_volatile_slice()
            .and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
    }
}

impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
    type B = B;

    fn len(&self) -> GuestUsize {
        self.mapping.size() as GuestUsize
    }

    fn start_addr(&self) -> GuestAddress {
        self.guest_base
    }

    fn bitmap(&self) -> &Self::B {
        self.mapping.bitmap()
    }

    fn get_host_address(&self, addr: MemoryRegionAddress) -> guest_memory::Result<*mut u8> {
        // The `wrapping_offset` below stays within the mapping because
        // `check_address` has already validated `addr`.
        self.check_address(addr)
            .ok_or(guest_memory::Error::InvalidBackendAddress)
            .map(|addr| {
                self.mapping
                    .as_ptr()
                    .wrapping_offset(addr.raw_value() as isize)
            })
    }

    fn file_offset(&self) -> Option<&FileOffset> {
        self.mapping.file_offset()
    }

    unsafe fn as_slice(&self) -> Option<&[u8]> {
        Some(self.mapping.as_slice())
    }

    unsafe fn as_mut_slice(&self) -> Option<&mut [u8]> {
        Some(self.mapping.as_mut_slice())
    }

    fn get_slice(
        &self,
        offset: MemoryRegionAddress,
        count: usize,
    ) -> guest_memory::Result<VolatileSlice<BS<B>>> {
        let slice = self.mapping.get_slice(offset.raw_value() as usize, count)?;
        Ok(slice)
    }

    #[cfg(target_os = "linux")]
    fn is_hugetlbfs(&self) -> Option<bool> {
        self.mapping.is_hugetlbfs()
    }
}

/// Represents the entire physical memory of the guest by tracking all its memory regions.
///
/// Each region is an instance of `GuestRegionMmap`, being backed by a mapping in the
/// virtual address space of the calling process.
#[derive(Clone, Debug, Default)]
pub struct GuestMemoryMmap<B = ()> {
    regions: Vec<Arc<GuestRegionMmap<B>>>,
}

impl<B: NewBitmap> GuestMemoryMmap<B> {
    /// Creates an empty `GuestMemoryMmap` instance.
    pub fn new() -> Self {
        Self::default()
    }

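    /// Creates a container and allocates anonymous memory for guest memory regions.
    ///
    /// Valid memory regions are specified as a slice of (Address, Size) tuples sorted
    /// by Address. A minimal usage sketch (it assumes the crate is built with the mmap
    /// backend enabled, so that `GuestMemoryMmap` is exported from the crate root):
    ///
    /// ```
    /// # use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap};
    /// let gm = GuestMemoryMmap::<()>::from_ranges(&[
    ///     (GuestAddress(0x0), 0x400),
    ///     (GuestAddress(0x1000), 0x400),
    /// ])
    /// .unwrap();
    /// assert_eq!(gm.num_regions(), 2);
    /// ```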
    pub fn from_ranges(ranges: &[(GuestAddress, usize)]) -> result::Result<Self, Error> {
        Self::from_ranges_with_files(ranges.iter().map(|r| (r.0, r.1, None)))
    }

    /// Creates a container, allocating memory for guest regions that may optionally
    /// be backed by a file at a given offset.
    ///
    /// Valid memory regions are specified as a sequence of (Address, Size,
    /// Option<FileOffset>) tuples sorted by Address.
    pub fn from_ranges_with_files<A, T>(ranges: T) -> result::Result<Self, Error>
    where
        A: Borrow<(GuestAddress, usize, Option<FileOffset>)>,
        T: IntoIterator<Item = A>,
    {
        Self::from_regions(
            ranges
                .into_iter()
                .map(|x| {
                    let guest_base = x.borrow().0;
                    let size = x.borrow().1;

                    if let Some(ref f_off) = x.borrow().2 {
                        MmapRegion::from_file(f_off.clone(), size)
                    } else {
                        MmapRegion::new(size)
                    }
                    .map_err(Error::MmapRegion)
                    .and_then(|r| GuestRegionMmap::new(r, guest_base))
                })
                .collect::<result::Result<Vec<_>, Error>>()?,
        )
    }
}

impl<B: Bitmap> GuestMemoryMmap<B> {
    /// Creates a new `GuestMemoryMmap` from a vector of regions.
    ///
    /// The regions must be non-overlapping and sorted by starting address.
    pub fn from_regions(mut regions: Vec<GuestRegionMmap<B>>) -> result::Result<Self, Error> {
        Self::from_arc_regions(regions.drain(..).map(Arc::new).collect())
    }

    /// Creates a new `GuestMemoryMmap` from a vector of `Arc`-wrapped regions.
    ///
    /// Similar to `from_regions`, but the regions can also be shared with other
    /// consumers. The regions must be non-overlapping and sorted by starting address.
    pub fn from_arc_regions(regions: Vec<Arc<GuestRegionMmap<B>>>) -> result::Result<Self, Error> {
        if regions.is_empty() {
            return Err(Error::NoMemoryRegion);
        }

        for window in regions.windows(2) {
            let prev = &window[0];
            let next = &window[1];

            if prev.start_addr() > next.start_addr() {
                return Err(Error::UnsortedMemoryRegions);
            }

            if prev.last_addr() >= next.start_addr() {
                return Err(Error::MemoryRegionOverlap);
            }
        }

        Ok(Self { regions })
    }

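    /// Inserts a region into the `GuestMemoryMmap` object and returns a new
    /// `GuestMemoryMmap`; `self` is left untouched.
    ///
    /// A minimal sketch (assuming the mmap backend types are exported from the
    /// crate root):
    ///
    /// ```
    /// # use std::sync::Arc;
    /// # use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap, GuestRegionMmap, MmapRegion};
    /// let gm = GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0x0), 0x1000)]).unwrap();
    /// let extra = Arc::new(
    ///     GuestRegionMmap::new(MmapRegion::new(0x1000).unwrap(), GuestAddress(0x10_0000)).unwrap(),
    /// );
    /// let gm = gm.insert_region(extra).unwrap();
    /// assert_eq!(gm.num_regions(), 2);
    /// ```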
    pub fn insert_region(
        &self,
        region: Arc<GuestRegionMmap<B>>,
    ) -> result::Result<GuestMemoryMmap<B>, Error> {
        let mut regions = self.regions.clone();
        regions.push(region);
        regions.sort_by_key(|x| x.start_addr());

        Self::from_arc_regions(regions)
    }

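    /// Removes a region from the `GuestMemoryMmap` object, returning the resulting
    /// `GuestMemoryMmap` together with the removed region on success.
    ///
    /// The region is only removed when `base` and `size` exactly match an existing
    /// region. A minimal sketch:
    ///
    /// ```
    /// # use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap};
    /// let gm = GuestMemoryMmap::<()>::from_ranges(&[
    ///     (GuestAddress(0x0), 0x1000),
    ///     (GuestAddress(0x10_0000), 0x1000),
    /// ])
    /// .unwrap();
    /// let (gm, _removed) = gm.remove_region(GuestAddress(0x10_0000), 0x1000).unwrap();
    /// assert_eq!(gm.num_regions(), 1);
    /// ```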
    pub fn remove_region(
        &self,
        base: GuestAddress,
        size: GuestUsize,
    ) -> result::Result<(GuestMemoryMmap<B>, Arc<GuestRegionMmap<B>>), Error> {
        if let Ok(region_index) = self.regions.binary_search_by_key(&base, |x| x.start_addr()) {
            if self.regions.get(region_index).unwrap().mapping.size() as GuestUsize == size {
                let mut regions = self.regions.clone();
                let region = regions.remove(region_index);
                return Ok((Self { regions }, region));
            }
        }

        Err(Error::InvalidGuestRegion)
    }
}

/// An iterator over the regions of a `GuestMemoryMmap`.
pub struct Iter<'a, B>(std::slice::Iter<'a, Arc<GuestRegionMmap<B>>>);

impl<'a, B> Iterator for Iter<'a, B> {
    type Item = &'a GuestRegionMmap<B>;
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(AsRef::as_ref)
    }
}

impl<'a, B: 'a> GuestMemoryIterator<'a, GuestRegionMmap<B>> for GuestMemoryMmap<B> {
    type Iter = Iter<'a, B>;
}

impl<B: Bitmap + 'static> GuestMemory for GuestMemoryMmap<B> {
    type R = GuestRegionMmap<B>;

    type I = Self;

    fn num_regions(&self) -> usize {
        self.regions.len()
    }

    fn find_region(&self, addr: GuestAddress) -> Option<&GuestRegionMmap<B>> {
        let index = match self.regions.binary_search_by_key(&addr, |x| x.start_addr()) {
            Ok(x) => Some(x),
            Err(x) if (x > 0 && addr <= self.regions[x - 1].last_addr()) => Some(x - 1),
            _ => None,
        };
        index.map(|x| self.regions[x].as_ref())
    }

    fn iter(&self) -> Iter<B> {
        Iter(self.regions.iter())
    }
}
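
// `find_region` relies on `regions` being sorted: a binary search on the start
// addresses either hits a region's start address exactly, or lands on the
// insertion point, in which case the previous region is the only candidate and
// is checked against its `last_addr()`. A rough sketch of the resulting lookup
// behavior (illustrative only):
//
//     let gm = GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0x1000), 0x400)]).unwrap();
//     assert!(gm.find_region(GuestAddress(0x1000)).is_some()); // exact start
//     assert!(gm.find_region(GuestAddress(0x13ff)).is_some()); // last byte
//     assert!(gm.find_region(GuestAddress(0x1400)).is_none()); // one past the end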

#[cfg(test)]
mod tests {
    #![allow(clippy::undocumented_unsafe_blocks)]
    extern crate vmm_sys_util;

    use super::*;

    use crate::bitmap::tests::test_guest_memory_and_region;
    use crate::bitmap::AtomicBitmap;
    use crate::GuestAddressSpace;

    use std::fs::File;
    use std::mem;
    use std::path::Path;
    use vmm_sys_util::tempfile::TempFile;

    type GuestMemoryMmap = super::GuestMemoryMmap<()>;
    type GuestRegionMmap = super::GuestRegionMmap<()>;
    type MmapRegion = super::MmapRegion<()>;

    #[test]
    fn basic_map() {
        let m = MmapRegion::new(1024).unwrap();
        assert_eq!(1024, m.size());
    }

    fn check_guest_memory_mmap(
        maybe_guest_mem: Result<GuestMemoryMmap, Error>,
        expected_regions_summary: &[(GuestAddress, usize)],
    ) {
        assert!(maybe_guest_mem.is_ok());

        let guest_mem = maybe_guest_mem.unwrap();
        assert_eq!(guest_mem.num_regions(), expected_regions_summary.len());
        let maybe_last_mem_reg = expected_regions_summary.last();
        if let Some((region_addr, region_size)) = maybe_last_mem_reg {
            let mut last_addr = region_addr.unchecked_add(*region_size as u64);
            if last_addr.raw_value() != 0 {
                last_addr = last_addr.unchecked_sub(1);
            }
            assert_eq!(guest_mem.last_addr(), last_addr);
        }
        for ((region_addr, region_size), mmap) in expected_regions_summary
            .iter()
            .zip(guest_mem.regions.iter())
        {
            assert_eq!(region_addr, &mmap.guest_base);
            assert_eq!(region_size, &mmap.mapping.size());

            assert!(guest_mem.find_region(*region_addr).is_some());
        }
    }

    fn new_guest_memory_mmap(
        regions_summary: &[(GuestAddress, usize)],
    ) -> Result<GuestMemoryMmap, Error> {
        GuestMemoryMmap::from_ranges(regions_summary)
    }

    fn new_guest_memory_mmap_from_regions(
        regions_summary: &[(GuestAddress, usize)],
    ) -> Result<GuestMemoryMmap, Error> {
        GuestMemoryMmap::from_regions(
            regions_summary
                .iter()
                .map(|(region_addr, region_size)| {
                    GuestRegionMmap::new(MmapRegion::new(*region_size).unwrap(), *region_addr)
                        .unwrap()
                })
                .collect(),
        )
    }

    fn new_guest_memory_mmap_from_arc_regions(
        regions_summary: &[(GuestAddress, usize)],
    ) -> Result<GuestMemoryMmap, Error> {
        GuestMemoryMmap::from_arc_regions(
            regions_summary
                .iter()
                .map(|(region_addr, region_size)| {
                    Arc::new(
                        GuestRegionMmap::new(MmapRegion::new(*region_size).unwrap(), *region_addr)
                            .unwrap(),
                    )
                })
                .collect(),
        )
    }

    fn new_guest_memory_mmap_with_files(
        regions_summary: &[(GuestAddress, usize)],
    ) -> Result<GuestMemoryMmap, Error> {
        let regions: Vec<(GuestAddress, usize, Option<FileOffset>)> = regions_summary
            .iter()
            .map(|(region_addr, region_size)| {
                let f = TempFile::new().unwrap().into_file();
                f.set_len(*region_size as u64).unwrap();

                (*region_addr, *region_size, Some(FileOffset::new(f, 0)))
            })
            .collect();

        GuestMemoryMmap::from_ranges_with_files(&regions)
    }

    #[test]
    fn test_no_memory_region() {
        let regions_summary = [];

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap(&regions_summary).err().unwrap()
            ),
            format!("{:?}", Error::NoMemoryRegion)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_with_files(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::NoMemoryRegion)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_from_regions(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::NoMemoryRegion)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_from_arc_regions(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::NoMemoryRegion)
        );
    }

    #[test]
    fn test_overlapping_memory_regions() {
        let regions_summary = [(GuestAddress(0), 100_usize), (GuestAddress(99), 100_usize)];

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap(&regions_summary).err().unwrap()
            ),
            format!("{:?}", Error::MemoryRegionOverlap)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_with_files(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::MemoryRegionOverlap)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_from_regions(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::MemoryRegionOverlap)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_from_arc_regions(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::MemoryRegionOverlap)
        );
    }

    #[test]
    fn test_unsorted_memory_regions() {
        let regions_summary = [(GuestAddress(100), 100_usize), (GuestAddress(0), 100_usize)];

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap(&regions_summary).err().unwrap()
            ),
            format!("{:?}", Error::UnsortedMemoryRegions)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_with_files(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::UnsortedMemoryRegions)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_from_regions(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::UnsortedMemoryRegions)
        );

        assert_eq!(
            format!(
                "{:?}",
                new_guest_memory_mmap_from_arc_regions(&regions_summary)
                    .err()
                    .unwrap()
            ),
            format!("{:?}", Error::UnsortedMemoryRegions)
        );
    }

    #[test]
    fn test_valid_memory_regions() {
        let regions_summary = [(GuestAddress(0), 100_usize), (GuestAddress(100), 100_usize)];

        let guest_mem = GuestMemoryMmap::new();
        assert_eq!(guest_mem.regions.len(), 0);

        check_guest_memory_mmap(new_guest_memory_mmap(&regions_summary), &regions_summary);

        check_guest_memory_mmap(
            new_guest_memory_mmap_with_files(&regions_summary),
            &regions_summary,
        );

        check_guest_memory_mmap(
            new_guest_memory_mmap_from_regions(&regions_summary),
            &regions_summary,
        );

        check_guest_memory_mmap(
            new_guest_memory_mmap_from_arc_regions(&regions_summary),
            &regions_summary,
        );
    }

    #[test]
    fn slice_addr() {
        let m = GuestRegionMmap::new(MmapRegion::new(5).unwrap(), GuestAddress(0)).unwrap();
        let s = m.get_slice(MemoryRegionAddress(2), 3).unwrap();
        assert_eq!(s.as_ptr(), unsafe { m.as_ptr().offset(2) });
    }

    #[test]
    fn mapped_file_read() {
        let mut f = TempFile::new().unwrap().into_file();
        let sample_buf = &[1, 2, 3, 4, 5];
        assert!(f.write_all(sample_buf).is_ok());

        let region = MmapRegion::from_file(FileOffset::new(f, 0), sample_buf.len()).unwrap();
        let mem_map = GuestRegionMmap::new(region, GuestAddress(0)).unwrap();
        let buf = &mut [0u8; 16];
        assert_eq!(
            mem_map.as_volatile_slice().unwrap().read(buf, 0).unwrap(),
            sample_buf.len()
        );
        assert_eq!(buf[0..sample_buf.len()], sample_buf[..]);
    }

    #[test]
    fn test_address_in_range() {
        let f1 = TempFile::new().unwrap().into_file();
        f1.set_len(0x400).unwrap();
        let f2 = TempFile::new().unwrap().into_file();
        f2.set_len(0x400).unwrap();

        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x400), (start_addr2, 0x400)]).unwrap();
        let guest_mem_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[
            (start_addr1, 0x400, Some(FileOffset::new(f1, 0))),
            (start_addr2, 0x400, Some(FileOffset::new(f2, 0))),
        ])
        .unwrap();

        let guest_mem_list = vec![guest_mem, guest_mem_backed_by_file];
        for guest_mem in guest_mem_list.iter() {
            assert!(guest_mem.address_in_range(GuestAddress(0x200)));
            assert!(!guest_mem.address_in_range(GuestAddress(0x600)));
            assert!(guest_mem.address_in_range(GuestAddress(0xa00)));
            assert!(!guest_mem.address_in_range(GuestAddress(0xc00)));
        }
    }

    #[test]
    fn test_check_address() {
        let f1 = TempFile::new().unwrap().into_file();
        f1.set_len(0x400).unwrap();
        let f2 = TempFile::new().unwrap().into_file();
        f2.set_len(0x400).unwrap();

        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x400), (start_addr2, 0x400)]).unwrap();
        let guest_mem_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[
            (start_addr1, 0x400, Some(FileOffset::new(f1, 0))),
            (start_addr2, 0x400, Some(FileOffset::new(f2, 0))),
        ])
        .unwrap();

        let guest_mem_list = vec![guest_mem, guest_mem_backed_by_file];
        for guest_mem in guest_mem_list.iter() {
            assert_eq!(
                guest_mem.check_address(GuestAddress(0x200)),
                Some(GuestAddress(0x200))
            );
            assert_eq!(guest_mem.check_address(GuestAddress(0x600)), None);
            assert_eq!(
                guest_mem.check_address(GuestAddress(0xa00)),
                Some(GuestAddress(0xa00))
            );
            assert_eq!(guest_mem.check_address(GuestAddress(0xc00)), None);
        }
    }

    #[test]
    fn test_to_region_addr() {
        let f1 = TempFile::new().unwrap().into_file();
        f1.set_len(0x400).unwrap();
        let f2 = TempFile::new().unwrap().into_file();
        f2.set_len(0x400).unwrap();

        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x400), (start_addr2, 0x400)]).unwrap();
        let guest_mem_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[
            (start_addr1, 0x400, Some(FileOffset::new(f1, 0))),
            (start_addr2, 0x400, Some(FileOffset::new(f2, 0))),
        ])
        .unwrap();

        let guest_mem_list = vec![guest_mem, guest_mem_backed_by_file];
        for guest_mem in guest_mem_list.iter() {
            assert!(guest_mem.to_region_addr(GuestAddress(0x600)).is_none());
            let (r0, addr0) = guest_mem.to_region_addr(GuestAddress(0x800)).unwrap();
            let (r1, addr1) = guest_mem.to_region_addr(GuestAddress(0xa00)).unwrap();
            assert!(r0.as_ptr() == r1.as_ptr());
            assert_eq!(addr0, MemoryRegionAddress(0));
            assert_eq!(addr1, MemoryRegionAddress(0x200));
        }
    }

    #[test]
    fn test_get_host_address() {
        let f1 = TempFile::new().unwrap().into_file();
        f1.set_len(0x400).unwrap();
        let f2 = TempFile::new().unwrap().into_file();
        f2.set_len(0x400).unwrap();

        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x400), (start_addr2, 0x400)]).unwrap();
        let guest_mem_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[
            (start_addr1, 0x400, Some(FileOffset::new(f1, 0))),
            (start_addr2, 0x400, Some(FileOffset::new(f2, 0))),
        ])
        .unwrap();

        let guest_mem_list = vec![guest_mem, guest_mem_backed_by_file];
        for guest_mem in guest_mem_list.iter() {
            assert!(guest_mem.get_host_address(GuestAddress(0x600)).is_err());
            let ptr0 = guest_mem.get_host_address(GuestAddress(0x800)).unwrap();
            let ptr1 = guest_mem.get_host_address(GuestAddress(0xa00)).unwrap();
            assert_eq!(
                ptr0,
                guest_mem.find_region(GuestAddress(0x800)).unwrap().as_ptr()
            );
            assert_eq!(unsafe { ptr0.offset(0x200) }, ptr1);
        }
    }

    #[test]
    fn test_deref() {
        let f = TempFile::new().unwrap().into_file();
        f.set_len(0x400).unwrap();

        let start_addr = GuestAddress(0x0);
        let guest_mem = GuestMemoryMmap::from_ranges(&[(start_addr, 0x400)]).unwrap();
        let guest_mem_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[(
            start_addr,
            0x400,
            Some(FileOffset::new(f, 0)),
        )])
        .unwrap();

        let guest_mem_list = vec![guest_mem, guest_mem_backed_by_file];
        for guest_mem in guest_mem_list.iter() {
            let sample_buf = &[1, 2, 3, 4, 5];

            assert_eq!(guest_mem.write(sample_buf, start_addr).unwrap(), 5);
            let slice = guest_mem
                .find_region(GuestAddress(0))
                .unwrap()
                .as_volatile_slice()
                .unwrap();

            let buf = &mut [0, 0, 0, 0, 0];
            assert_eq!(slice.read(buf, 0).unwrap(), 5);
            assert_eq!(buf, sample_buf);
        }
    }

    #[test]
    fn test_read_u64() {
        let f1 = TempFile::new().unwrap().into_file();
        f1.set_len(0x1000).unwrap();
        let f2 = TempFile::new().unwrap().into_file();
        f2.set_len(0x1000).unwrap();

        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x1000);
        let bad_addr = GuestAddress(0x2001);
        let bad_addr2 = GuestAddress(0x1ffc);
        let max_addr = GuestAddress(0x2000);

        let gm =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x1000), (start_addr2, 0x1000)]).unwrap();
        let gm_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[
            (start_addr1, 0x1000, Some(FileOffset::new(f1, 0))),
            (start_addr2, 0x1000, Some(FileOffset::new(f2, 0))),
        ])
        .unwrap();

        let gm_list = vec![gm, gm_backed_by_file];
        for gm in gm_list.iter() {
            let val1: u64 = 0xaa55_aa55_aa55_aa55;
            let val2: u64 = 0x55aa_55aa_55aa_55aa;
            assert_eq!(
                format!("{:?}", gm.write_obj(val1, bad_addr).err().unwrap()),
                format!("InvalidGuestAddress({:?})", bad_addr)
            );
            assert_eq!(
                format!("{:?}", gm.write_obj(val1, bad_addr2).err().unwrap()),
                format!(
                    "PartialBuffer {{ expected: {:?}, completed: {:?} }}",
                    mem::size_of::<u64>(),
                    max_addr.checked_offset_from(bad_addr2).unwrap()
                )
            );

            gm.write_obj(val1, GuestAddress(0x500)).unwrap();
            gm.write_obj(val2, GuestAddress(0x1000 + 32)).unwrap();
            let num1: u64 = gm.read_obj(GuestAddress(0x500)).unwrap();
            let num2: u64 = gm.read_obj(GuestAddress(0x1000 + 32)).unwrap();
            assert_eq!(val1, num1);
            assert_eq!(val2, num2);
        }
    }

    #[test]
    fn write_and_read() {
        let f = TempFile::new().unwrap().into_file();
        f.set_len(0x400).unwrap();

        let mut start_addr = GuestAddress(0x1000);
        let gm = GuestMemoryMmap::from_ranges(&[(start_addr, 0x400)]).unwrap();
        let gm_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[(
            start_addr,
            0x400,
            Some(FileOffset::new(f, 0)),
        )])
        .unwrap();

        let gm_list = vec![gm, gm_backed_by_file];
        for gm in gm_list.iter() {
            let sample_buf = &[1, 2, 3, 4, 5];

            assert_eq!(gm.write(sample_buf, start_addr).unwrap(), 5);

            let buf = &mut [0u8; 5];
            assert_eq!(gm.read(buf, start_addr).unwrap(), 5);
            assert_eq!(buf, sample_buf);

            start_addr = GuestAddress(0x13ff);
            assert_eq!(gm.write(sample_buf, start_addr).unwrap(), 1);
            assert_eq!(gm.read(buf, start_addr).unwrap(), 1);
            assert_eq!(buf[0], sample_buf[0]);
            start_addr = GuestAddress(0x1000);
        }
    }

    #[test]
    fn read_to_and_write_from_mem() {
        let f = TempFile::new().unwrap().into_file();
        f.set_len(0x400).unwrap();

        let gm = GuestMemoryMmap::from_ranges(&[(GuestAddress(0x1000), 0x400)]).unwrap();
        let gm_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[(
            GuestAddress(0x1000),
            0x400,
            Some(FileOffset::new(f, 0)),
        )])
        .unwrap();

        let gm_list = vec![gm, gm_backed_by_file];
        for gm in gm_list.iter() {
            let addr = GuestAddress(0x1010);
            let mut file = if cfg!(unix) {
                File::open(Path::new("/dev/zero")).unwrap()
            } else {
                File::open(Path::new("c:\\Windows\\system32\\ntoskrnl.exe")).unwrap()
            };
            gm.write_obj(!0u32, addr).unwrap();
            gm.read_exact_from(addr, &mut file, mem::size_of::<u32>())
                .unwrap();
            let value: u32 = gm.read_obj(addr).unwrap();
            if cfg!(unix) {
                assert_eq!(value, 0);
            } else {
                assert_eq!(value, 0x0090_5a4d);
            }

            let mut sink = Vec::new();
            gm.write_all_to(addr, &mut sink, mem::size_of::<u32>())
                .unwrap();
            if cfg!(unix) {
                assert_eq!(sink, vec![0; mem::size_of::<u32>()]);
            } else {
                assert_eq!(sink, vec![0x4d, 0x5a, 0x90, 0x00]);
            }
        }
    }

    #[test]
    fn create_vec_with_regions() {
        let region_size = 0x400;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x1000), region_size),
        ];
        let mut iterated_regions = Vec::new();
        let gm = GuestMemoryMmap::from_ranges(&regions).unwrap();

        for region in gm.iter() {
            assert_eq!(region.len(), region_size as GuestUsize);
        }

        for region in gm.iter() {
            iterated_regions.push((region.start_addr(), region.len() as usize));
        }
        assert_eq!(regions, iterated_regions);

        assert!(regions
            .iter()
            .map(|x| (x.0, x.1))
            .eq(iterated_regions.iter().copied()));

        assert_eq!(gm.regions[0].guest_base, regions[0].0);
        assert_eq!(gm.regions[1].guest_base, regions[1].0);
    }

    #[test]
    fn test_memory() {
        let region_size = 0x400;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x1000), region_size),
        ];
        let mut iterated_regions = Vec::new();
        let gm = Arc::new(GuestMemoryMmap::from_ranges(&regions).unwrap());
        let mem = gm.memory();

        for region in mem.iter() {
            assert_eq!(region.len(), region_size as GuestUsize);
        }

        for region in mem.iter() {
            iterated_regions.push((region.start_addr(), region.len() as usize));
        }
        assert_eq!(regions, iterated_regions);

        assert!(regions
            .iter()
            .map(|x| (x.0, x.1))
            .eq(iterated_regions.iter().copied()));

        assert_eq!(gm.regions[0].guest_base, regions[0].0);
        assert_eq!(gm.regions[1].guest_base, regions[1].0);
    }

    #[test]
    fn test_access_cross_boundary() {
        let f1 = TempFile::new().unwrap().into_file();
        f1.set_len(0x1000).unwrap();
        let f2 = TempFile::new().unwrap().into_file();
        f2.set_len(0x1000).unwrap();

        let start_addr1 = GuestAddress(0x0);
        let start_addr2 = GuestAddress(0x1000);
        let gm =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x1000), (start_addr2, 0x1000)]).unwrap();
        let gm_backed_by_file = GuestMemoryMmap::from_ranges_with_files(&[
            (start_addr1, 0x1000, Some(FileOffset::new(f1, 0))),
            (start_addr2, 0x1000, Some(FileOffset::new(f2, 0))),
        ])
        .unwrap();

        let gm_list = vec![gm, gm_backed_by_file];
        for gm in gm_list.iter() {
            let sample_buf = &[1, 2, 3, 4, 5];
            assert_eq!(gm.write(sample_buf, GuestAddress(0xffc)).unwrap(), 5);
            let buf = &mut [0u8; 5];
            assert_eq!(gm.read(buf, GuestAddress(0xffc)).unwrap(), 5);
            assert_eq!(buf, sample_buf);
        }
    }

    #[test]
    fn test_retrieve_fd_backing_memory_region() {
        let f = TempFile::new().unwrap().into_file();
        f.set_len(0x400).unwrap();

        let start_addr = GuestAddress(0x0);
        let gm = GuestMemoryMmap::from_ranges(&[(start_addr, 0x400)]).unwrap();
        assert!(gm.find_region(start_addr).is_some());
        let region = gm.find_region(start_addr).unwrap();
        assert!(region.file_offset().is_none());

        let gm = GuestMemoryMmap::from_ranges_with_files(&[(
            start_addr,
            0x400,
            Some(FileOffset::new(f, 0)),
        )])
        .unwrap();
        assert!(gm.find_region(start_addr).is_some());
        let region = gm.find_region(start_addr).unwrap();
        assert!(region.file_offset().is_some());
    }

    #[test]
    #[cfg(unix)]
    fn test_retrieve_offset_from_fd_backing_memory_region() {
        let f = TempFile::new().unwrap().into_file();
        f.set_len(0x1400).unwrap();
        // The offset needs to be page aligned, otherwise mmap will fail.
        let offset = 0x1000;

        let start_addr = GuestAddress(0x0);
        let gm = GuestMemoryMmap::from_ranges(&[(start_addr, 0x400)]).unwrap();
        assert!(gm.find_region(start_addr).is_some());
        let region = gm.find_region(start_addr).unwrap();
        assert!(region.file_offset().is_none());

        let gm = GuestMemoryMmap::from_ranges_with_files(&[(
            start_addr,
            0x400,
            Some(FileOffset::new(f, offset)),
        )])
        .unwrap();
        assert!(gm.find_region(start_addr).is_some());
        let region = gm.find_region(start_addr).unwrap();
        assert!(region.file_offset().is_some());
        assert_eq!(region.file_offset().unwrap().start(), offset);
    }

    #[test]
    fn test_mmap_insert_region() {
        let region_size = 0x1000;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x10_0000), region_size),
        ];
        let gm = Arc::new(GuestMemoryMmap::from_ranges(&regions).unwrap());
        let mem_orig = gm.memory();
        assert_eq!(mem_orig.num_regions(), 2);

        let mmap = Arc::new(
            GuestRegionMmap::new(MmapRegion::new(0x1000).unwrap(), GuestAddress(0x8000)).unwrap(),
        );
        let gm = gm.insert_region(mmap).unwrap();
        let mmap = Arc::new(
            GuestRegionMmap::new(MmapRegion::new(0x1000).unwrap(), GuestAddress(0x4000)).unwrap(),
        );
        let gm = gm.insert_region(mmap).unwrap();
        let mmap = Arc::new(
            GuestRegionMmap::new(MmapRegion::new(0x1000).unwrap(), GuestAddress(0xc000)).unwrap(),
        );
        let gm = gm.insert_region(mmap).unwrap();
        let mmap = Arc::new(
            GuestRegionMmap::new(MmapRegion::new(0x1000).unwrap(), GuestAddress(0xc000)).unwrap(),
        );
        gm.insert_region(mmap).unwrap_err();

        assert_eq!(mem_orig.num_regions(), 2);
        assert_eq!(gm.num_regions(), 5);

        assert_eq!(gm.regions[0].start_addr(), GuestAddress(0x0000));
        assert_eq!(gm.regions[1].start_addr(), GuestAddress(0x4000));
        assert_eq!(gm.regions[2].start_addr(), GuestAddress(0x8000));
        assert_eq!(gm.regions[3].start_addr(), GuestAddress(0xc000));
        assert_eq!(gm.regions[4].start_addr(), GuestAddress(0x10_0000));
    }

    #[test]
    fn test_mmap_remove_region() {
        let region_size = 0x1000;
        let regions = vec![
            (GuestAddress(0x0), region_size),
            (GuestAddress(0x10_0000), region_size),
        ];
        let gm = Arc::new(GuestMemoryMmap::from_ranges(&regions).unwrap());
        let mem_orig = gm.memory();
        assert_eq!(mem_orig.num_regions(), 2);

        gm.remove_region(GuestAddress(0), 128).unwrap_err();
        gm.remove_region(GuestAddress(0x4000), 128).unwrap_err();
        let (gm, region) = gm.remove_region(GuestAddress(0x10_0000), 0x1000).unwrap();

        assert_eq!(mem_orig.num_regions(), 2);
        assert_eq!(gm.num_regions(), 1);

        assert_eq!(gm.regions[0].start_addr(), GuestAddress(0x0000));
        assert_eq!(region.start_addr(), GuestAddress(0x10_0000));
    }

    #[test]
    fn test_guest_memory_mmap_get_slice() {
        let region_addr = GuestAddress(0);
        let region_size = 0x400;
        let region =
            GuestRegionMmap::new(MmapRegion::new(region_size).unwrap(), region_addr).unwrap();

        // Normal case.
        let slice_addr = MemoryRegionAddress(0x100);
        let slice_size = 0x200;
        let slice = region.get_slice(slice_addr, slice_size).unwrap();
        assert_eq!(slice.len(), slice_size);

        // Empty slice.
        let slice_addr = MemoryRegionAddress(0x200);
        let slice_size = 0x0;
        let slice = region.get_slice(slice_addr, slice_size).unwrap();
        assert!(slice.is_empty());

        // Error case when the slice would extend past the region boundary.
        let slice_addr = MemoryRegionAddress(0x300);
        let slice_size = 0x200;
        assert!(region.get_slice(slice_addr, slice_size).is_err());
    }

    #[test]
    fn test_guest_memory_mmap_as_volatile_slice() {
        let region_addr = GuestAddress(0);
        let region_size = 0x400;
        let region =
            GuestRegionMmap::new(MmapRegion::new(region_size).unwrap(), region_addr).unwrap();

        // Test the slice length.
        let slice = region.as_volatile_slice().unwrap();
        assert_eq!(slice.len(), region_size);

        // Test writing and reading the slice data.
        let v = 0x1234_5678u32;
        let r = slice.get_ref::<u32>(0x200).unwrap();
        r.store(v);
        assert_eq!(r.load(), v);
    }

    #[test]
    fn test_guest_memory_get_slice() {
        let start_addr1 = GuestAddress(0);
        let start_addr2 = GuestAddress(0x800);
        let guest_mem =
            GuestMemoryMmap::from_ranges(&[(start_addr1, 0x400), (start_addr2, 0x400)]).unwrap();

        // Normal cases.
        let slice_size = 0x200;
        let slice = guest_mem
            .get_slice(GuestAddress(0x100), slice_size)
            .unwrap();
        assert_eq!(slice.len(), slice_size);

        let slice_size = 0x400;
        let slice = guest_mem
            .get_slice(GuestAddress(0x800), slice_size)
            .unwrap();
        assert_eq!(slice.len(), slice_size);

        // Empty slice.
        assert!(guest_mem
            .get_slice(GuestAddress(0x900), 0)
            .unwrap()
            .is_empty());

        // Error cases: wrong size or base address.
        assert!(guest_mem.get_slice(GuestAddress(0), 0x500).is_err());
        assert!(guest_mem.get_slice(GuestAddress(0x600), 0x100).is_err());
        assert!(guest_mem.get_slice(GuestAddress(0xc00), 0x100).is_err());
    }

    #[test]
    fn test_checked_offset() {
        let start_addr1 = GuestAddress(0);
        let start_addr2 = GuestAddress(0x800);
        let start_addr3 = GuestAddress(0xc00);
        let guest_mem = GuestMemoryMmap::from_ranges(&[
            (start_addr1, 0x400),
            (start_addr2, 0x400),
            (start_addr3, 0x400),
        ])
        .unwrap();

        assert_eq!(
            guest_mem.checked_offset(start_addr1, 0x200),
            Some(GuestAddress(0x200))
        );
        assert_eq!(
            guest_mem.checked_offset(start_addr1, 0xa00),
            Some(GuestAddress(0xa00))
        );
        assert_eq!(
            guest_mem.checked_offset(start_addr2, 0x7ff),
            Some(GuestAddress(0xfff))
        );
        assert_eq!(guest_mem.checked_offset(start_addr2, 0xc00), None);
        assert_eq!(guest_mem.checked_offset(start_addr1, std::usize::MAX), None);

        assert_eq!(guest_mem.checked_offset(start_addr1, 0x400), None);
        assert_eq!(
            guest_mem.checked_offset(start_addr1, 0x400 - 1),
            Some(GuestAddress(0x400 - 1))
        );
    }

    #[test]
    fn test_check_range() {
        let start_addr1 = GuestAddress(0);
        let start_addr2 = GuestAddress(0x800);
        let start_addr3 = GuestAddress(0xc00);
        let guest_mem = GuestMemoryMmap::from_ranges(&[
            (start_addr1, 0x400),
            (start_addr2, 0x400),
            (start_addr3, 0x400),
        ])
        .unwrap();

        assert!(guest_mem.check_range(start_addr1, 0x0));
        assert!(guest_mem.check_range(start_addr1, 0x200));
        assert!(guest_mem.check_range(start_addr1, 0x400));
        assert!(!guest_mem.check_range(start_addr1, 0xa00));
        assert!(guest_mem.check_range(start_addr2, 0x7ff));
        assert!(guest_mem.check_range(start_addr2, 0x800));
        assert!(!guest_mem.check_range(start_addr2, 0x801));
        assert!(!guest_mem.check_range(start_addr2, 0xc00));
        assert!(!guest_mem.check_range(start_addr1, std::usize::MAX));
    }

    #[test]
    fn test_atomic_accesses() {
        let region =
            GuestRegionMmap::new(MmapRegion::new(0x1000).unwrap(), GuestAddress(0)).unwrap();

        crate::bytes::tests::check_atomic_accesses(
            region,
            MemoryRegionAddress(0),
            MemoryRegionAddress(0x1000),
        );
    }

    #[test]
    fn test_dirty_tracking() {
        test_guest_memory_and_region(|| {
            crate::GuestMemoryMmap::<AtomicBitmap>::from_ranges(&[(GuestAddress(0), 0x1_0000)])
                .unwrap()
        });
    }
}