#[cfg(any(test, feature = "backend-bitmap"))]
mod backend;

use std::fmt::Debug;

use crate::{GuestMemory, GuestMemoryRegion};

#[cfg(any(test, feature = "backend-bitmap"))]
pub use backend::{ArcSlice, AtomicBitmap, RefSlice};

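/// Helper trait that exposes the bitmap slice type associated with the lifetime `'a`.
///
/// Implementors of `Bitmap` use it to name the (possibly borrowed) type returned by
/// `Bitmap::slice_at`; keeping the lifetime parameter on this separate helper trait stands in
/// for a lifetime-generic associated type on `Bitmap` itself.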
pub trait WithBitmapSlice<'a> {
    type S: BitmapSlice;
}

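/// Common trait implemented by bitmap slices: cheaply cloneable views whose own slices have
/// the same type as the slice itself, so taking a slice of a slice yields another slice.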
pub trait BitmapSlice: Bitmap + Clone + Debug + for<'a> WithBitmapSlice<'a, S = Self> {}

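/// Common interface for objects that track the areas dirtied by writes of a specified length
/// to a given offset, for example within a `GuestMemoryRegion`.
///
/// A minimal sketch of an implementing type is shown below. The `OneBit` name and the
/// `Cell`-based storage are illustrative assumptions only (not part of this crate): it records
/// a single "anything was dirtied" flag instead of a real per-page bitmap.
///
/// ```ignore
/// use std::cell::Cell;
///
/// #[derive(Clone, Debug)]
/// struct OneBit(Cell<bool>);
///
/// impl<'a> WithBitmapSlice<'a> for OneBit {
///     // A real backend would usually hand out a borrowed view that shares state with the
///     // parent bitmap (see `RefSlice`/`ArcSlice`); cloning is good enough for a sketch.
///     type S = Self;
/// }
///
/// impl BitmapSlice for OneBit {}
///
/// impl Bitmap for OneBit {
///     fn mark_dirty(&self, _offset: usize, _len: usize) {
///         self.0.set(true);
///     }
///
///     fn dirty_at(&self, _offset: usize) -> bool {
///         self.0.get()
///     }
///
///     fn slice_at(&self, _offset: usize) -> Self {
///         self.clone()
///     }
/// }
/// ```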
pub trait Bitmap: for<'a> WithBitmapSlice<'a> {
    /// Mark the memory range specified by the given `offset` and `len` as dirtied.
    fn mark_dirty(&self, offset: usize, len: usize);

    /// Check whether the specified `offset` is marked as dirty.
    fn dirty_at(&self, offset: usize) -> bool;

    /// Return a `<Self as WithBitmapSlice>::S` slice of the current bitmap, starting at
    /// the specified `offset`.
    fn slice_at(&self, offset: usize) -> <Self as WithBitmapSlice>::S;
}

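// The unit type can serve as a no-op bitmap for configurations that do not need dirty page
// tracking: every method below does nothing, and `dirty_at` always reports the offset as clean.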
impl<'a> WithBitmapSlice<'a> for () {
    type S = Self;
}

impl BitmapSlice for () {}

impl Bitmap for () {
    fn mark_dirty(&self, _offset: usize, _len: usize) {}

    fn dirty_at(&self, _offset: usize) -> bool {
        false
    }

    fn slice_at(&self, _offset: usize) -> Self {}
}

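// `Option<B>` forwards every call to the inner bitmap when it is `Some`, and behaves like the
// no-op `()` implementation when it is `None`, so dirty tracking can be enabled or disabled at
// runtime. A short sketch (assuming the `AtomicBitmap` backend re-exported above):
//
//     let tracking: Option<AtomicBitmap> = None;
//     tracking.mark_dirty(0, 0x1000); // silently ignored
//     assert!(!tracking.dirty_at(0));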
impl<'a, B> WithBitmapSlice<'a> for Option<B>
where
    B: WithBitmapSlice<'a>,
{
    type S = Option<B::S>;
}

impl<B: BitmapSlice> BitmapSlice for Option<B> {}

impl<B: Bitmap> Bitmap for Option<B> {
    fn mark_dirty(&self, offset: usize, len: usize) {
        if let Some(inner) = self {
            inner.mark_dirty(offset, len)
        }
    }

    fn dirty_at(&self, offset: usize) -> bool {
        if let Some(inner) = self {
            return inner.dirty_at(offset);
        }
        false
    }

    fn slice_at(&self, offset: usize) -> Option<<B as WithBitmapSlice>::S> {
        if let Some(inner) = self {
            return Some(inner.slice_at(offset));
        }
        None
    }
}

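/// Shorthand for the bitmap slice type associated with bitmap `B` over the lifetime `'a`.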
pub type BS<'a, B> = <B as WithBitmapSlice<'a>>::S;

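/// Shorthand for the bitmap slice type associated with the regions of guest memory object `M`
/// over the lifetime `'a`.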
pub type MS<'a, M> = BS<'a, <<M as GuestMemory>::R as GuestMemoryRegion>::B>;

#[cfg(test)]
pub(crate) mod tests {
    use super::*;

    use std::io::Cursor;
    use std::marker::PhantomData;
    use std::mem::size_of_val;
    use std::result::Result;
    use std::sync::atomic::Ordering;

    use crate::{Bytes, VolatileMemory};
    #[cfg(feature = "backend-mmap")]
    use crate::{GuestAddress, MemoryRegionAddress};

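    // The helpers below are shared by the unit tests of the bitmap backends and of the memory
    // backends that support dirty tracking: they check whether whole ranges are clean or dirty
    // and exercise the `Bytes` interface while verifying the bitmap state after each access.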
    pub fn range_is_clean<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| !b.dirty_at(offset))
    }

    pub fn range_is_dirty<B: Bitmap>(b: &B, start: usize, len: usize) -> bool {
        (start..start + len).all(|offset| b.dirty_at(offset))
    }

    pub fn check_range<B: Bitmap>(b: &B, start: usize, len: usize, clean: bool) -> bool {
        if clean {
            range_is_clean(b, start, len)
        } else {
            range_is_dirty(b, start, len)
        }
    }

    pub fn test_bitmap<B: Bitmap>(b: &B) {
        let len = 0x2000;
        let dirty_offset = 0x1000;
        let dirty_len = 0x100;

        // Take a slice into the middle of the bitmap; it should observe the same state.
        let s = b.slice_at(dirty_offset);

        assert!(range_is_clean(b, 0, len));
        assert!(range_is_clean(&s, 0, dirty_len));

        b.mark_dirty(dirty_offset, dirty_len);
        assert!(range_is_dirty(b, dirty_offset, dirty_len));
        assert!(range_is_dirty(&s, 0, dirty_len));
    }

    #[derive(Debug)]
    pub enum TestAccessError {
        RangeCleanCheck,
        RangeDirtyCheck,
    }

    // Wraps a closure that checks bitmap state (`check_range_fn`) and one that turns a raw
    // offset into the address type expected by the `Bytes` implementation under test
    // (`address_fn`).
    struct BytesHelper<F, G, M> {
        check_range_fn: F,
        address_fn: G,
        phantom: PhantomData<*const M>,
    }

    impl<F, G, M, A> BytesHelper<F, G, M>
    where
        F: Fn(&M, usize, usize, bool) -> bool,
        G: Fn(usize) -> A,
        M: Bytes<A>,
    {
        fn check_range(&self, m: &M, start: usize, len: usize, clean: bool) -> bool {
            (self.check_range_fn)(m, start, len, clean)
        }

        fn address(&self, offset: usize) -> A {
            (self.address_fn)(offset)
        }

        // Runs `op` against `bytes`, checking that the range starting at `dirty_offset` is
        // clean beforehand and dirty afterwards.
        fn test_access<Op>(
            &self,
            bytes: &M,
            dirty_offset: usize,
            dirty_len: usize,
            op: Op,
        ) -> Result<(), TestAccessError>
        where
            Op: Fn(&M, A),
        {
            if !self.check_range(bytes, dirty_offset, dirty_len, true) {
                return Err(TestAccessError::RangeCleanCheck);
            }

            op(bytes, self.address(dirty_offset));

            if !self.check_range(bytes, dirty_offset, dirty_len, false) {
                return Err(TestAccessError::RangeDirtyCheck);
            }

            Ok(())
        }
    }

    pub fn test_bytes<F, G, M, A>(bytes: &M, check_range_fn: F, address_fn: G, step: usize)
    where
        F: Fn(&M, usize, usize, bool) -> bool,
        G: Fn(usize) -> A,
        A: Copy,
        M: Bytes<A>,
        <M as Bytes<A>>::E: Debug,
    {
        const BUF_SIZE: usize = 1024;
        let buf = vec![1u8; BUF_SIZE];

        let val = 1u64;

        let h = BytesHelper {
            check_range_fn,
            address_fn,
            phantom: PhantomData,
        };

        let mut dirty_offset = 0x1000;

        // Test `write`.
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            assert_eq!(m.write(buf.as_slice(), addr).unwrap(), BUF_SIZE)
        })
        .unwrap();
        dirty_offset += step;

        // Test `write_slice`.
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            m.write_slice(buf.as_slice(), addr).unwrap()
        })
        .unwrap();
        dirty_offset += step;

        // Test `write_obj`.
        h.test_access(bytes, dirty_offset, size_of_val(&val), |m, addr| {
            m.write_obj(val, addr).unwrap()
        })
        .unwrap();
        dirty_offset += step;

        // Test `read_from`.
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            assert_eq!(
                m.read_from(addr, &mut Cursor::new(&buf), BUF_SIZE).unwrap(),
                BUF_SIZE
            )
        })
        .unwrap();
        dirty_offset += step;

        // Test `read_exact_from`.
        h.test_access(bytes, dirty_offset, BUF_SIZE, |m, addr| {
            m.read_exact_from(addr, &mut Cursor::new(&buf), BUF_SIZE)
                .unwrap()
        })
        .unwrap();
        dirty_offset += step;

        // Test `store`.
        h.test_access(bytes, dirty_offset, size_of_val(&val), |m, addr| {
            m.store(val, addr, Ordering::Relaxed).unwrap()
        })
        .unwrap();
    }

    #[cfg(feature = "backend-mmap")]
    fn test_guest_memory_region<R: GuestMemoryRegion>(region: &R) {
        let dirty_addr = MemoryRegionAddress(0x0);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let slice = region.get_slice(dirty_addr, dirty_len).unwrap();

        assert!(range_is_clean(region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        region.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            region.bitmap(),
            dirty_addr.0 as usize,
            dirty_len
        ));

        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_bytes(
            region,
            |r: &R, start: usize, len: usize, clean: bool| {
                check_range(r.bitmap(), start, len, clean)
            },
            |offset| MemoryRegionAddress(offset as u64),
            0x1000,
        );
    }

    #[cfg(feature = "backend-mmap")]
    pub fn test_guest_memory_and_region<M, F>(f: F)
    where
        M: GuestMemory,
        F: Fn() -> M,
    {
        let m = f();
        let dirty_addr = GuestAddress(0x1000);
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let (region, region_addr) = m.to_region_addr(dirty_addr).unwrap();
        let slice = m.get_slice(dirty_addr, dirty_len).unwrap();

        assert!(range_is_clean(region.bitmap(), 0, region.len() as usize));
        assert!(range_is_clean(slice.bitmap(), 0, dirty_len));

        m.write_obj(val, dirty_addr).unwrap();

        assert!(range_is_dirty(
            region.bitmap(),
            region_addr.0 as usize,
            dirty_len
        ));

        assert!(range_is_dirty(slice.bitmap(), 0, dirty_len));

        test_guest_memory_region(f().find_region(GuestAddress(0)).unwrap());

        // Check the dirty state of every region touched by an access that goes through the
        // `GuestMemory` interface.
        let check_range_closure = |m: &M, start: usize, len: usize, clean: bool| -> bool {
            let mut check_result = true;
            m.try_access(len, GuestAddress(start as u64), |_, size, reg_addr, reg| {
                if !check_range(reg.bitmap(), reg_addr.0 as usize, size, clean) {
                    check_result = false;
                }
                Ok(size)
            })
            .unwrap();

            check_result
        };

        test_bytes(
            &f(),
            check_range_closure,
            |offset| GuestAddress(offset as u64),
            0x1000,
        );
    }

    pub fn test_volatile_memory<M: VolatileMemory>(m: &M) {
        assert!(m.len() >= 0x8000);

        let dirty_offset = 0x1000;
        let val = 123u64;
        let dirty_len = size_of_val(&val);

        let get_ref_offset = 0x2000;
        let array_ref_offset = 0x3000;

        let s1 = m.as_volatile_slice();
        let s2 = m.get_slice(dirty_offset, dirty_len).unwrap();

        assert!(range_is_clean(s1.bitmap(), 0, s1.len()));
        assert!(range_is_clean(s2.bitmap(), 0, s2.len()));

        s1.write_obj(val, dirty_offset).unwrap();

        assert!(range_is_dirty(s1.bitmap(), dirty_offset, dirty_len));
        assert!(range_is_dirty(s2.bitmap(), 0, dirty_len));

        let v_ref = m.get_ref::<u64>(get_ref_offset).unwrap();
        assert!(range_is_clean(s1.bitmap(), get_ref_offset, dirty_len));
        v_ref.store(val);
        assert!(range_is_dirty(s1.bitmap(), get_ref_offset, dirty_len));

        let arr_ref = m.get_array_ref::<u64>(array_ref_offset, 1).unwrap();
        assert!(range_is_clean(s1.bitmap(), array_ref_offset, dirty_len));
        arr_ref.store(0, val);
        assert!(range_is_dirty(s1.bitmap(), array_ref_offset, dirty_len));
    }
}