//! Module containing abstracts for dealing with contiguous regions of guest memory

use crate::bitmap::{Bitmap, BS};
-use crate::guest_memory::Error;
use crate::guest_memory::Result;
use crate::{
-    Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
-    VolatileSlice,
+    Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
+    GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
};
+use std::sync::atomic::Ordering;
use std::sync::Arc;

/// Represents a continuous region of guest physical memory.
#[allow(clippy::len_without_is_empty)]
-pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
+pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
    /// Type used for dirty memory tracking.
    type B: Bitmap;
@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
    /// Rust memory safety model. It's the caller's responsibility to ensure that there's no
    /// concurrent accesses to the underlying guest memory.
    fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
-        Err(Error::HostAddressNotAvailable)
+        Err(GuestMemoryError::HostAddressNotAvailable)
    }

    /// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
        offset: MemoryRegionAddress,
        count: usize,
    ) -> Result<VolatileSlice<BS<Self::B>>> {
-        Err(Error::HostAddressNotAvailable)
+        Err(GuestMemoryError::HostAddressNotAvailable)
    }

    /// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,147 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
        self.regions.iter().map(AsRef::as_ref)
    }
}
+
+impl<R: GuestMemoryRegion> Bytes<MemoryRegionAddress> for R {
+    type E = GuestMemoryError;
+
+    /// # Examples
+    /// * Write a slice at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// let res = gm
+    ///     .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
+    ///     .expect("Could not write to guest memory");
+    /// assert_eq!(5, res);
+    /// # }
+    /// ```
+    fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .write(buf, maddr)
+            .map_err(Into::into)
+    }
+
+    /// # Examples
+    /// * Read a slice of length 16 at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// let buf = &mut [0u8; 16];
+    /// let res = gm
+    ///     .read(buf, GuestAddress(0x1200))
+    ///     .expect("Could not read from guest memory");
+    /// assert_eq!(16, res);
+    /// # }
+    /// ```
+    fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .read(buf, maddr)
+            .map_err(Into::into)
+    }
+
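+    // NOTE: illustrative doctest added for symmetry with `write` above; it
+    // assumes the same "backend-mmap" setup as the existing examples.
+    /// # Examples
+    /// * Write a slice at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// gm.write_slice(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
+    ///     .expect("Could not write slice to guest memory");
+    /// # }
+    /// ```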
+    fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .write_slice(buf, maddr)
+            .map_err(Into::into)
+    }
+
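+    // NOTE: illustrative doctest added for symmetry with `read` above; same
+    // assumed "backend-mmap" setup as the existing examples.
+    /// # Examples
+    /// * Read a slice of length 16 at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// let buf = &mut [0u8; 16];
+    /// gm.read_slice(buf, GuestAddress(0x1200))
+    ///     .expect("Could not read slice from guest memory");
+    /// # }
+    /// ```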
+    fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .read_slice(buf, maddr)
+            .map_err(Into::into)
+    }
+
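+    // NOTE: illustrative doctest; it assumes the crate's `ReadVolatile` impl
+    // for byte slices, so the slice below acts as the source being read from.
+    /// # Examples
+    /// * Populate guest memory from a byte slice acting as a `ReadVolatile` source.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// let mut src = [1u8, 2, 3, 4].as_slice();
+    /// let res = gm
+    ///     .read_volatile_from(GuestAddress(0x1200), &mut src, 4)
+    ///     .expect("Could not read from source into guest memory");
+    /// assert_eq!(4, res);
+    /// # }
+    /// ```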
+    fn read_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<usize>
+    where
+        F: ReadVolatile,
+    {
+        self.as_volatile_slice()?
+            .read_volatile_from(addr.0 as usize, src, count)
+            .map_err(Into::into)
+    }
+
+    fn read_exact_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<()>
+    where
+        F: ReadVolatile,
+    {
+        self.as_volatile_slice()?
+            .read_exact_volatile_from(addr.0 as usize, src, count)
+            .map_err(Into::into)
+    }
+
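+    // NOTE: illustrative doctest; it assumes the crate's `WriteVolatile` impl
+    // for mutable byte slices, which serves as the destination written to.
+    /// # Examples
+    /// * Copy 4 bytes of guest memory into a mutable byte slice.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// let mut buf = [0u8; 4];
+    /// let mut dst = buf.as_mut_slice();
+    /// let res = gm
+    ///     .write_volatile_to(GuestAddress(0x1200), &mut dst, 4)
+    ///     .expect("Could not write guest memory into destination");
+    /// assert_eq!(4, res);
+    /// # }
+    /// ```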
+    fn write_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<usize>
+    where
+        F: WriteVolatile,
+    {
+        self.as_volatile_slice()?
+            .write_volatile_to(addr.0 as usize, dst, count)
+            .map_err(Into::into)
+    }
+
+    fn write_all_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<()>
+    where
+        F: WriteVolatile,
+    {
+        self.as_volatile_slice()?
+            .write_all_volatile_to(addr.0 as usize, dst, count)
+            .map_err(Into::into)
+    }
+
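+    // NOTE: illustrative doctest; `store` takes an `Ordering`, so the example
+    // additionally imports `std::sync::atomic::Ordering`.
+    /// # Examples
+    /// * Store a u32 at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use std::sync::atomic::Ordering;
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// gm.store(99u32, GuestAddress(0x1200), Ordering::SeqCst)
+    ///     .expect("Could not store value into guest memory");
+    /// # }
+    /// ```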
+    fn store<T: AtomicAccess>(
+        &self,
+        val: T,
+        addr: MemoryRegionAddress,
+        order: Ordering,
+    ) -> Result<()> {
+        self.as_volatile_slice().and_then(|s| {
+            s.store(val, addr.raw_value() as usize, order)
+                .map_err(Into::into)
+        })
+    }
+
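+    // NOTE: illustrative doctest, mirroring the `store` sketch above; the
+    // asserted value relies on freshly mmapped guest memory being zeroed.
+    /// # Examples
+    /// * Load a u32 from guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use std::sync::atomic::Ordering;
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #    .expect("Could not create guest memory");
+    /// #
+    /// let val: u32 = gm
+    ///     .load(GuestAddress(0x1200), Ordering::SeqCst)
+    ///     .expect("Could not load value from guest memory");
+    /// assert_eq!(val, 0);
+    /// # }
+    /// ```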
+    fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
+        self.as_volatile_slice()
+            .and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
+    }
+}