Skip to content

Commit f91e18a

Browse files
committed
generically impl Bytes for all R: GuestMemoryRegion
This trait implementation of GuestMemoryRegion for `GuestRegionMmap` does not actually make use of the specifics of `GuestRegionMmap`, so it can be completely generic in terms of `GuestMemoryRegion`. This allows us to move it to guest_memory.rs, eliminating one further instance of code depending on exactly one of mmap_unix.rs and mmap_xen.rs being compiled in. Replace .unwrap() with error propagation via `?`, as we no longer know that we are dealing with a specific GuestMemoryRegion impl that always implements as_volatile_slice(). Signed-off-by: Patrick Roy <[email protected]>
1 parent 69ffd45 commit f91e18a

File tree

3 files changed

+154
-157
lines changed

3 files changed

+154
-157
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
`write_volatile_to` and `write_all_volatile_to` functions from the `GuestMemory` trait to the `Bytes` trait.
1515
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Give `GuestMemory::find_region` a default implementation,
1616
based on linear search.
17+
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Implement `Bytes<MemoryRegionAddress>` generically
18+
for all `R: GuestMemoryRegion`.
1719

1820
### Removed
1921

src/mmap.rs

Lines changed: 2 additions & 151 deletions
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,13 @@ use std::borrow::Borrow;
1717
use std::io::{Seek, SeekFrom};
1818
use std::ops::Deref;
1919
use std::result;
20-
use std::sync::atomic::Ordering;
2120

2221
use crate::address::Address;
2322
use crate::bitmap::{Bitmap, BS};
2423
use crate::guest_memory::{self, FileOffset, GuestAddress, GuestUsize, MemoryRegionAddress};
2524
use crate::region::GuestMemoryRegion;
2625
use crate::volatile_memory::{VolatileMemory, VolatileSlice};
27-
use crate::{AtomicAccess, Bytes, Error, GuestRegionCollection, ReadVolatile, WriteVolatile};
26+
use crate::{Error, GuestRegionCollection};
2827

2928
#[cfg(all(not(feature = "xen"), unix))]
3029
pub use crate::mmap_unix::{Error as MmapRegionError, MmapRegion, MmapRegionBuilder};
@@ -143,154 +142,6 @@ impl<B: NewBitmap> GuestRegionMmap<B> {
143142
}
144143
}
145144

146-
impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionMmap<B> {
147-
type E = guest_memory::Error;
148-
149-
/// # Examples
150-
/// * Write a slice at guest address 0x1200.
151-
///
152-
/// ```
153-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
154-
/// #
155-
/// # let start_addr = GuestAddress(0x1000);
156-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
157-
/// # .expect("Could not create guest memory");
158-
/// #
159-
/// let res = gm
160-
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
161-
/// .expect("Could not write to guest memory");
162-
/// assert_eq!(5, res);
163-
/// ```
164-
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
165-
let maddr = addr.raw_value() as usize;
166-
self.as_volatile_slice()
167-
.unwrap()
168-
.write(buf, maddr)
169-
.map_err(Into::into)
170-
}
171-
172-
/// # Examples
173-
/// * Read a slice of length 16 at guestaddress 0x1200.
174-
///
175-
/// ```
176-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
177-
/// #
178-
/// # let start_addr = GuestAddress(0x1000);
179-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
180-
/// # .expect("Could not create guest memory");
181-
/// #
182-
/// let buf = &mut [0u8; 16];
183-
/// let res = gm
184-
/// .read(buf, GuestAddress(0x1200))
185-
/// .expect("Could not read from guest memory");
186-
/// assert_eq!(16, res);
187-
/// ```
188-
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
189-
let maddr = addr.raw_value() as usize;
190-
self.as_volatile_slice()
191-
.unwrap()
192-
.read(buf, maddr)
193-
.map_err(Into::into)
194-
}
195-
196-
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
197-
let maddr = addr.raw_value() as usize;
198-
self.as_volatile_slice()
199-
.unwrap()
200-
.write_slice(buf, maddr)
201-
.map_err(Into::into)
202-
}
203-
204-
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
205-
let maddr = addr.raw_value() as usize;
206-
self.as_volatile_slice()
207-
.unwrap()
208-
.read_slice(buf, maddr)
209-
.map_err(Into::into)
210-
}
211-
212-
fn read_volatile_from<F>(
213-
&self,
214-
addr: MemoryRegionAddress,
215-
src: &mut F,
216-
count: usize,
217-
) -> Result<usize, Self::E>
218-
where
219-
F: ReadVolatile,
220-
{
221-
self.as_volatile_slice()
222-
.unwrap()
223-
.read_volatile_from(addr.0 as usize, src, count)
224-
.map_err(Into::into)
225-
}
226-
227-
fn read_exact_volatile_from<F>(
228-
&self,
229-
addr: MemoryRegionAddress,
230-
src: &mut F,
231-
count: usize,
232-
) -> Result<(), Self::E>
233-
where
234-
F: ReadVolatile,
235-
{
236-
self.as_volatile_slice()
237-
.unwrap()
238-
.read_exact_volatile_from(addr.0 as usize, src, count)
239-
.map_err(Into::into)
240-
}
241-
242-
fn write_volatile_to<F>(
243-
&self,
244-
addr: MemoryRegionAddress,
245-
dst: &mut F,
246-
count: usize,
247-
) -> Result<usize, Self::E>
248-
where
249-
F: WriteVolatile,
250-
{
251-
self.as_volatile_slice()
252-
.unwrap()
253-
.write_volatile_to(addr.0 as usize, dst, count)
254-
.map_err(Into::into)
255-
}
256-
257-
fn write_all_volatile_to<F>(
258-
&self,
259-
addr: MemoryRegionAddress,
260-
dst: &mut F,
261-
count: usize,
262-
) -> Result<(), Self::E>
263-
where
264-
F: WriteVolatile,
265-
{
266-
self.as_volatile_slice()
267-
.unwrap()
268-
.write_all_volatile_to(addr.0 as usize, dst, count)
269-
.map_err(Into::into)
270-
}
271-
272-
fn store<T: AtomicAccess>(
273-
&self,
274-
val: T,
275-
addr: MemoryRegionAddress,
276-
order: Ordering,
277-
) -> guest_memory::Result<()> {
278-
self.as_volatile_slice().and_then(|s| {
279-
s.store(val, addr.raw_value() as usize, order)
280-
.map_err(Into::into)
281-
})
282-
}
283-
284-
fn load<T: AtomicAccess>(
285-
&self,
286-
addr: MemoryRegionAddress,
287-
order: Ordering,
288-
) -> guest_memory::Result<T> {
289-
self.as_volatile_slice()
290-
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
291-
}
292-
}
293-
294145
impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
295146
type B = B;
296147

@@ -382,7 +233,7 @@ mod tests {
382233

383234
use crate::bitmap::tests::test_guest_memory_and_region;
384235
use crate::bitmap::AtomicBitmap;
385-
use crate::{Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
236+
use crate::{Bytes, Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
386237

387238
use std::io::Write;
388239
use std::mem;

src/region.rs

Lines changed: 150 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
11
//! Module containing abstracts for dealing with contiguous regions of guest memory
22
33
use crate::bitmap::{Bitmap, BS};
4-
use crate::guest_memory::Error;
54
use crate::guest_memory::Result;
65
use crate::{
7-
Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
8-
VolatileSlice,
6+
Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
7+
GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
98
};
9+
use std::sync::atomic::Ordering;
1010
use std::sync::Arc;
1111

1212
/// Represents a continuous region of guest physical memory.
1313
#[allow(clippy::len_without_is_empty)]
14-
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
14+
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
1515
/// Type used for dirty memory tracking.
1616
type B: Bitmap;
1717

@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
7373
/// Rust memory safety model. It's the caller's responsibility to ensure that there's no
7474
/// concurrent accesses to the underlying guest memory.
7575
fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
76-
Err(Error::HostAddressNotAvailable)
76+
Err(GuestMemoryError::HostAddressNotAvailable)
7777
}
7878

7979
/// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
8989
offset: MemoryRegionAddress,
9090
count: usize,
9191
) -> Result<VolatileSlice<BS<Self::B>>> {
92-
Err(Error::HostAddressNotAvailable)
92+
Err(GuestMemoryError::HostAddressNotAvailable)
9393
}
9494

9595
/// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,147 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
299299
self.regions.iter().map(AsRef::as_ref)
300300
}
301301
}
302+
303+
impl<R: GuestMemoryRegion> Bytes<MemoryRegionAddress> for R {
304+
type E = GuestMemoryError;
305+
306+
/// # Examples
307+
/// * Write a slice at guest address 0x1200.
308+
///
309+
/// ```
310+
/// # #[cfg(feature = "backend-mmap")]
311+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
312+
/// #
313+
/// # #[cfg(feature = "backend-mmap")]
314+
/// # {
315+
/// # let start_addr = GuestAddress(0x1000);
316+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
317+
/// # .expect("Could not create guest memory");
318+
/// #
319+
/// let res = gm
320+
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
321+
/// .expect("Could not write to guest memory");
322+
/// assert_eq!(5, res);
323+
/// # }
324+
/// ```
325+
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
326+
let maddr = addr.raw_value() as usize;
327+
self.as_volatile_slice()?
328+
.write(buf, maddr)
329+
.map_err(Into::into)
330+
}
331+
332+
/// # Examples
333+
/// * Read a slice of length 16 at guest address 0x1200.
334+
///
335+
/// ```
336+
/// # #[cfg(feature = "backend-mmap")]
337+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
338+
/// #
339+
/// # #[cfg(feature = "backend-mmap")]
340+
/// # {
341+
/// # let start_addr = GuestAddress(0x1000);
342+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
343+
/// # .expect("Could not create guest memory");
344+
/// #
345+
/// let buf = &mut [0u8; 16];
346+
/// let res = gm
347+
/// .read(buf, GuestAddress(0x1200))
348+
/// .expect("Could not read from guest memory");
349+
/// assert_eq!(16, res);
350+
/// # }
351+
/// ```
352+
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
353+
let maddr = addr.raw_value() as usize;
354+
self.as_volatile_slice()?
355+
.read(buf, maddr)
356+
.map_err(Into::into)
357+
}
358+
359+
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
360+
let maddr = addr.raw_value() as usize;
361+
self.as_volatile_slice()?
362+
.write_slice(buf, maddr)
363+
.map_err(Into::into)
364+
}
365+
366+
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
367+
let maddr = addr.raw_value() as usize;
368+
self.as_volatile_slice()?
369+
.read_slice(buf, maddr)
370+
.map_err(Into::into)
371+
}
372+
373+
fn read_volatile_from<F>(
374+
&self,
375+
addr: MemoryRegionAddress,
376+
src: &mut F,
377+
count: usize,
378+
) -> Result<usize>
379+
where
380+
F: ReadVolatile,
381+
{
382+
self.as_volatile_slice()?
383+
.read_volatile_from(addr.0 as usize, src, count)
384+
.map_err(Into::into)
385+
}
386+
387+
fn read_exact_volatile_from<F>(
388+
&self,
389+
addr: MemoryRegionAddress,
390+
src: &mut F,
391+
count: usize,
392+
) -> Result<()>
393+
where
394+
F: ReadVolatile,
395+
{
396+
self.as_volatile_slice()?
397+
.read_exact_volatile_from(addr.0 as usize, src, count)
398+
.map_err(Into::into)
399+
}
400+
401+
fn write_volatile_to<F>(
402+
&self,
403+
addr: MemoryRegionAddress,
404+
dst: &mut F,
405+
count: usize,
406+
) -> Result<usize>
407+
where
408+
F: WriteVolatile,
409+
{
410+
self.as_volatile_slice()?
411+
.write_volatile_to(addr.0 as usize, dst, count)
412+
.map_err(Into::into)
413+
}
414+
415+
fn write_all_volatile_to<F>(
416+
&self,
417+
addr: MemoryRegionAddress,
418+
dst: &mut F,
419+
count: usize,
420+
) -> Result<()>
421+
where
422+
F: WriteVolatile,
423+
{
424+
self.as_volatile_slice()?
425+
.write_all_volatile_to(addr.0 as usize, dst, count)
426+
.map_err(Into::into)
427+
}
428+
429+
fn store<T: AtomicAccess>(
430+
&self,
431+
val: T,
432+
addr: MemoryRegionAddress,
433+
order: Ordering,
434+
) -> Result<()> {
435+
self.as_volatile_slice().and_then(|s| {
436+
s.store(val, addr.raw_value() as usize, order)
437+
.map_err(Into::into)
438+
})
439+
}
440+
441+
fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
442+
self.as_volatile_slice()
443+
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
444+
}
445+
}

0 commit comments

Comments
 (0)