
1// Copyright 2023 The Fuchsia Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5use std::mem::{align_of, size_of};
6use zerocopy::FromBytes;
7
/// Errors returned by `MemoryMappedVmo` accessors.
#[derive(Debug)]
pub enum Error {
    /// The requested offset is misaligned for the element type, or the requested
    /// range does not fit within the VMO bounds.
    InvalidInput,
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::InvalidInput => f.write_str("invalid offset or range for VMO access"),
        }
    }
}

impl std::error::Error for Error {}
12
/// Memory-maps a VMO and mediates access to its memory.
pub struct MemoryMappedVmo {
    // Base address at which the VMO is mapped in this process' root VMAR;
    // unmapped again in Drop.
    map_addr: usize,
    // Usable size in bytes (the VMO's content size, not page-rounded), used
    // for bounds validation of every access.
    vmo_size: usize,
    // True if the mapping was created with write permission; the mutable
    // accessors panic when this is false.
    writable: bool,
}
19
20impl MemoryMappedVmo {
21    /// Maps a VMO in read-only mode.
22    ///
23    /// Attempting to call methods on the returned object that return mutable references will panic.
24    ///
25    /// # Safety
26    /// The caller must either guarantee that the `vmo` is not modified by others while the returned
27    /// instance is alive or that accesses are synchronized in application-specific ways.
28    pub unsafe fn new_readonly(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
29        unsafe { Self::new_impl(vmo, false) }
30    }
31
32    /// Maps a VMO in read-write mode.
33    ///
34    /// # Safety
35    /// The caller must either guarantee that the `vmo` is not modified by others while the returned
36    /// instance is alive or that accesses are synchronized in application-specific ways.
37    pub unsafe fn new_readwrite(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
38        unsafe { Self::new_impl(vmo, true) }
39    }
40
41    /// # Safety
42    /// The caller must either guarantee that the `vmo` is not modified by others while the returned
43    /// instance is alive or that accesses are synchronized in application-specific ways.
44    unsafe fn new_impl(vmo: &zx::Vmo, writable: bool) -> Result<MemoryMappedVmo, zx::Status> {
45        let vmo_size = vmo.get_content_size()? as usize;
46
47        let mut flags = zx::VmarFlags::PERM_READ
48            | zx::VmarFlags::ALLOW_FAULTS
49            | zx::VmarFlags::REQUIRE_NON_RESIZABLE;
50        if writable {
51            flags |= zx::VmarFlags::PERM_WRITE;
52        }
53
54        let map_addr = fuchsia_runtime::vmar_root_self().map(0, &vmo, 0, vmo_size, flags)?;
55        Ok(MemoryMappedVmo { map_addr, vmo_size, writable })
56    }
57
58    /// Returns the number of usable bytes in the VMO (i.e. its ZX_PROP_VMO_CONTENT_SIZE property,
59    /// which is not rounded to the page size).
60    pub fn vmo_size(&self) -> usize {
61        self.vmo_size
62    }
63
64    /// Given an element type, a base offset within the VMO and a number of elements, verifies that
65    /// the offset is suitably aligned and that the range of the elements fits in the VMO bounds. If
66    /// both conditions are satisfied, return a const pointer to its first element.
67    fn validate_and_get_ptr<T>(
68        &self,
69        byte_offset: usize,
70        num_elements: usize,
71    ) -> Result<*const T, Error> {
72        if byte_offset % align_of::<T>() == 0 {
73            if let Some(num_bytes) = size_of::<T>().checked_mul(num_elements) {
74                if let Some(end) = byte_offset.checked_add(num_bytes) {
75                    if end <= self.vmo_size {
76                        return Ok((self.map_addr + byte_offset) as *const T);
77                    }
78                }
79            }
80        }
81
82        Err(Error::InvalidInput)
83    }
84
85    /// Like validate_and_get_ptr, but returns a mut pointer and panics if the VMO is not writable.
86    fn validate_and_get_mut_ptr<T>(
87        &mut self,
88        byte_offset: usize,
89        num_elements: usize,
90    ) -> Result<*mut T, Error> {
91        if !self.writable {
92            panic!("MemoryMappedVmo is not writable");
93        }
94
95        Ok(self.validate_and_get_ptr::<T>(byte_offset, num_elements)? as *mut T)
96    }
97
98    /// Returns a reference to a slice of elements in the VMO.
99    ///
100    /// This method validates the alignment and the bounds against the VMO size.
101    pub fn get_slice<'a, T: FromBytes>(
102        &'a self,
103        byte_offset: usize,
104        num_elements: usize,
105    ) -> Result<&'a [T], Error> {
106        let ptr = self.validate_and_get_ptr(byte_offset, num_elements)?;
107        unsafe { Ok(std::slice::from_raw_parts(ptr, num_elements)) }
108    }
109
110    /// Returns a reference to an element in the VMO.
111    ///
112    /// This method validates the alignment and the bounds against the VMO size.
113    pub fn get_object<'a, T: FromBytes>(&'a self, byte_offset: usize) -> Result<&'a T, Error> {
114        let ptr = self.validate_and_get_ptr(byte_offset, 1)?;
115        unsafe { Ok(&*ptr) }
116    }
117
118    /// Returns a mutable reference to a slice of elements in the VMO.
119    ///
120    /// This method validates the alignment and the bounds against the VMO size.
121    pub fn get_slice_mut<'a, T: FromBytes>(
122        &'a mut self,
123        byte_offset: usize,
124        num_elements: usize,
125    ) -> Result<&'a mut [T], Error> {
126        let ptr = self.validate_and_get_mut_ptr(byte_offset, num_elements)?;
127        unsafe { Ok(std::slice::from_raw_parts_mut(ptr, num_elements)) }
128    }
129
130    /// Returns a mutable reference to an element in the VMO.
131    ///
132    /// This method validates the alignment and the bounds against the VMO size.
133    pub fn get_object_mut<'a, T: FromBytes>(
134        &'a mut self,
135        byte_offset: usize,
136    ) -> Result<&'a mut T, Error> {
137        let ptr = self.validate_and_get_mut_ptr(byte_offset, 1)?;
138        unsafe { Ok(&mut *ptr) }
139    }
140}
141
142impl Drop for MemoryMappedVmo {
143    fn drop(&mut self) {
144        // SAFETY: We owned the mapping.
145        unsafe {
146            fuchsia_runtime::vmar_root_self()
147                .unmap(self.map_addr, self.vmo_size)
148                .expect("failed to unmap MemoryMappedVmo");
149        }
150    }
151}
152
153#[cfg(test)]
154mod tests {
155    use super::*;
156    use assert_matches::assert_matches;
157
    // Test data used by some of the following tests.
    const TEST_DATA: [u64; 4] = [11, 22, 33, 44];
    // Total byte size of TEST_DATA; also used as the content size of test VMOs.
    const TEST_DATA_SIZE: usize = size_of::<u64>() * TEST_DATA.len();
161
162    #[test]
163    fn test_vmo_size() {
164        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
165        let m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
166
167        assert_eq!(m.vmo_size(), TEST_DATA_SIZE);
168    }
169
170    #[test]
171    fn test_write_objects_read_slice() {
172        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
173
174        // Fill VMO with test data as individual objects.
175        {
176            let mut m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
177            for (i, val) in TEST_DATA.iter().enumerate() {
178                *m.get_object_mut(size_of::<u64>() * i).unwrap() = *val;
179            }
180        }
181
182        // Verify that we can read them back correctly as a slice.
183        {
184            let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
185            assert_eq!(*m.get_slice::<u64>(0, 4).unwrap(), TEST_DATA);
186        }
187    }
188
189    #[test]
190    fn test_write_slice_read_objects() {
191        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
192
193        // Fill VMO with test data as a slice.
194        {
195            let mut m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
196            m.get_slice_mut(0, 4).unwrap().copy_from_slice(&TEST_DATA);
197        }
198
199        // Verify that we can read it back correctly as individual objects.
200        {
201            let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
202            for (i, expected_val) in TEST_DATA.iter().enumerate() {
203                let actual_val: &u64 = m.get_object(size_of::<u64>() * i).unwrap();
204                assert_eq!(*actual_val, *expected_val, "value mismatch at i={}", i);
205            }
206        }
207    }
208
209    #[test]
210    fn test_write_slice_read_subslices() {
211        const COUNT: usize = 4;
212        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
213
214        // Fill VMO with test data.
215        let mut m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
216        m.get_slice_mut::<u64>(0, COUNT).unwrap().copy_from_slice(&[11, 22, 33, 44]);
217
218        // Verify that we can read subslices correctly.
219        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
220        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 0).unwrap(), []);
221        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 1).unwrap(), [22]);
222        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 2).unwrap(), [22, 33]);
223        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 3).unwrap(), [22, 33, 44]);
224    }
225
226    #[test]
227    fn test_uninitialized_is_zero() {
228        const COUNT: usize = 4;
229        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
230        let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
231
232        // Verify that the value of uninitialized data is zero.
233        assert_eq!(*m.get_slice::<u64>(0, COUNT).unwrap(), [0; COUNT]);
234    }
235
236    #[test]
237    fn test_range_errors() {
238        const COUNT: usize = 4;
239        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
240        let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
241
242        // Reading at a misaligned offset should fail.
243        const MISALIGNED_OFFSET: usize = size_of::<u64>() - 1;
244        assert_matches!(m.get_object::<u64>(MISALIGNED_OFFSET), Err(Error::InvalidInput));
245
246        // Reading an out-of-bounds range should fail.
247        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
248        assert_matches!(
249            m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, COUNT),
250            Err(Error::InvalidInput)
251        );
252    }
253
254    #[test]
255    #[should_panic(expected = "MemoryMappedVmo is not writable")]
256    fn test_cannot_get_mutable_slice_from_readonly_vmo() {
257        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
258        let mut m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
259
260        // This should panic:
261        let _ = m.get_slice_mut::<u64>(0, 1);
262    }
263
264    #[test]
265    #[should_panic(expected = "MemoryMappedVmo is not writable")]
266    fn test_cannot_get_mutable_object_from_readonly_vmo() {
267        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
268        let mut m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
269
270        // This should panic:
271        let _ = m.get_object_mut::<u64>(0);
272    }
273}