use std::mem::{align_of, size_of};

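/// Error returned by the typed accessors when the requested offset or range is invalid.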
#[derive(Debug)]
pub enum Error {
    InvalidInput,
}

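/// A VMO mapped into the current process's address space, with typed accessors to read and,
/// optionally, write its contents in place.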
pub struct MemoryMappedVmo {
    map_addr: usize,
    vmo_size: usize,
    writable: bool,
}

impl MemoryMappedVmo {
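    /// Maps the given VMO into the current address space read-only.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the VMO's contents are not modified and that it is not
    /// resized while this `MemoryMappedVmo` (or any reference obtained from it) is alive.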
    pub unsafe fn new_readonly(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
        unsafe { Self::new_impl(vmo, false) }
    }

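    /// Maps the given VMO into the current address space with read and write permissions.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that no other code accesses the VMO and that it is not resized
    /// while this `MemoryMappedVmo` (or any reference obtained from it) is alive.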
    pub unsafe fn new_readwrite(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
        unsafe { Self::new_impl(vmo, true) }
    }

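    /// Maps the whole VMO (up to its content size) into this process's root VMAR.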
    unsafe fn new_impl(vmo: &zx::Vmo, writable: bool) -> Result<MemoryMappedVmo, zx::Status> {
        let vmo_size = vmo.get_content_size()? as usize;

        let mut flags = zx::VmarFlags::PERM_READ
            | zx::VmarFlags::ALLOW_FAULTS
            | zx::VmarFlags::REQUIRE_NON_RESIZABLE;
        if writable {
            flags |= zx::VmarFlags::PERM_WRITE;
        }

        let map_addr = fuchsia_runtime::vmar_root_self().map(0, vmo, 0, vmo_size, flags)?;
        Ok(MemoryMappedVmo { map_addr, vmo_size, writable })
    }

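    /// Returns the size, in bytes, of the mapped data.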
    pub fn vmo_size(&self) -> usize {
        self.vmo_size
    }

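    /// Verifies that `num_elements` values of type `T` starting at `byte_offset` are suitably
    /// aligned and entirely contained within the mapping, and returns a pointer to the first one.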
    fn validate_and_get_ptr<T>(
        &self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<*const T, Error> {
        if byte_offset % align_of::<T>() == 0 {
            if let Some(num_bytes) = size_of::<T>().checked_mul(num_elements) {
                if let Some(end) = byte_offset.checked_add(num_bytes) {
                    if end <= self.vmo_size {
                        return Ok((self.map_addr + byte_offset) as *const T);
                    }
                }
            }
        }

        Err(Error::InvalidInput)
    }

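    /// Like `validate_and_get_ptr`, but returns a mutable pointer.
    ///
    /// Panics if the VMO was not mapped as writable.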
    fn validate_and_get_mut_ptr<T>(
        &mut self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<*mut T, Error> {
        if !self.writable {
            panic!("MemoryMappedVmo is not writable");
        }

        Ok(self.validate_and_get_ptr::<T>(byte_offset, num_elements)? as *mut T)
    }

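    /// Returns a shared slice of `num_elements` values of type `T` starting at `byte_offset`.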
    pub fn get_slice<'a, T: MemoryMappable>(
        &'a self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<&'a [T], Error> {
        let ptr = self.validate_and_get_ptr(byte_offset, num_elements)?;
        unsafe { Ok(std::slice::from_raw_parts(ptr, num_elements)) }
    }

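    /// Returns a shared reference to the value of type `T` at `byte_offset`.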
    pub fn get_object<'a, T: MemoryMappable>(&'a self, byte_offset: usize) -> Result<&'a T, Error> {
        let ptr = self.validate_and_get_ptr(byte_offset, 1)?;
        unsafe { Ok(&*ptr) }
    }

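    /// Returns an exclusive slice of `num_elements` values of type `T` starting at `byte_offset`.
    ///
    /// Panics if the VMO was mapped read-only.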
    pub fn get_slice_mut<'a, T: MemoryMappable>(
        &'a mut self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<&'a mut [T], Error> {
        let ptr = self.validate_and_get_mut_ptr(byte_offset, num_elements)?;
        unsafe { Ok(std::slice::from_raw_parts_mut(ptr, num_elements)) }
    }

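    /// Returns an exclusive reference to the value of type `T` at `byte_offset`.
    ///
    /// Panics if the VMO was mapped read-only.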
    pub fn get_object_mut<'a, T: MemoryMappable>(
        &'a mut self,
        byte_offset: usize,
    ) -> Result<&'a mut T, Error> {
        let ptr = self.validate_and_get_mut_ptr(byte_offset, 1)?;
        unsafe { Ok(&mut *ptr) }
    }
}

impl Drop for MemoryMappedVmo {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: this range was mapped by `new_impl` and is only unmapped here, once.
            fuchsia_runtime::vmar_root_self()
                .unmap(self.map_addr, self.vmo_size)
                .expect("failed to unmap MemoryMappedVmo");
        }
    }
}

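/// Marker trait for types whose values can be reinterpreted directly from mapped VMO memory.
///
/// # Safety
///
/// Implementors must be valid for every possible bit pattern and must not contain pointers,
/// references, or interior mutability.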
pub unsafe trait MemoryMappable {}

unsafe impl MemoryMappable for u8 {}
unsafe impl MemoryMappable for u16 {}
unsafe impl MemoryMappable for u32 {}
unsafe impl MemoryMappable for u64 {}
unsafe impl<T: MemoryMappable> MemoryMappable for [T] {}
unsafe impl<T: MemoryMappable, const N: usize> MemoryMappable for [T; N] {}

#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;

    const TEST_DATA: [u64; 4] = [11, 22, 33, 44];
    const TEST_DATA_SIZE: usize = size_of::<u64>() * TEST_DATA.len();

    #[test]
    fn test_vmo_size() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();

        assert_eq!(m.vmo_size(), TEST_DATA_SIZE);
    }

    #[test]
    fn test_write_objects_read_slice() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();

        {
            // Write each element individually.
            let mut m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
            for (i, val) in TEST_DATA.iter().enumerate() {
                *m.get_object_mut(size_of::<u64>() * i).unwrap() = *val;
            }
        }

        {
            // Read everything back as a single slice.
            let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
            assert_eq!(*m.get_slice::<u64>(0, 4).unwrap(), TEST_DATA);
        }
    }

    #[test]
    fn test_write_slice_read_objects() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();

        {
            // Write all elements at once.
            let mut m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
            m.get_slice_mut(0, 4).unwrap().copy_from_slice(&TEST_DATA);
        }

        {
            // Read each element back individually.
            let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
            for (i, expected_val) in TEST_DATA.iter().enumerate() {
                let actual_val: &u64 = m.get_object(size_of::<u64>() * i).unwrap();
                assert_eq!(*actual_val, *expected_val, "value mismatch at i={}", i);
            }
        }
    }

    #[test]
    fn test_write_slice_read_subslices() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();

        // Fill the buffer.
        let mut m = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
        m.get_slice_mut::<u64>(0, COUNT).unwrap().copy_from_slice(&[11, 22, 33, 44]);

        // Read back sub-slices of various lengths, all starting at the second element.
        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 0).unwrap(), []);
        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 1).unwrap(), [22]);
        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 2).unwrap(), [22, 33]);
        assert_eq!(*m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, 3).unwrap(), [22, 33, 44]);
    }

    #[test]
    fn test_uninitialized_is_zero() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
        let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        assert_eq!(*m.get_slice::<u64>(0, COUNT).unwrap(), [0; COUNT]);
    }

    #[test]
    fn test_range_errors() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
        let m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        // A misaligned offset must be rejected.
        const MISALIGNED_OFFSET: usize = size_of::<u64>() - 1;
        assert_matches!(m.get_object::<u64>(MISALIGNED_OFFSET), Err(Error::InvalidInput));

        // A range that extends past the end of the VMO must be rejected.
        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
        assert_matches!(
            m.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, COUNT),
            Err(Error::InvalidInput)
        );
    }

    #[test]
    #[should_panic(expected = "MemoryMappedVmo is not writable")]
    fn test_cannot_get_mutable_slice_from_readonly_vmo() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mut m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        let _ = m.get_slice_mut::<u64>(0, 1);
    }

    #[test]
    #[should_panic(expected = "MemoryMappedVmo is not writable")]
    fn test_cannot_get_mutable_object_from_readonly_vmo() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mut m = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        let _ = m.get_object_mut::<u64>(0);
    }
}