1use std::mem::{align_of, size_of};
6use zerocopy::FromBytes;
7
/// Errors returned by [`MemoryMappedVmo`]'s accessor methods.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Error {
    /// The requested range is misaligned for the element type, overflows
    /// `usize`, or extends past the end of the mapped VMO.
    InvalidInput,
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::InvalidInput => write!(f, "invalid input"),
        }
    }
}

// Allows this error to participate in `?`-conversions into `Box<dyn Error>`
// and similar error-trait-object plumbing used by callers.
impl std::error::Error for Error {}
12
/// A VMO mapped into the local address space, providing typed,
/// bounds- and alignment-checked access to its contents.
///
/// The mapping is removed from the address space when this struct is dropped.
pub struct MemoryMappedVmo {
    /// Base address at which the VMO is mapped in this process.
    map_addr: usize,
    /// Content size of the VMO in bytes; every access is validated against it.
    vmo_size: usize,
    /// Whether the mapping was created with write permission; the mutable
    /// accessors panic when this is false.
    writable: bool,
}
19
impl MemoryMappedVmo {
    /// Maps the contents of `vmo` into this process' address space with
    /// read-only permissions.
    ///
    /// # Safety
    ///
    /// NOTE(review): presumably the caller must guarantee that no other
    /// handle/process modifies the VMO's contents while references obtained
    /// from the accessors are alive — confirm against the call sites.
    pub unsafe fn new_readonly(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
        // SAFETY: forwards the caller's obligations to `new_impl`.
        unsafe { Self::new_impl(vmo, false) }
    }

    /// Maps the contents of `vmo` into this process' address space with
    /// read and write permissions.
    ///
    /// # Safety
    ///
    /// Same caller obligations as [`MemoryMappedVmo::new_readonly`].
    pub unsafe fn new_readwrite(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
        // SAFETY: forwards the caller's obligations to `new_impl`.
        unsafe { Self::new_impl(vmo, true) }
    }

    /// Common constructor body: maps the VMO's content range into the root
    /// VMAR and records the mapping so `Drop` can unmap it.
    unsafe fn new_impl(vmo: &zx::Vmo, writable: bool) -> Result<MemoryMappedVmo, zx::Status> {
        // Map exactly the content size; all accessor bounds checks below are
        // performed against this value.
        let vmo_size = vmo.get_content_size()? as usize;

        // REQUIRE_NON_RESIZABLE prevents the mapping from shrinking under us;
        // write permission is added only when requested.
        let mut flags = zx::VmarFlags::PERM_READ
            | zx::VmarFlags::ALLOW_FAULTS
            | zx::VmarFlags::REQUIRE_NON_RESIZABLE;
        if writable {
            flags |= zx::VmarFlags::PERM_WRITE;
        }

        let map_addr = fuchsia_runtime::vmar_root_self().map(0, &vmo, 0, vmo_size, flags)?;
        Ok(MemoryMappedVmo { map_addr, vmo_size, writable })
    }

    /// Returns the size, in bytes, of the mapped VMO content.
    pub fn vmo_size(&self) -> usize {
        self.vmo_size
    }

    /// Validates that `num_elements` values of type `T`, starting at
    /// `byte_offset`, are properly aligned and lie entirely within the
    /// mapping, then returns a typed pointer to the first element.
    ///
    /// All size arithmetic is checked, so a huge `byte_offset` or
    /// `num_elements` cannot wrap around and defeat the range check.
    fn validate_and_get_ptr<T>(
        &self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<*const T, Error> {
        // Checking the offset alone suffices for alignment because the
        // mapping base comes from a VMAR map call.
        // NOTE(review): this assumes the base is at least page-aligned and
        // align_of::<T>() does not exceed the page size — confirm.
        if byte_offset % align_of::<T>() == 0 {
            // Total byte length, rejecting multiplication overflow.
            if let Some(num_bytes) = size_of::<T>().checked_mul(num_elements) {
                // End offset, rejecting addition overflow.
                if let Some(end) = byte_offset.checked_add(num_bytes) {
                    if end <= self.vmo_size {
                        return Ok((self.map_addr + byte_offset) as *const T);
                    }
                }
            }
        }

        // Any failed check (misalignment, overflow, out of range) lands here.
        Err(Error::InvalidInput)
    }

    /// Like [`MemoryMappedVmo::validate_and_get_ptr`], but returns a mutable
    /// pointer.
    ///
    /// # Panics
    ///
    /// Panics if this mapping was created read-only.
    fn validate_and_get_mut_ptr<T>(
        &mut self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<*mut T, Error> {
        if !self.writable {
            panic!("MemoryMappedVmo is not writable");
        }

        Ok(self.validate_and_get_ptr::<T>(byte_offset, num_elements)? as *mut T)
    }

    /// Returns a shared slice of `num_elements` values of type `T` starting
    /// at `byte_offset`, or `Error::InvalidInput` if the range is misaligned
    /// or out of bounds.
    pub fn get_slice<'a, T: FromBytes>(
        &'a self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<&'a [T], Error> {
        let ptr = self.validate_and_get_ptr(byte_offset, num_elements)?;
        // SAFETY: `ptr` is aligned and the whole range was verified to lie
        // within the mapping; `T: FromBytes` makes any byte pattern a valid
        // `T`. The returned lifetime is tied to `&self`, so it cannot outlive
        // the mapping.
        unsafe { Ok(std::slice::from_raw_parts(ptr, num_elements)) }
    }

    /// Returns a shared reference to a single `T` at `byte_offset`, or
    /// `Error::InvalidInput` if it is misaligned or out of bounds.
    pub fn get_object<'a, T: FromBytes>(&'a self, byte_offset: usize) -> Result<&'a T, Error> {
        let ptr = self.validate_and_get_ptr(byte_offset, 1)?;
        // SAFETY: same argument as `get_slice`, for a single element.
        unsafe { Ok(&*ptr) }
    }

    /// Returns a mutable slice of `num_elements` values of type `T` starting
    /// at `byte_offset`, or `Error::InvalidInput` if the range is misaligned
    /// or out of bounds.
    ///
    /// # Panics
    ///
    /// Panics if this mapping was created read-only.
    pub fn get_slice_mut<'a, T: FromBytes>(
        &'a mut self,
        byte_offset: usize,
        num_elements: usize,
    ) -> Result<&'a mut [T], Error> {
        let ptr = self.validate_and_get_mut_ptr(byte_offset, num_elements)?;
        // SAFETY: range validated as in `get_slice`; exclusivity of the
        // mutable reference is enforced by the `&mut self` borrow.
        unsafe { Ok(std::slice::from_raw_parts_mut(ptr, num_elements)) }
    }

    /// Returns a mutable reference to a single `T` at `byte_offset`, or
    /// `Error::InvalidInput` if it is misaligned or out of bounds.
    ///
    /// # Panics
    ///
    /// Panics if this mapping was created read-only.
    pub fn get_object_mut<'a, T: FromBytes>(
        &'a mut self,
        byte_offset: usize,
    ) -> Result<&'a mut T, Error> {
        let ptr = self.validate_and_get_mut_ptr(byte_offset, 1)?;
        // SAFETY: same argument as `get_slice_mut`, for a single element.
        unsafe { Ok(&mut *ptr) }
    }
}
141
impl Drop for MemoryMappedVmo {
    fn drop(&mut self) {
        // SAFETY: `map_addr`/`vmo_size` describe the mapping created in
        // `new_impl` and never unmapped elsewhere, so unmapping exactly once
        // here is sound. References handed out by the accessors borrow
        // `self`, so none can still be alive when `drop` runs.
        unsafe {
            fuchsia_runtime::vmar_root_self()
                .unmap(self.map_addr, self.vmo_size)
                .expect("failed to unmap MemoryMappedVmo");
        }
    }
}
152
#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;

    const TEST_DATA: [u64; 4] = [11, 22, 33, 44];
    const TEST_DATA_SIZE: usize = size_of::<u64>() * TEST_DATA.len();

    #[test]
    fn test_vmo_size() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mapped = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();

        assert_eq!(mapped.vmo_size(), TEST_DATA_SIZE);
    }

    #[test]
    fn test_write_objects_read_slice() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();

        // Write one object at a time through a writable mapping.
        {
            let mut writer = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
            for index in 0..TEST_DATA.len() {
                *writer.get_object_mut(size_of::<u64>() * index).unwrap() = TEST_DATA[index];
            }
        }

        // Read everything back in one go through a fresh read-only mapping.
        {
            let reader = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
            assert_eq!(*reader.get_slice::<u64>(0, 4).unwrap(), TEST_DATA);
        }
    }

    #[test]
    fn test_write_slice_read_objects() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();

        // Write all elements at once through a writable mapping.
        {
            let mut writer = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
            writer.get_slice_mut(0, 4).unwrap().copy_from_slice(&TEST_DATA);
        }

        // Read them back one object at a time through a read-only mapping.
        {
            let reader = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();
            for (i, expected_val) in TEST_DATA.iter().enumerate() {
                let actual_val = *reader.get_object::<u64>(size_of::<u64>() * i).unwrap();
                assert_eq!(actual_val, *expected_val, "value mismatch at i={}", i);
            }
        }
    }

    #[test]
    fn test_write_slice_read_subslices() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();

        let mut mapped = unsafe { MemoryMappedVmo::new_readwrite(&vmo) }.unwrap();
        mapped.get_slice_mut::<u64>(0, COUNT).unwrap().copy_from_slice(&[11, 22, 33, 44]);

        // Subslices of every length starting at the second element.
        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
        let cases: [(usize, &[u64]); 4] =
            [(0, &[]), (1, &[22]), (2, &[22, 33]), (3, &[22, 33, 44])];
        for (num_elements, expected) in cases {
            assert_eq!(
                mapped.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, num_elements).unwrap(),
                expected
            );
        }
    }

    #[test]
    fn test_uninitialized_is_zero() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
        let mapped = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        assert!(mapped.get_slice::<u64>(0, COUNT).unwrap().iter().all(|&val| val == 0));
    }

    #[test]
    fn test_range_errors() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
        let mapped = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        // A misaligned offset must be rejected.
        const MISALIGNED_OFFSET: usize = size_of::<u64>() - 1;
        assert_matches!(mapped.get_object::<u64>(MISALIGNED_OFFSET), Err(Error::InvalidInput));

        // A range that starts in bounds but runs past the end must be rejected.
        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
        assert_matches!(
            mapped.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, COUNT),
            Err(Error::InvalidInput)
        );
    }

    #[test]
    #[should_panic(expected = "MemoryMappedVmo is not writable")]
    fn test_cannot_get_mutable_slice_from_readonly_vmo() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mut mapped = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        let _ = mapped.get_slice_mut::<u64>(0, 1);
    }

    #[test]
    #[should_panic(expected = "MemoryMappedVmo is not writable")]
    fn test_cannot_get_mutable_object_from_readonly_vmo() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mut mapped = unsafe { MemoryMappedVmo::new_readonly(&vmo) }.unwrap();

        let _ = mapped.get_object_mut::<u64>(0);
    }
}