use crate::mm::PAGE_SIZE;
use crate::vfs::{FsStr, FsString};
use smallvec::SmallVec;
use starnix_types::user_buffer::{UserBuffer, UserBuffers, UserBuffers32};
use starnix_uapi::errors::Errno;
use starnix_uapi::user_address::{
    ArchSpecific, MappingMultiArchUserRef, MultiArchUserRef, UserAddress, UserAddress32,
    UserCString, UserRef,
};
use starnix_uapi::user_value::UserValue;
use starnix_uapi::{PATH_MAX, UIO_MAXIOV, errno, error, uapi};
use std::ffi::CStr;
use std::mem::MaybeUninit;
use usercopy::slice_to_maybe_uninit_mut;
use zerocopy::{FromBytes, Immutable, IntoBytes};

/// A user-space pointer to an `iovec`, usable for both native and arch32 tasks.
pub type IOVecPtr = MultiArchUserRef<uapi::iovec, uapi::arch32::iovec>;

/// Low-level access to the memory of a task.
pub trait MemoryAccessor {
    /// Reads exactly `bytes.len()` bytes from `addr` into `bytes`, returning the initialized
    /// slice. Fails if any part of the range cannot be read.
    fn read_memory<'a>(
        &self,
        addr: UserAddress,
        bytes: &'a mut [MaybeUninit<u8>],
    ) -> Result<&'a mut [u8], Errno>;

    /// Like `read_memory_partial`, but stops reading after the first null byte.
    fn read_memory_partial_until_null_byte<'a>(
        &self,
        addr: UserAddress,
        bytes: &'a mut [MaybeUninit<u8>],
    ) -> Result<&'a mut [u8], Errno>;

    /// Reads bytes from `addr` until either `bytes` is full or no more bytes can be read,
    /// returning the initialized prefix.
    fn read_memory_partial<'a>(
        &self,
        addr: UserAddress,
        bytes: &'a mut [MaybeUninit<u8>],
    ) -> Result<&'a mut [u8], Errno>;

    /// Writes all of `bytes` to `addr`, returning the number of bytes written.
    fn write_memory(&self, addr: UserAddress, bytes: &[u8]) -> Result<usize, Errno>;

    /// Writes as many bytes of `bytes` to `addr` as possible, returning the number written.
    fn write_memory_partial(&self, addr: UserAddress, bytes: &[u8]) -> Result<usize, Errno>;

    /// Writes `length` zero bytes starting at `addr`, returning the number of bytes zeroed.
    fn zero(&self, addr: UserAddress, length: usize) -> Result<usize, Errno>;
}

/// A `MemoryAccessor` that is bound to a specific task's address space.
pub trait TaskMemoryAccessor: MemoryAccessor {
    /// Returns the maximum valid user address for this task, if any.
    fn maximum_valid_address(&self) -> Option<UserAddress>;
}

/// Views the storage of `object` as a mutable slice of (possibly uninitialized) bytes.
#[inline]
fn object_as_mut_bytes<T: FromBytes + Sized>(
    object: &mut MaybeUninit<T>,
) -> &mut [MaybeUninit<u8>] {
    // SAFETY: `T` is `FromBytes`, so any byte pattern written through the returned slice
    // produces a valid `T`; the slice covers exactly the `size_of::<T>()` bytes of `object`.
    unsafe {
        std::slice::from_raw_parts_mut(
            object.as_mut_ptr() as *mut MaybeUninit<u8>,
            std::mem::size_of::<T>(),
        )
    }
}

/// Views the storage of `slice` as a mutable slice of (possibly uninitialized) bytes.
#[inline]
fn slice_as_mut_bytes<T: FromBytes + Sized>(
    slice: &mut [MaybeUninit<T>],
) -> &mut [MaybeUninit<u8>] {
    // SAFETY: `T` is `FromBytes`, so any byte pattern written through the returned slice
    // produces valid `T`s; the slice covers exactly the bytes backing `slice`.
    unsafe {
        std::slice::from_raw_parts_mut(
            slice.as_mut_ptr() as *mut MaybeUninit<u8>,
            slice.len() * std::mem::size_of::<T>(),
        )
    }
}

/// Newtype used by the read callbacks below to report how many elements were initialized.
pub struct NumberOfElementsRead(pub usize);

/// Reads up to `max_len` elements of type `T` into a new `Vec` using `read_fn`.
///
/// # Safety
///
/// `read_fn` must initialize every element it reports as read, and the returned
/// `NumberOfElementsRead` must not exceed the length of the buffer it was given.
#[inline]
pub unsafe fn read_to_vec<T: FromBytes, E>(
    max_len: usize,
    read_fn: impl FnOnce(&mut [MaybeUninit<T>]) -> Result<NumberOfElementsRead, E>,
) -> Result<Vec<T>, E> {
    let mut buffer = Vec::with_capacity(max_len);
    let NumberOfElementsRead(read_elements) = read_fn(&mut buffer.spare_capacity_mut()[..max_len])?;
    debug_assert!(read_elements <= max_len, "read_elements={read_elements}, max_len={max_len}");
    // SAFETY: the first `read_elements` elements were initialized by `read_fn`.
    unsafe { buffer.set_len(read_elements) }
    Ok(buffer)
}
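
// Illustrative sketch of the `read_to_vec` contract (not part of this module's API; the
// `accessor`, `addr`, and `len` names are assumed to be a `MemoryAccessor` implementation,
// a user address, and a byte count already in scope):
//
//     let bytes: Vec<u8> = unsafe {
//         read_to_vec(len, |buf| {
//             accessor
//                 .read_memory_partial(addr, buf)
//                 .map(|read| NumberOfElementsRead(read.len()))
//         })
//     }?;
//
// This is the shape used by `read_memory_partial_to_vec` below.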

/// Reads exactly `N` elements of type `T` into a new array using `read_fn`.
///
/// # Safety
///
/// `read_fn` must initialize every element of the buffer it is given before returning `Ok`.
#[inline]
pub unsafe fn read_to_array<T: FromBytes, E, const N: usize>(
    read_fn: impl FnOnce(&mut [MaybeUninit<T>]) -> Result<(), E>,
) -> Result<[T; N], E> {
    let buffer: MaybeUninit<[MaybeUninit<T>; N]> = MaybeUninit::uninit();
    // SAFETY: an array of `MaybeUninit<T>` does not require initialization.
    let mut buffer = unsafe { buffer.assume_init() };
    read_fn(&mut buffer)?;
    // SAFETY: `read_fn` returned `Ok`, so every element has been initialized.
    let buffer = buffer.map(|a| unsafe { a.assume_init() });
    Ok(buffer)
}

/// Reads a single `T` by letting `read_fn` fill in its byte representation.
///
/// # Safety
///
/// `read_fn` must initialize every byte of the buffer it is given before returning `Ok`.
#[inline]
pub unsafe fn read_to_object_as_bytes<T: FromBytes, E>(
    read_fn: impl FnOnce(&mut [MaybeUninit<u8>]) -> Result<(), E>,
) -> Result<T, E> {
    let mut object = MaybeUninit::uninit();
    read_fn(object_as_mut_bytes(&mut object))?;
    // SAFETY: `read_fn` returned `Ok`, so every byte of `object` has been initialized, and
    // `T: FromBytes` accepts any byte pattern.
    let object = unsafe { object.assume_init() };
    Ok(object)
}

/// Convenience helpers layered on top of `MemoryAccessor`.
pub trait MemoryAccessorExt: MemoryAccessor {
    /// Reads exactly `bytes.len()` bytes from `addr` into `bytes`.
    fn read_memory_to_slice(&self, addr: UserAddress, bytes: &mut [u8]) -> Result<(), Errno> {
        let bytes_len = bytes.len();
        self.read_memory(addr, slice_to_maybe_uninit_mut(bytes))
            .map(|bytes_read| debug_assert_eq!(bytes_read.len(), bytes_len))
    }

    /// Reads exactly `len` bytes from `addr` into a new `Vec`.
    fn read_memory_to_vec(&self, addr: UserAddress, len: usize) -> Result<Vec<u8>, Errno> {
        // SAFETY: `read_memory` only returns `Ok` if all `len` bytes were read.
        unsafe {
            read_to_vec::<u8, _>(len, |buf| {
                self.read_memory(addr, buf).map(|bytes_read| {
                    debug_assert_eq!(bytes_read.len(), len);
                    NumberOfElementsRead(len)
                })
            })
        }
    }

    /// Reads as many bytes as possible from `addr`, up to `max_len`, into a new `Vec`.
    fn read_memory_partial_to_vec(
        &self,
        addr: UserAddress,
        max_len: usize,
    ) -> Result<Vec<u8>, Errno> {
        // SAFETY: `read_memory_partial` returns the prefix of the buffer it initialized.
        unsafe {
            read_to_vec::<u8, _>(max_len, |buf| {
                self.read_memory_partial(addr, buf)
                    .map(|bytes_read| NumberOfElementsRead(bytes_read.len()))
            })
        }
    }

    /// Reads exactly `N` bytes from `addr` into a new array.
    fn read_memory_to_array<const N: usize>(&self, addr: UserAddress) -> Result<[u8; N], Errno> {
        // SAFETY: `read_memory` only returns `Ok` if all `N` bytes were read.
        unsafe {
            read_to_array(|buf| {
                self.read_memory(addr, buf).map(|bytes_read| debug_assert_eq!(bytes_read.len(), N))
            })
        }
    }

    /// Reads the contents of `buffer` from user memory.
    fn read_buffer(&self, buffer: &UserBuffer) -> Result<Vec<u8>, Errno> {
        self.read_memory_to_vec(buffer.address, buffer.length)
    }

    /// Reads an object of type `T` from the address referenced by `user`.
    fn read_object<T: FromBytes>(&self, user: UserRef<T>) -> Result<T, Errno> {
        // SAFETY: `read_memory` only returns `Ok` if every byte of the object was read.
        unsafe {
            read_to_object_as_bytes(|buf| {
                self.read_memory(user.addr(), buf)
                    .map(|bytes_read| debug_assert_eq!(bytes_read.len(), std::mem::size_of::<T>()))
            })
        }
    }
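
    // Typical pattern for a syscall that takes a pointer to a struct (illustrative only;
    // `current_task` and `user_timespec` are assumed to be a `MemoryAccessorExt`
    // implementation and a `UserRef<uapi::timespec>` taken from syscall arguments):
    //
    //     let ts: uapi::timespec = current_task.read_object(user_timespec)?;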

    /// Reads a user pointer from the address referenced by `user`, using the 32-bit layout
    /// for arch32 tasks and the native layout otherwise.
    fn read_multi_arch_ptr<T64, T32>(
        &self,
        user: MultiArchUserRef<MultiArchUserRef<T64, T32>, MultiArchUserRef<T64, T32>>,
    ) -> Result<MultiArchUserRef<T64, T32>, Errno> {
        let address = if user.is_arch32() {
            self.read_object::<UserAddress32>(user.addr().into())?.into()
        } else {
            self.read_object::<UserAddress>(user.addr().into())?
        };
        Ok(MultiArchUserRef::<T64, T32>::new(&user, address))
    }

    /// Reads an object from `user`, converting from the arch-specific representation
    /// (`T64` or `T32`) to `T`.
    fn read_multi_arch_object<T, T64: FromBytes + TryInto<T>, T32: FromBytes + TryInto<T>>(
        &self,
        user: MappingMultiArchUserRef<T, T64, T32>,
    ) -> Result<T, Errno> {
        match user {
            MappingMultiArchUserRef::<T, T64, T32>::Arch64(user, _) => {
                self.read_object(user)?.try_into().map_err(|_| errno!(EINVAL))
            }
            MappingMultiArchUserRef::<T, T64, T32>::Arch32(user) => {
                self.read_object(user)?.try_into().map_err(|_| errno!(EINVAL))
            }
        }
    }

    /// Reads `len` objects starting at `user`, converting each from the arch-specific
    /// representation to `T`.
    fn read_multi_arch_objects_to_vec<
        T,
        T64: FromBytes + TryInto<T>,
        T32: FromBytes + TryInto<T>,
    >(
        &self,
        user: MappingMultiArchUserRef<T, T64, T32>,
        len: usize,
    ) -> Result<Vec<T>, Errno> {
        match user {
            MappingMultiArchUserRef::<T, T64, T32>::Arch64(user, _) => self
                .read_objects_to_vec(user, len)?
                .into_iter()
                .map(TryInto::<T>::try_into)
                .collect::<Result<Vec<T>, _>>()
                .map_err(|_| errno!(EINVAL)),
            MappingMultiArchUserRef::<T, T64, T32>::Arch32(user) => self
                .read_objects_to_vec(user, len)?
                .into_iter()
                .map(TryInto::<T>::try_into)
                .collect::<Result<Vec<T>, _>>()
                .map_err(|_| errno!(EINVAL)),
        }
    }

    /// Reads the first `partial_size` bytes of an object of type `T` from `user` and
    /// zero-fills the remainder.
    ///
    /// Returns `EINVAL` if `partial_size` is larger than the size of `T`.
    fn read_object_partial<T: FromBytes>(
        &self,
        user: UserRef<T>,
        partial_size: usize,
    ) -> Result<T, Errno> {
        let full_size = std::mem::size_of::<T>();
        if partial_size > full_size {
            return error!(EINVAL);
        }

        let mut object = MaybeUninit::uninit();
        let (to_read, to_zero) = object_as_mut_bytes(&mut object).split_at_mut(partial_size);
        self.read_memory(user.addr(), to_read)?;

        to_zero.fill(MaybeUninit::new(0));

        // SAFETY: every byte of `object` is now initialized: the prefix was read from user
        // memory and the suffix was zero-filled, and `T: FromBytes` accepts any byte pattern.
        Ok(unsafe { object.assume_init() })
    }
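
    // Illustrative sketch (hypothetical names): a syscall that receives a struct plus an
    // explicit size from userspace can read only the bytes the caller provided and rely on
    // the zero-filled tail for the rest:
    //
    //     let value: SomeAbiStruct = current_task.read_object_partial(user_ref, user_size)?;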

    /// Reads `objects.len()` objects of type `T` starting at `user`, returning the
    /// initialized slice.
    fn read_objects<'a, T: FromBytes>(
        &self,
        user: UserRef<T>,
        objects: &'a mut [MaybeUninit<T>],
    ) -> Result<&'a mut [T], Errno> {
        let objects_len = objects.len();
        self.read_memory(user.addr(), slice_as_mut_bytes(objects)).map(|bytes_read| {
            debug_assert_eq!(bytes_read.len(), objects_len * std::mem::size_of::<T>());
            // SAFETY: `read_memory` initialized all of the bytes backing `objects`, and
            // `T: FromBytes` accepts any byte pattern.
            unsafe {
                std::slice::from_raw_parts_mut(bytes_read.as_mut_ptr() as *mut T, objects_len)
            }
        })
    }

    /// Reads `objects.len()` objects of type `T` starting at `user` into `objects`.
    fn read_objects_to_slice<T: FromBytes>(
        &self,
        user: UserRef<T>,
        objects: &mut [T],
    ) -> Result<(), Errno> {
        let objects_len = objects.len();
        self.read_objects(user, slice_to_maybe_uninit_mut(objects))
            .map(|objects_read| debug_assert_eq!(objects_read.len(), objects_len))
    }

    /// Reads `len` objects of type `T` starting at `user` into a new `Vec`.
    fn read_objects_to_vec<T: FromBytes>(
        &self,
        user: UserRef<T>,
        len: usize,
    ) -> Result<Vec<T>, Errno> {
        // SAFETY: `read_objects` only returns `Ok` if all `len` objects were read.
        unsafe {
            read_to_vec::<T, _>(len, |buf| {
                self.read_objects(user, buf).map(|objects_read| {
                    debug_assert_eq!(objects_read.len(), len);
                    NumberOfElementsRead(len)
                })
            })
        }
    }

    /// Reads `len` objects of type `T` starting at `user` into a new `SmallVec`, staying on
    /// the stack when `len` fits in the inline capacity `N`.
    fn read_objects_to_smallvec<T: Clone + FromBytes, const N: usize>(
        &self,
        user: UserRef<T>,
        len: usize,
    ) -> Result<SmallVec<[T; N]>, Errno> {
        if len > N {
            Ok(SmallVec::<[T; N]>::from_vec(self.read_objects_to_vec(user, len)?))
        } else {
            // SAFETY: an array of `MaybeUninit<T>` does not require initialization.
            let mut buffer: [MaybeUninit<T>; N] = unsafe { MaybeUninit::uninit().assume_init() };

            self.read_objects(user, &mut buffer[..len])?;

            // SAFETY: `[MaybeUninit<T>; N]` and `MaybeUninit<[T; N]>` have the same layout.
            let buffer: MaybeUninit<[T; N]> = unsafe { std::mem::transmute_copy(&buffer) };

            // SAFETY: the first `len` elements of `buffer` were initialized above.
            Ok(unsafe { SmallVec::from_buf_and_len_unchecked(buffer, len) })
        }
    }

    /// Reads exactly `N` objects of type `T` starting at `user` into a new array.
    fn read_objects_to_array<T: Copy + FromBytes, const N: usize>(
        &self,
        user: UserRef<T>,
    ) -> Result<[T; N], Errno> {
        // SAFETY: `read_objects` only returns `Ok` if all `N` objects were read.
        unsafe {
            read_to_array(|buf| {
                self.read_objects(user, buf).map(|objects_read| {
                    debug_assert_eq!(objects_read.len(), N);
                })
            })
        }
    }

    /// Reads an array of `iovec` structs for a vectored I/O syscall, validating the count
    /// against `UIO_MAXIOV` and converting arch32 buffers to the native representation.
    fn read_iovec<T: Copy + Eq + IntoBytes + FromBytes + Immutable + TryInto<usize>>(
        &self,
        iovec_addr: IOVecPtr,
        iovec_count: UserValue<T>,
    ) -> Result<UserBuffers, Errno> {
        let iovec_count = iovec_count.raw().try_into().map_err(|_| errno!(EINVAL))?;
        if iovec_count > UIO_MAXIOV as usize {
            return error!(EINVAL);
        }

        if iovec_addr.is_arch32() {
            let ub32s: UserBuffers32 =
                self.read_objects_to_smallvec(iovec_addr.addr().into(), iovec_count)?;
            Ok(ub32s.iter().map(|&ub32| ub32.into()).collect())
        } else {
            self.read_objects_to_smallvec(iovec_addr.addr().into(), iovec_count)
        }
    }
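
    // Typical use in a vectored I/O syscall (illustrative only; `current_task`, `iov_addr`,
    // and `iov_count` are assumed to come from readv/writev syscall arguments):
    //
    //     let buffers: UserBuffers = current_task.read_iovec(iov_addr, iov_count)?;
    //     let total: usize = buffers.iter().map(|b| b.length).sum();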

    /// Reads a null-terminated C string starting at `string`, failing with `ENAMETOOLONG` if
    /// no null byte is found within `max_size` bytes.
    fn read_c_string_to_vec(
        &self,
        string: UserCString,
        max_size: usize,
    ) -> Result<FsString, Errno> {
        let chunk_size = std::cmp::min(*PAGE_SIZE as usize, max_size);

        let mut buf = Vec::with_capacity(chunk_size);
        let mut index = 0;
        loop {
            // This addition should not overflow: the read would fail before that point.
            let addr = string.addr().checked_add(index).ok_or_else(|| errno!(EFAULT))?;
            let read = self.read_memory_partial_until_null_byte(
                addr,
                &mut buf.spare_capacity_mut()[index..][..chunk_size],
            )?;
            let read_len = read.len();

            // Check whether the last byte read is the null terminator.
            if read.last() == Some(&0) {
                let null_index = index + read_len - 1;
                // SAFETY: all bytes up to `null_index` have been initialized.
                unsafe { buf.set_len(null_index) }
                if buf.len() > max_size {
                    return error!(ENAMETOOLONG);
                }

                return Ok(buf.into());
            }
            index += read_len;

            if read_len < chunk_size || index >= max_size {
                // There is nothing more to read.
                return error!(ENAMETOOLONG);
            }

            // Ensure there is spare capacity for the next chunk.
            buf.reserve(index + chunk_size);
        }
    }
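
    // Illustrative sketch (hypothetical names): reading a user-supplied string argument with
    // an explicit size limit, as `read_path` below does with `PATH_MAX`:
    //
    //     let name: FsString = current_task.read_c_string_to_vec(user_name, 255)?;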

    /// Reads a null-terminated path from `path`, bounded by `PATH_MAX`.
    fn read_path(&self, path: UserCString) -> Result<FsString, Errno> {
        self.read_c_string_to_vec(path, PATH_MAX as usize)
    }

    /// Like `read_path`, but returns an empty string when `path` is null.
    fn read_path_if_non_null(&self, path: UserCString) -> Result<FsString, Errno> {
        if path.is_null() {
            Ok(Default::default())
        } else {
            self.read_c_string_to_vec(path, PATH_MAX as usize)
        }
    }

    /// Reads `len` bytes starting at `start` and splits them into a list of null-delimited
    /// strings. A trailing segment without a null terminator is kept as-is.
    fn read_nul_delimited_c_string_list(
        &self,
        start: UserAddress,
        len: usize,
    ) -> Result<Vec<FsString>, Errno> {
        let buf = self.read_memory_to_vec(start, len)?;
        let mut buf = &buf[..];

        let mut list = vec![];
        while !buf.is_empty() {
            let len_consumed = match CStr::from_bytes_until_nul(buf) {
                Ok(segment) => {
                    // Keep the segment without its null terminator, but skip past it.
                    list.push(segment.to_bytes().into());
                    segment.to_bytes_with_nul().len()
                }
                Err(_) => {
                    // No null terminator found; keep the rest of the buffer as one segment.
                    list.push(buf.into());
                    buf.len()
                }
            };
            buf = &buf[len_consumed..];
        }

        Ok(list)
    }
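
    // Behavior sketch: for an 8-byte region containing `b"foo\0bar\0"`, the call
    //
    //     current_task.read_nul_delimited_c_string_list(addr, 8)?
    //
    // returns the two `FsString`s "foo" and "bar"; `current_task` and `addr` are assumed to
    // be a `MemoryAccessorExt` implementation and the start of that region.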

    /// Reads a null-terminated C string starting at `string` into `buffer`, returning the
    /// string without its null terminator. Fails with `ENAMETOOLONG` if no null byte fits
    /// within `buffer`.
    fn read_c_string<'a>(
        &self,
        string: UserCString,
        buffer: &'a mut [MaybeUninit<u8>],
    ) -> Result<&'a FsStr, Errno> {
        let buffer = self.read_memory_partial_until_null_byte(string.addr(), buffer)?;
        // A successful read must end with the null terminator.
        if let Some((null_byte, buffer)) = buffer.split_last() {
            if null_byte == &0 {
                return Ok(buffer.into());
            }
        }

        error!(ENAMETOOLONG)
    }
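
    // Illustrative sketch of the stack-buffer variant (hypothetical names): callers that
    // know an upper bound can avoid a heap allocation entirely:
    //
    //     let mut buf = [MaybeUninit::<u8>::uninit(); PATH_MAX as usize];
    //     let path: &FsStr = current_task.read_c_string(user_path, &mut buf)?;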

    /// Like `read_c_string`, but returns an empty string when `addr` is null.
    fn read_c_string_if_non_null<'a>(
        &self,
        addr: UserCString,
        buffer: &'a mut [MaybeUninit<u8>],
    ) -> Result<&'a FsStr, Errno> {
        if addr.is_null() { Ok(Default::default()) } else { self.read_c_string(addr, buffer) }
    }

    /// Writes `object` to the address referenced by `user`, returning the number of bytes
    /// written.
    fn write_object<T: IntoBytes + Immutable>(
        &self,
        user: UserRef<T>,
        object: &T,
    ) -> Result<usize, Errno> {
        self.write_memory(user.addr(), object.as_bytes())
    }

    /// Writes `objects` contiguously starting at the address referenced by `user`, returning
    /// the number of bytes written.
    fn write_objects<T: IntoBytes + Immutable>(
        &self,
        user: UserRef<T>,
        objects: &[T],
    ) -> Result<usize, Errno> {
        self.write_memory(user.addr(), objects.as_bytes())
    }
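
    // Typical pattern for returning a result struct to userspace (illustrative only;
    // `current_task`, `user_stat`, and `result_stat` are assumed to be a `MemoryAccessorExt`
    // implementation, a `UserRef<uapi::stat>` from syscall arguments, and a filled-in stat):
    //
    //     current_task.write_object(user_stat, &result_stat)?;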

    /// Writes the pointer value of `object` to `user`, using a 32-bit representation for
    /// arch32 pointers and the native representation otherwise.
    fn write_multi_arch_ptr<Addr, T64, T32>(
        &self,
        user: Addr,
        object: MultiArchUserRef<T64, T32>,
    ) -> Result<usize, Errno>
    where
        Addr: Into<UserAddress>,
    {
        if object.is_arch32() {
            let value = u32::try_from(object.ptr()).map_err(|_| errno!(EINVAL))?;
            self.write_memory(user.into(), value.as_bytes())
        } else {
            self.write_memory(user.into(), object.ptr().as_bytes())
        }
    }

    /// Writes `object` to `user`, first converting it to the arch-specific representation
    /// (`T64` or `T32`).
    fn write_multi_arch_object<
        T,
        T64: IntoBytes + Immutable + TryFrom<T>,
        T32: IntoBytes + Immutable + TryFrom<T>,
    >(
        &self,
        user: MappingMultiArchUserRef<T, T64, T32>,
        object: T,
    ) -> Result<usize, Errno> {
        match user {
            MappingMultiArchUserRef::<T, T64, T32>::Arch64(user, _) => {
                self.write_object(user, &T64::try_from(object).map_err(|_| errno!(EINVAL))?)
            }
            MappingMultiArchUserRef::<T, T64, T32>::Arch32(user) => {
                self.write_object(user, &T32::try_from(object).map_err(|_| errno!(EINVAL))?)
            }
        }
    }
}

// `MemoryAccessorExt` is available on every `MemoryAccessor`, including trait objects.
impl MemoryAccessorExt for dyn MemoryAccessor + '_ {}
impl<T: MemoryAccessor> MemoryAccessorExt for T {}
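
// A minimal sketch of what a `MemoryAccessor` implementation looks like (illustrative only,
// not part of this module): an accessor backed by a plain `Vec<u8>` could serve as a test
// double, and it gets every `MemoryAccessorExt` helper above for free through the blanket
// impl. The `ptr()` accessor used to obtain the raw address value is an assumption of the
// sketch.
//
//     struct VecMemory {
//         bytes: Vec<u8>,
//     }
//
//     impl MemoryAccessor for VecMemory {
//         fn read_memory<'a>(
//             &self,
//             addr: UserAddress,
//             bytes: &'a mut [MaybeUninit<u8>],
//         ) -> Result<&'a mut [u8], Errno> {
//             let start = addr.ptr() as usize;
//             let end = start.checked_add(bytes.len()).ok_or_else(|| errno!(EFAULT))?;
//             let src = self.bytes.get(start..end).ok_or_else(|| errno!(EFAULT))?;
//             for (dst, byte) in bytes.iter_mut().zip(src) {
//                 dst.write(*byte);
//             }
//             // SAFETY: every byte of `bytes` was initialized by the loop above.
//             Ok(unsafe {
//                 std::slice::from_raw_parts_mut(bytes.as_mut_ptr() as *mut u8, bytes.len())
//             })
//         }
//         // ...the remaining methods follow the same bounds-checked pattern.
//     }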