// starnix_uapi/user_address.rs

1// Copyright 2023 The Fuchsia Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5use super::errors::Errno;
6use super::math::round_up_to_increment;
7use super::uapi;
8use crate::{error, uref};
9use std::marker::PhantomData;
10use std::{fmt, mem, ops};
11use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
12use zx_types::zx_vaddr_t;
13
/// A virtual address in a task's userspace address space.
///
/// Always stored as a `u64`, regardless of the task's architecture; the
/// 32-bit (arch32) counterpart is [`UserAddress32`]. The zerocopy derives
/// allow the address to be read from / written to raw user memory buffers.
#[derive(
    Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd, IntoBytes, KnownLayout, FromBytes, Immutable,
)]
#[repr(transparent)]
pub struct UserAddress(u64);
19
/// A virtual address in a 32-bit (arch32) task's address space.
///
/// Widens losslessly into [`UserAddress`]; the reverse conversion is fallible
/// (`TryFrom`) because a 64-bit address may not fit in 32 bits.
#[derive(
    Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd, IntoBytes, KnownLayout, FromBytes, Immutable,
)]
#[repr(transparent)]
pub struct UserAddress32(u32);
25
impl UserAddress32 {
    // Bit pattern of the null user pointer.
    const NULL_PTR: u32 = 0;

    /// The null 32-bit user address.
    pub const NULL: Self = Self(Self::NULL_PTR);
}
31
32impl UserAddress {
33    const NULL_PTR: u64 = 0;
34
35    pub const NULL: Self = Self(Self::NULL_PTR);
36
37    // TODO(lindkvist): Remove this in favor of marking the From<u64> trait const once feature is
38    // stabilized.
39    pub const fn const_from(value: u64) -> Self {
40        UserAddress(value)
41    }
42
43    pub fn from_ptr(ptr: zx_vaddr_t) -> Self {
44        UserAddress(ptr as u64)
45    }
46
47    pub fn ptr(&self) -> zx_vaddr_t {
48        self.0 as zx_vaddr_t
49    }
50
51    pub fn round_up(&self, increment: u64) -> Result<UserAddress, Errno> {
52        Ok(UserAddress(round_up_to_increment(self.0 as usize, increment as usize)? as u64))
53    }
54
55    pub fn is_aligned(&self, alignment: u64) -> bool {
56        self.0 % alignment == 0
57    }
58
59    pub fn is_null(&self) -> bool {
60        self.0 == UserAddress::NULL_PTR
61    }
62
63    pub fn checked_add(&self, rhs: usize) -> Option<UserAddress> {
64        self.0.checked_add(rhs as u64).map(UserAddress)
65    }
66
67    pub fn checked_add_signed(&self, rhs: isize) -> Option<UserAddress> {
68        self.0.checked_add_signed(rhs as i64).map(UserAddress)
69    }
70
71    pub fn checked_sub(&self, rhs: usize) -> Option<UserAddress> {
72        self.0.checked_sub(rhs as u64).map(UserAddress)
73    }
74
75    pub fn saturating_add(&self, rhs: usize) -> Self {
76        UserAddress(self.0.saturating_add(rhs as u64))
77    }
78
79    pub fn saturating_sub(&self, rhs: usize) -> Self {
80        UserAddress(self.0.saturating_sub(rhs as u64))
81    }
82
83    pub fn is_lower_32bit(&self) -> bool {
84        self.0 < (1 << 32)
85    }
86}
87
impl Default for UserAddress {
    /// The default address is null.
    fn default() -> Self {
        Self::NULL
    }
}

impl From<u64> for UserAddress {
    fn from(value: u64) -> Self {
        UserAddress(value)
    }
}

impl From<UserAddress> for u64 {
    fn from(value: UserAddress) -> Self {
        value.0
    }
}

// Lossless conversions to/from the 64-bit uapi address wire type.
impl From<uapi::uaddr> for UserAddress {
    fn from(value: uapi::uaddr) -> Self {
        UserAddress(value.addr)
    }
}

impl From<UserAddress> for uapi::uaddr {
    fn from(value: UserAddress) -> Self {
        Self { addr: value.0 }
    }
}

// A 32-bit uapi address always widens; narrowing back is fallible.
impl From<uapi::uaddr32> for UserAddress {
    fn from(value: uapi::uaddr32) -> Self {
        UserAddress(value.addr.into())
    }
}

impl TryFrom<UserAddress> for uapi::uaddr32 {
    type Error = ();
    /// Fails with `()` when the address does not fit in 32 bits.
    fn try_from(value: UserAddress) -> Result<Self, ()> {
        Ok(Self { addr: value.0.try_into().map_err(|_| ())? })
    }
}
130
131impl range_map::Gap for UserAddress {
132    fn measure_gap(&self, other: &Self) -> u64 {
133        if self.0 > other.0 { self.0 - other.0 } else { other.0 - self.0 }
134    }
135}
136
137impl ops::Add<u32> for UserAddress {
138    type Output = Result<UserAddress, Errno>;
139
140    fn add(self, rhs: u32) -> Result<UserAddress, Errno> {
141        self.checked_add(rhs as usize).map_or_else(|| error!(EFAULT), |res| Ok(res))
142    }
143}
144
145impl ops::Add<u64> for UserAddress {
146    type Output = Result<UserAddress, Errno>;
147
148    fn add(self, rhs: u64) -> Result<UserAddress, Errno> {
149        self.checked_add(rhs as usize).map_or_else(|| error!(EFAULT), |res| Ok(res))
150    }
151}
152
153impl ops::Add<usize> for UserAddress {
154    type Output = Result<UserAddress, Errno>;
155
156    fn add(self, rhs: usize) -> Result<UserAddress, Errno> {
157        self.checked_add(rhs).map_or_else(|| error!(EFAULT), |res| Ok(res))
158    }
159}
160
161impl ops::Sub<u32> for UserAddress {
162    type Output = Result<UserAddress, Errno>;
163
164    fn sub(self, rhs: u32) -> Result<UserAddress, Errno> {
165        self.checked_sub(rhs as usize).map_or_else(|| error!(EFAULT), |res| Ok(res))
166    }
167}
168
169impl ops::Sub<u64> for UserAddress {
170    type Output = Result<UserAddress, Errno>;
171
172    fn sub(self, rhs: u64) -> Result<UserAddress, Errno> {
173        self.checked_sub(rhs as usize).map_or_else(|| error!(EFAULT), |res| Ok(res))
174    }
175}
176
177impl ops::Sub<usize> for UserAddress {
178    type Output = Result<UserAddress, Errno>;
179
180    fn sub(self, rhs: usize) -> Result<UserAddress, Errno> {
181        self.checked_sub(rhs).map_or_else(|| error!(EFAULT), |res| Ok(res))
182    }
183}
184
impl ops::Sub<UserAddress> for UserAddress {
    type Output = usize;

    /// Byte distance from `rhs` up to `self`.
    ///
    /// NOTE(review): if `rhs > self` this subtraction underflows — it panics
    /// in debug builds and wraps in release builds. Callers are expected to
    /// pass `rhs <= self`.
    fn sub(self, rhs: UserAddress) -> usize {
        self.ptr() - rhs.ptr()
    }
}

impl ops::Rem<u64> for UserAddress {
    type Output = u64;

    /// Remainder of the raw address value modulo `rhs`.
    /// Panics if `rhs` is zero.
    fn rem(self, rhs: u64) -> Self::Output {
        self.0 % rhs
    }
}
200
// Addresses always format as hexadecimal with a `0x` prefix.

impl fmt::Display for UserAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:#x}", self.0)
    }
}

impl fmt::Debug for UserAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("UserAddress").field(&format_args!("{:#x}", self.0)).finish()
    }
}

impl fmt::Debug for UserAddress32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("UserAddress32").field(&format_args!("{:#x}", self.0)).finish()
    }
}

impl Default for UserAddress32 {
    /// The default 32-bit address is null.
    fn default() -> Self {
        Self::NULL
    }
}
224
225impl From<u32> for UserAddress32 {
226    fn from(value: u32) -> Self {
227        UserAddress32(value)
228    }
229}
230
231impl TryFrom<UserAddress> for UserAddress32 {
232    type Error = Errno;
233    fn try_from(value: UserAddress) -> Result<Self, Self::Error> {
234        match u32::try_from(value.0) {
235            Ok(address_value) => Ok(UserAddress32(address_value)),
236            Err(_) => error!(EFAULT),
237        }
238    }
239}
240
241impl From<UserAddress32> for UserAddress {
242    fn from(value: UserAddress32) -> Self {
243        UserAddress(value.0 as u64)
244    }
245}
246
/// A typed pointer into a task's user address space.
///
/// Wraps a [`UserAddress`] and records the pointee type `T` without owning
/// any memory; `PhantomData<T>` only carries the type so that element-size
/// arithmetic (`next`, `at`) can be done per `T`.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd)]
#[repr(transparent)]
pub struct UserRef<T> {
    addr: UserAddress,
    phantom: PhantomData<T>,
}
253
254impl<T> UserRef<T> {
255    pub fn new(addr: UserAddress) -> Self {
256        Self { addr, phantom: PhantomData }
257    }
258
259    pub fn addr(&self) -> UserAddress {
260        self.addr
261    }
262
263    pub fn next(&self) -> Result<UserRef<T>, Errno> {
264        self.addr()
265            .checked_add(mem::size_of::<T>())
266            .map_or_else(|| error!(EFAULT), |res| Ok(Self::new(res)))
267    }
268
269    pub fn at(&self, index: usize) -> Result<Self, Errno> {
270        let mem_offset = index * mem::size_of::<T>();
271        self.addr().checked_add(mem_offset).map_or_else(|| error!(EFAULT), |res| Ok(Self::new(res)))
272    }
273
274    pub fn cast<S>(&self) -> UserRef<S> {
275        UserRef::<S>::new(self.addr)
276    }
277}
278
279impl<T> Clone for UserRef<T> {
280    fn clone(&self) -> Self {
281        Self { addr: self.addr, phantom: Default::default() }
282    }
283}
284
285impl<T> Copy for UserRef<T> {}
286
287impl<T> Default for UserRef<T> {
288    fn default() -> Self {
289        Self::new(UserAddress::default())
290    }
291}
292
// Conversions between `UserRef<T>`, the untyped `UserAddress`, and the uapi
// `uref<T>` wire representation.

impl<T> From<UserAddress> for UserRef<T> {
    fn from(user_address: UserAddress) -> Self {
        Self::new(user_address)
    }
}

impl<T> From<UserRef<T>> for UserAddress {
    fn from(user: UserRef<T>) -> UserAddress {
        user.addr
    }
}

impl<T> From<uapi::uref<T>> for UserRef<T> {
    fn from(value: uapi::uref<T>) -> Self {
        Self::new(value.addr.into())
    }
}

impl<T> From<UserRef<T>> for uapi::uref<T> {
    // Round-trips through `uapi::uaddr`, which converts into `uref<T>`.
    fn from(value: UserRef<T>) -> Self {
        uapi::uaddr::from(value.addr).into()
    }
}
316
// Deref lets a `UserRef<T>` be used anywhere a `&UserAddress` is expected.
impl<T> ops::Deref for UserRef<T> {
    type Target = UserAddress;

    fn deref(&self) -> &UserAddress {
        &self.addr
    }
}

impl<T> fmt::Display for UserRef<T> {
    // Displays as the underlying address (hex with `0x` prefix).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.addr().fmt(f)
    }
}

impl<T> fmt::Debug for UserRef<T> {
    // Includes the pointee type name, e.g. `UserRef<u32>(0x1000)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "UserRef<{}>({:#x})", std::any::type_name::<T>(), self.addr().ptr())
    }
}
336
/// Something that knows whether it belongs to a 32-bit (arch32) or 64-bit
/// context — e.g. a task, or a multi-arch reference.
pub trait ArchSpecific {
    fn is_arch32(&self) -> bool;
}

/// Conversion from `T` that may differ between 64-bit and 32-bit contexts.
pub trait MultiArchFrom<T>: Sized {
    fn from_64(value: T) -> Self;
    fn from_32(value: T) -> Self;
}

// Any ordinary `From` conversion is arch-independent: both arms delegate to
// the same `From::from`.
impl<T, U: From<T>> MultiArchFrom<T> for U {
    fn from_64(value: T) -> Self {
        Self::from(value)
    }
    fn from_32(value: T) -> Self {
        Self::from(value)
    }
}

/// Consuming counterpart of [`MultiArchFrom::from_64`] (like `Into` vs `From`).
pub trait Into64<T>: Sized {
    fn into_64(self) -> T;
}

impl<T, U: MultiArchFrom<T>> Into64<U> for T {
    fn into_64(self) -> U {
        U::from_64(self)
    }
}

/// Consuming counterpart of [`MultiArchFrom::from_32`].
pub trait Into32<T>: Sized {
    fn into_32(self) -> T;
}

impl<T, U: MultiArchFrom<T>> Into32<U> for T {
    fn into_32(self) -> U {
        U::from_32(self)
    }
}
374
/// A user-space reference whose pointee layout depends on the task's
/// architecture: `T64` is the 64-bit representation, `T32` the 32-bit one,
/// and `T` is the common in-kernel type both map to.
#[derive(Debug)]
pub enum MappingMultiArchUserRef<T, T64, T32> {
    Arch64(UserRef<T64>, core::marker::PhantomData<T>),
    Arch32(UserRef<T32>),
}

/// Common case where the in-kernel type is the 64-bit representation itself.
pub type MultiArchUserRef<T64, T32> = MappingMultiArchUserRef<T64, T64, T32>;
382
impl<T, T64, T32> MappingMultiArchUserRef<T, T64, T32> {
    /// Builds a reference at `address`, selecting the arch32 or arch64
    /// variant based on `arch`.
    pub fn new<Arch: ArchSpecific, Addr: Into<UserAddress>>(arch: &Arch, address: Addr) -> Self {
        if arch.is_arch32() {
            Self::Arch32(address.into().into())
        } else {
            Self::Arch64(address.into().into(), Default::default())
        }
    }

    /// Builds a reference from something fallibly convertible into either
    /// typed `UserRef`, converting to the variant matching `arch`.
    pub fn new_with_ref<
        E,
        Arch: ArchSpecific,
        UR: TryInto<UserRef<T64>, Error = E> + TryInto<UserRef<T32>, Error = E>,
    >(
        arch: &Arch,
        user_ref: UR,
    ) -> Result<Self, E> {
        if arch.is_arch32() {
            user_ref.try_into().map(Self::Arch32)
        } else {
            user_ref.try_into().map(|r| Self::Arch64(r, Default::default()))
        }
    }

    /// A null reference in the variant matching `arch`.
    pub fn null<Arch: ArchSpecific>(arch: &Arch) -> Self {
        Self::new(arch, UserAddress::NULL)
    }

    /// Wraps an arch32 reference directly.
    pub fn from_32(addr: UserRef<T32>) -> Self {
        Self::Arch32(addr)
    }

    /// Returns true if the referenced address is null.
    pub fn is_null(&self) -> bool {
        self.addr() == UserAddress::NULL
    }

    /// The underlying untyped address, regardless of variant.
    pub fn addr(&self) -> UserAddress {
        match self {
            Self::Arch64(addr, _) => addr.addr(),
            Self::Arch32(addr) => addr.addr(),
        }
    }
}
426
impl<T: TryInto<T64> + TryInto<T32>, T64: Immutable + IntoBytes, T32: Immutable + IntoBytes>
    MappingMultiArchUserRef<T, T64, T32>
{
    /// Serializes `value` into the byte layout of the architecture described
    /// by `arch` (`T32` bytes for arch32, `T64` bytes otherwise).
    ///
    /// Returns `Err(())` if `value` cannot be converted into the
    /// arch-specific representation.
    pub fn into_bytes<Arch: ArchSpecific>(arch: &Arch, value: T) -> Result<Vec<u8>, ()> {
        if arch.is_arch32() {
            TryInto::<T32>::try_into(value).map(|v| v.as_bytes().to_owned()).map_err(|_| ())
        } else {
            TryInto::<T64>::try_into(value).map(|v| v.as_bytes().to_owned()).map_err(|_| ())
        }
    }
}
438
439impl<T, T64: FromBytes, T32: FromBytes> MappingMultiArchUserRef<T, T64, T32> {
440    pub fn size_of_object(&self) -> usize {
441        Self::size_of_object_for(self)
442    }
443
444    pub fn size_of_object_for<A: ArchSpecific>(a: &A) -> usize {
445        if a.is_arch32() { std::mem::size_of::<T32>() } else { std::mem::size_of::<T64>() }
446    }
447
448    pub fn align_of_object_for<A: ArchSpecific>(a: &A) -> usize {
449        if a.is_arch32() { std::mem::align_of::<T32>() } else { std::mem::align_of::<T64>() }
450    }
451
452    pub fn next(&self) -> Result<Self, Errno> {
453        self.addr()
454            .checked_add(self.size_of_object())
455            .map_or_else(|| error!(EFAULT), |res| Ok(Self::new(self, res)))
456    }
457
458    pub fn at(&self, index: usize) -> Result<Self, Errno> {
459        let mem_offset = index * self.size_of_object();
460        self.addr()
461            .checked_add(mem_offset)
462            .map_or_else(|| error!(EFAULT), |res| Ok(Self::new(self, res)))
463    }
464}
465
impl<T, T64: FromBytes + TryInto<T>, T32: FromBytes + TryInto<T>>
    MappingMultiArchUserRef<T, T64, T32>
{
    /// Parses a `T` from the front of `bytes` using the layout of the
    /// architecture described by `a`, then converts it to the common type.
    ///
    /// Returns `Err(())` if `bytes` is too short or the conversion fails.
    pub fn read_from_prefix<A: ArchSpecific>(a: &A, bytes: &[u8]) -> Result<T, ()> {
        if a.is_arch32() {
            T32::read_from_prefix(bytes).map_err(|_| ())?.0.try_into().map_err(|_| ())
        } else {
            T64::read_from_prefix(bytes).map_err(|_| ())?.0.try_into().map_err(|_| ())
        }
    }
}
477
// `next` for a pointer-to-pointer: the element being stepped over is itself a
// user pointer, so the stride is the pointer size of the current architecture
// rather than `size_of` of the (enum-typed) pointee.
impl<T, T64, T32>
    MappingMultiArchUserRef<
        MappingMultiArchUserRef<T, T64, T32>,
        MappingMultiArchUserRef<T, T64, T32>,
        MappingMultiArchUserRef<T, T64, T32>,
    >
{
    /// Advances by one user-pointer-sized slot, or `EFAULT` on overflow.
    pub fn next(&self) -> Result<Self, Errno> {
        let offset = if self.is_arch32() {
            std::mem::size_of::<UserAddress32>()
        } else {
            std::mem::size_of::<UserAddress>()
        };
        self.addr()
            .checked_add(offset)
            .map_or_else(|| error!(EFAULT), |user_address| Ok(Self::new(self, user_address)))
    }
}
496
497impl<T, T64, T32> Clone for MappingMultiArchUserRef<T, T64, T32> {
498    fn clone(&self) -> Self {
499        match self {
500            Self::Arch64(ur, _) => Self::Arch64(*ur, Default::default()),
501            Self::Arch32(ur) => Self::Arch32(*ur),
502        }
503    }
504}
505
506impl<T, T64, T32> Copy for MappingMultiArchUserRef<T, T64, T32> {}
507
impl<T, T64, T32> ArchSpecific for MappingMultiArchUserRef<T, T64, T32> {
    /// The variant itself records the architecture.
    fn is_arch32(&self) -> bool {
        matches!(self, Self::Arch32(_))
    }
}

// Deref to the untyped address, mirroring `UserRef`'s Deref.
impl<T, T64, T32> ops::Deref for MappingMultiArchUserRef<T, T64, T32> {
    type Target = UserAddress;

    fn deref(&self) -> &UserAddress {
        match self {
            Self::Arch64(addr, _) => addr.deref(),
            Self::Arch32(addr) => addr.deref(),
        }
    }
}
524
// Conversions to/from the uapi `uref`/`uref32` wire types. Infallible in the
// widening direction; `TryFrom` in the narrowing direction fails with `()`
// when the reference's architecture does not match the target type.

impl<T, T64, T32> From<UserRef<T64>> for MappingMultiArchUserRef<T, T64, T32> {
    fn from(addr: UserRef<T64>) -> Self {
        Self::Arch64(addr, Default::default())
    }
}

impl<T, T64, T32> From<uref<T64>> for MappingMultiArchUserRef<T, T64, T32> {
    fn from(addr: uref<T64>) -> Self {
        Self::Arch64(addr.into(), Default::default())
    }
}

impl<T, T64, T32> TryFrom<MappingMultiArchUserRef<T, T64, T32>> for uref<T64> {
    type Error = ();
    /// Fails for arch32 references.
    fn try_from(addr: MappingMultiArchUserRef<T, T64, T32>) -> Result<Self, ()> {
        if addr.is_arch32() { Err(()) } else { Ok(uapi::uaddr::from(addr.addr()).into()) }
    }
}

impl<T, T64, T32> From<crate::uref32<T32>> for MappingMultiArchUserRef<T, T64, T32> {
    fn from(addr: crate::uref32<T32>) -> Self {
        Self::Arch32(uref::from(addr).into())
    }
}

impl<T, T64, T32> TryFrom<MappingMultiArchUserRef<T, T64, T32>> for crate::uref32<T32> {
    type Error = ();
    /// Fails for arch64 references, or if the address does not fit in 32 bits.
    fn try_from(addr: MappingMultiArchUserRef<T, T64, T32>) -> Result<Self, ()> {
        if addr.is_arch32() { Ok(uapi::uaddr32::try_from(addr.addr())?.into()) } else { Err(()) }
    }
}
556
/// Pointer to a C `long`: 64-bit on arch64, 32-bit on arch32.
pub type LongPtr = MultiArchUserRef<u64, u32>;
/// Pointer to a NUL-terminated user string (bytes on both architectures).
pub type UserCString = MultiArchUserRef<u8, u8>;
/// Pointer to a user string pointer (e.g. an `argv`/`envp` slot).
pub type UserCStringPtr = MultiArchUserRef<UserCString, UserCString>;

impl fmt::Display for UserCString {
    // Displays as the underlying address.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.addr().fmt(f)
    }
}
566
#[cfg(test)]
mod tests {
    use super::{UserAddress, UserRef};

    // Verifies the From<UserAddress> conversions into UserRef<T> for both the
    // default (null) address and an arbitrary non-null address.
    #[test]
    fn test_into() {
        assert_eq!(UserRef::<u32>::default(), UserAddress::default().into());
        let user_address = UserAddress::from(32);
        assert_eq!(UserRef::<i32>::new(user_address), user_address.into());
    }
}
577}