ringbuf/storage.rs

#[cfg(feature = "alloc")]
use alloc::{boxed::Box, vec::Vec};
use core::{cell::UnsafeCell, marker::PhantomData, mem::MaybeUninit, ops::Range, ptr::NonNull, slice};
#[cfg(feature = "alloc")]
use core::{mem::ManuallyDrop, ptr};

/// Abstract storage for the ring buffer.
///
/// Storage items must be stored as a contiguous array.
///
/// # Safety
///
/// The storage must not alias with its contents
/// (it must be safe to hold a mutable reference to the storage itself and to its data at the same time).
///
/// [`Self::as_mut_ptr`] must point to the underlying data.
///
/// [`Self::len`] must always return the same value.
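///
/// # Examples
///
/// A minimal usage sketch with the [`Array`] storage defined below
/// (the import path assumes this module is exposed as `ringbuf::storage`):
///
/// ```
/// use core::mem::MaybeUninit;
/// use ringbuf::storage::{Array, Storage};
///
/// let storage = Array::<u32, 4>::from([MaybeUninit::uninit(); 4]);
/// assert_eq!(storage.len(), 4);
/// unsafe {
///     // Write two items into the first two cells.
///     let cells = storage.slice_mut(0..2);
///     cells[0].write(1);
///     cells[1].write(2);
///     // Read them back through a shared slice.
///     let cells = storage.slice(0..2);
///     assert_eq!(*cells[0].assume_init_ref(), 1);
///     assert_eq!(*cells[1].assume_init_ref(), 2);
/// }
/// ```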
pub unsafe trait Storage {
    /// Stored item.
    type Item: Sized;

    /// Length of the storage.
    fn len(&self) -> usize;
    /// Checks whether the storage has zero length.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns a pointer to the beginning of the storage items.
    fn as_ptr(&self) -> *const MaybeUninit<Self::Item> {
        self.as_mut_ptr().cast_const()
    }
    /// Returns a mutable pointer to the beginning of the storage items.
    fn as_mut_ptr(&self) -> *mut MaybeUninit<Self::Item>;

    /// Returns an immutable slice of the storage in the specified `range`.
    ///
    /// # Safety
    ///
    /// The slice must not overlap with existing mutable slices.
    ///
    /// Non-`Sync` items must not be accessed concurrently.
    unsafe fn slice(&self, range: Range<usize>) -> &[MaybeUninit<Self::Item>] {
        slice::from_raw_parts(self.as_ptr().add(range.start), range.len())
    }
    /// Returns a mutable slice of the storage in the specified `range`.
    ///
    /// # Safety
    ///
    /// The slice must not overlap with other existing slices.
    #[allow(clippy::mut_from_ref)]
    unsafe fn slice_mut(&self, range: Range<usize>) -> &mut [MaybeUninit<Self::Item>] {
        slice::from_raw_parts_mut(self.as_mut_ptr().add(range.start), range.len())
    }
}

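/// Storage that borrows a mutable slice of possibly uninitialized items.
///
/// # Examples
///
/// A minimal sketch of wrapping a stack buffer (the import path assumes this
/// module is exposed as `ringbuf::storage`):
///
/// ```
/// use core::mem::MaybeUninit;
/// use ringbuf::storage::{Ref, Storage};
///
/// let mut buffer = [MaybeUninit::<u8>::uninit(); 16];
/// let storage = Ref::from(buffer.as_mut_slice());
/// assert_eq!(storage.len(), 16);
/// // The borrowed slice can be recovered from the storage.
/// let slice: &mut [MaybeUninit<u8>] = storage.into();
/// assert_eq!(slice.len(), 16);
/// ```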
pub struct Ref<'a, T> {
    _ghost: PhantomData<&'a mut [T]>,
    ptr: *mut MaybeUninit<T>,
    len: usize,
}
unsafe impl<T> Send for Ref<'_, T> where T: Send {}
unsafe impl<T> Sync for Ref<'_, T> where T: Send {}
unsafe impl<T> Storage for Ref<'_, T> {
    type Item = T;
    #[inline]
    fn as_mut_ptr(&self) -> *mut MaybeUninit<T> {
        self.ptr
    }
    #[inline]
    fn len(&self) -> usize {
        self.len
    }
}
impl<'a, T> From<&'a mut [MaybeUninit<T>]> for Ref<'a, T> {
    fn from(value: &'a mut [MaybeUninit<T>]) -> Self {
        Self {
            _ghost: PhantomData,
            ptr: value.as_mut_ptr(),
            len: value.len(),
        }
    }
}
impl<'a, T> From<Ref<'a, T>> for &'a mut [MaybeUninit<T>] {
    fn from(value: Ref<'a, T>) -> Self {
        unsafe { slice::from_raw_parts_mut(value.ptr, value.len) }
    }
}

/// Storage that owns its contents.
pub struct Owning<T: ?Sized> {
    data: UnsafeCell<T>,
}
unsafe impl<T: ?Sized> Sync for Owning<T> where T: Send {}
impl<T> From<T> for Owning<T> {
    fn from(value: T) -> Self {
        Self {
            data: UnsafeCell::new(value),
        }
    }
}

/// Storage backed by an owned array of fixed capacity `N`.
pub type Array<T, const N: usize> = Owning<[MaybeUninit<T>; N]>;
unsafe impl<T, const N: usize> Storage for Array<T, N> {
    type Item = T;
    #[inline]
    fn as_mut_ptr(&self) -> *mut MaybeUninit<T> {
        self.data.get().cast()
    }
    #[inline]
    fn len(&self) -> usize {
        N
    }
}
impl<T, const N: usize> From<Array<T, N>> for [MaybeUninit<T>; N] {
    fn from(value: Array<T, N>) -> Self {
        value.data.into_inner()
    }
}

/// Storage backed by an owned, unsized slice of items.
pub type Slice<T> = Owning<[MaybeUninit<T>]>;
unsafe impl<T> Storage for Slice<T> {
    type Item = T;
    #[inline]
    fn as_mut_ptr(&self) -> *mut MaybeUninit<T> {
        self.data.get().cast()
    }
    #[inline]
    fn len(&self) -> usize {
        unsafe { NonNull::new_unchecked(self.data.get()) }.len()
    }
}

/// Storage allocated on the heap.
#[cfg(feature = "alloc")]
pub struct Heap<T> {
    ptr: *mut MaybeUninit<T>,
    len: usize,
}
#[cfg(feature = "alloc")]
unsafe impl<T> Send for Heap<T> where T: Send {}
#[cfg(feature = "alloc")]
unsafe impl<T> Sync for Heap<T> where T: Send {}
#[cfg(feature = "alloc")]
unsafe impl<T> Storage for Heap<T> {
    type Item = T;
    #[inline]
    fn as_mut_ptr(&self) -> *mut MaybeUninit<T> {
        self.ptr
    }
    #[inline]
    fn len(&self) -> usize {
        self.len
    }
}
#[cfg(feature = "alloc")]
impl<T> Heap<T> {
    /// Creates a new heap storage with exactly the requested capacity.
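    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the `alloc` feature is enabled and this
    /// module is exposed as `ringbuf::storage`:
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use ringbuf::storage::{Heap, Storage};
    ///
    /// // Allocate storage for exactly 8 items; none of them are initialized yet.
    /// let heap = Heap::<u64>::new(8);
    /// assert_eq!(heap.len(), 8);
    ///
    /// // An existing `Vec` can also be turned into storage;
    /// // its spare capacity becomes part of the storage as well.
    /// let vec = Vec::<MaybeUninit<u64>>::with_capacity(8);
    /// let heap = Heap::from(vec);
    /// assert!(heap.len() >= 8);
    /// ```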
    pub fn new(capacity: usize) -> Self {
        let mut data = Vec::<MaybeUninit<T>>::with_capacity(capacity);
        // `data.capacity()` is not guaranteed to be equal to `capacity`,
        // so we enforce it by `set_len` and conversion to a boxed slice.
        unsafe { data.set_len(capacity) };
        Self::from(data.into_boxed_slice())
    }
}
#[cfg(feature = "alloc")]
impl<T> From<Vec<MaybeUninit<T>>> for Heap<T> {
    fn from(mut value: Vec<MaybeUninit<T>>) -> Self {
        // Convert `value` into a boxed slice whose length equals `value.capacity()`,
        // except for zero-sized types, where the length will be `value.len()` because
        // `Vec::capacity` for ZSTs is undefined
        // (see <https://doc.rust-lang.org/std/vec/struct.Vec.html#guarantees>).
        if core::mem::size_of::<T>() != 0 {
            unsafe { value.set_len(value.capacity()) };
        }
        Self::from(value.into_boxed_slice())
    }
}
#[cfg(feature = "alloc")]
impl<T> From<Box<[MaybeUninit<T>]>> for Heap<T> {
    fn from(value: Box<[MaybeUninit<T>]>) -> Self {
        Self {
            len: value.len(),
            ptr: Box::into_raw(value).cast(),
        }
    }
}
#[cfg(feature = "alloc")]
impl<T> From<Heap<T>> for Box<[MaybeUninit<T>]> {
    fn from(value: Heap<T>) -> Self {
        let value = ManuallyDrop::new(value);
        unsafe { Box::from_raw(ptr::slice_from_raw_parts_mut(value.ptr, value.len)) }
    }
}
#[cfg(feature = "alloc")]
impl<T> Drop for Heap<T> {
    fn drop(&mut self) {
        drop(unsafe { Box::from_raw(ptr::slice_from_raw_parts_mut(self.ptr, self.len)) });
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use core::{cell::Cell, marker::PhantomData};

    struct Check<S: Storage + Send + Sync + ?Sized>(PhantomData<S>);

    #[allow(dead_code)]
    fn check_send_sync() {
        let _: Check<Ref<Cell<i32>>>;
        let _: Check<Array<Cell<i32>, 4>>;
        let _: Check<Slice<Cell<i32>>>;
        let _: Check<Heap<Cell<i32>>>;
    }
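
    // Minimal sanity check for the `Storage` accessors: writes items through
    // `slice_mut` and reads them back through `slice`, using the `Array` storage.
    #[test]
    fn array_write_read() {
        let storage = Array::<u32, 4>::from([core::mem::MaybeUninit::uninit(); 4]);
        assert_eq!(storage.len(), 4);
        assert!(!storage.is_empty());
        unsafe {
            let cells = storage.slice_mut(0..4);
            for (i, cell) in cells.iter_mut().enumerate() {
                cell.write(i as u32);
            }
            let cells = storage.slice(0..4);
            for (i, cell) in cells.iter().enumerate() {
                assert_eq!(*cell.assume_init_ref(), i as u32);
            }
        }
    }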
}