fdf_channel/arena.rs

// Copyright 2024 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! Safe bindings for the driver runtime arena stable ABI

use core::alloc::Layout;
use core::cmp::max;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::{NonNull, null_mut, slice_from_raw_parts_mut};
use std::sync::{Arc, Weak};

use zx::Status;

use fdf_sys::*;

pub use fdf_sys::fdf_arena_t;

/// Implements a memory arena allocator to be used with the Fuchsia Driver
/// Runtime when sending and receiving from channels.
#[derive(Debug)]
pub struct Arena(pub(crate) NonNull<fdf_arena_t>);

// SAFETY: The API for `fdf_arena_t` is thread safe.
unsafe impl Send for Arena {}
unsafe impl Sync for Arena {}

impl Default for Arena {
    fn default() -> Self {
        Self::new()
    }
}

impl Arena {
    /// Allocates a new arena for use with the driver runtime.
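    ///
    /// # Example
    ///
    /// A minimal usage sketch, marked `ignore` since it assumes a driver
    /// runtime environment is available in the calling process:
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let value = arena.insert(42);
    /// assert_eq!(*value, 42);
    /// ```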
    pub fn new() -> Self {
        let mut arena = null_mut();
        // SAFETY: the address we pass to fdf_arena_create is allocated on
        // the stack and appropriately sized.
        // This call cannot fail as the only reason it would fail is due to invalid
        // option flags, and 0 is a valid option.
        Status::ok(unsafe { fdf_arena_create(0, 0, &mut arena) }).expect("Failed to create arena");
        // SAFETY: if fdf_arena_create returned ZX_OK, it will have placed
        // a non-null pointer.
        Arena(unsafe { NonNull::new_unchecked(arena) })
    }

    /// Creates an arena from a raw pointer to the arena object.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that only one [`Arena`]
    /// is constructed from this pointer, and that it has not previously
    /// been freed.
    pub unsafe fn from_raw(ptr: NonNull<fdf_arena_t>) -> Self {
        Self(ptr)
    }

    /// Returns true if the allocation pointed to was made by this arena.
    pub fn contains_ptr<T: ?Sized>(&self, ptr: &T) -> bool {
        // SAFETY: self.0 is valid as constructed, and `fdf_arena_contains` does not access data at the
        // pointer but just compares its pointer value to the buffers in the arena.
        unsafe {
            fdf_arena_contains(self.0.as_ptr(), ptr as *const _ as *const _, size_of_val(ptr))
        }
    }

    /// Returns true if the allocation was made by this arena.
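    ///
    /// # Example
    ///
    /// A sketch of checking which arena owns a box (assumes a driver runtime
    /// environment):
    ///
    /// ```ignore
    /// let arena1 = Arena::new();
    /// let arena2 = Arena::new();
    /// let val = arena1.insert(1);
    /// assert!(arena1.contains(&val));
    /// assert!(!arena2.contains(&val));
    /// ```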
    pub fn contains<T: ?Sized>(&self, item: &ArenaBox<'_, T>) -> bool {
        self.contains_ptr(ArenaBox::deref(item))
    }

    /// Allocates the appropriate amount of memory for the given layout and
    /// returns a pointer to `T` at the start of that memory.
    ///
    /// # Safety
    ///
    /// The caller is responsible for making sure that the `Layout` is
    /// appropriate for storing one or more values of type `T`. This may be a
    /// single object or a slice of them, but the layout's size must be a
    /// multiple of `size_of::<T>()`.
    unsafe fn alloc_bytes_for<T>(&self, layout: Layout) -> NonNull<T> {
        // We make sure we allocate at least one byte so we return a unique
        // pointer that is within the arena, which will ensure that subsequent
        // verifications that the memory location is in the arena will pass.
        let bytes = max(layout.size(), 1);
        // SAFETY: Allocating a block of memory in the arena large enough to store
        // the object we're allocating.
        let storage =
            unsafe { NonNull::new_unchecked(fdf_arena_allocate(self.0.as_ptr(), bytes) as *mut T) };
        // TODO(b/352119228): when the arena allocator allows specifying alignment, use that
        // instead of asserting the alignment after the fact.
        assert_eq!(
            storage.align_offset(layout.align()),
            0,
            "Arena returned an improperly aligned pointer: {}",
            core::any::type_name::<T>(),
        );
        storage
    }

    /// Inserts a [`MaybeUninit`] object and returns the [`ArenaBox`] of it.
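    ///
    /// # Example
    ///
    /// A sketch of initializing the returned storage before use (assumes a
    /// driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let mut slot = arena.insert_uninit::<u32>();
    /// slot.write(7);
    /// // SAFETY: the value was just initialized by `write` above.
    /// let value = unsafe { slot.assume_init() };
    /// assert_eq!(*value, 7);
    /// ```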
    pub fn insert_uninit<T: Sized>(&self) -> ArenaBox<'_, MaybeUninit<T>> {
        let layout = Layout::new::<MaybeUninit<T>>();
        // SAFETY: The layout we're passing to `alloc_bytes_for` is for zero or
        // more objects of type `T`, which is the pointer type we get back from
        // it.
        unsafe { ArenaBox::new(self.alloc_bytes_for(layout)) }
    }

    /// Inserts a slice of [`MaybeUninit`] objects of len `len`.
    ///
    /// # Panics
    ///
    /// Panics if an array `[T; n]` is too large to be allocated.
    pub fn insert_uninit_slice<T: Sized>(&self, len: usize) -> ArenaBox<'_, [MaybeUninit<T>]> {
        let layout = Layout::array::<MaybeUninit<T>>(len).expect("allocation too large");
        // SAFETY: The layout we're passing to `alloc_bytes_for` is for zero or
        // more objects of type `T`, which is the pointer type we get back from
        // it.
        let storage = unsafe { self.alloc_bytes_for(layout) };
        // At this point we have a `*mut T` but we need to return a `[T]`,
        // which is unsized. We need to use [`slice_from_raw_parts_mut`]
        // to construct the unsized pointer from the data and its length.
        let ptr = slice_from_raw_parts_mut(storage.as_ptr(), len);
        // SAFETY: alloc_bytes_for is expected to return a valid pointer.
        unsafe { ArenaBox::new(NonNull::new_unchecked(ptr)) }
    }

    /// Moves `obj` of type `T` into the arena and returns an [`ArenaBox`]
    /// containing the moved value.
    pub fn insert<T: Sized>(&self, obj: T) -> ArenaBox<'_, T> {
        let mut uninit = self.insert_uninit();
        uninit.write(obj);
        // SAFETY: we wrote `obj` to the object.
        unsafe { uninit.assume_init() }
    }

    /// Moves a [`Box`]ed slice into the arena and returns an [`ArenaBox`]
    /// containing the moved value.
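    ///
    /// # Example
    ///
    /// A sketch of moving a heap-allocated slice into the arena (assumes a
    /// driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let val = arena.insert_boxed_slice(vec![1, 2, 3].into_boxed_slice());
    /// assert_eq!(&*val, &[1, 2, 3]);
    /// ```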
    pub fn insert_boxed_slice<T: Sized>(&self, slice: Box<[T]>) -> ArenaBox<'_, [T]> {
        let layout = Layout::for_value(&*slice);
        let len = slice.len();
        // SAFETY: The layout we give `alloc_bytes_for` is for storing 0 or more
        // objects of type `T`, which is the pointer type we get from it.
        let storage = unsafe { self.alloc_bytes_for(layout) };
        let original_storage = Box::into_raw(slice);
        // SAFETY: Moving the object into the arena memory we just allocated by
        // first copying the bytes over and then deallocating the raw memory
        // we took from the box.
        let slice_box = unsafe {
            core::ptr::copy_nonoverlapping(original_storage as *mut T, storage.as_ptr(), len);
            let slice_ptr = slice_from_raw_parts_mut(storage.as_ptr(), len);
            ArenaBox::new(NonNull::new_unchecked(slice_ptr))
        };
        if layout.size() != 0 {
            // SAFETY: Since we have decomposed the Box we have to deallocate it,
            // but only if it's not dangling.
            unsafe {
                std::alloc::dealloc(original_storage as *mut u8, layout);
            }
        }
        slice_box
    }

    /// Copies the slice into the arena and returns an [`ArenaBox`] containing
    /// the copied values.
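    ///
    /// # Example
    ///
    /// A sketch of copying a borrowed slice into the arena (assumes a driver
    /// runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let val = arena.insert_slice(&[5, 6, 7, 8]);
    /// assert_eq!(&*val, &[5, 6, 7, 8]);
    /// ```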
    pub fn insert_slice<T: Sized + Clone>(&self, slice: &[T]) -> ArenaBox<'_, [T]> {
        let len = slice.len();
        let mut uninit_slice = self.insert_uninit_slice(len);
        for (from, to) in slice.iter().zip(uninit_slice.iter_mut()) {
            to.write(from.clone());
        }

        // SAFETY: we wrote `from.clone()` to each item of the slice.
        unsafe { uninit_slice.assume_init_slice() }
    }

    /// Inserts a slice of [`Default`]-initialized objects of type `T` to the
    /// arena and returns an [`ArenaBox`] of it.
    ///
    /// # Panics
    ///
    /// Panics if an array `[T; n]` is too large to be allocated.
    pub fn insert_default_slice<T: Sized + Default>(&self, len: usize) -> ArenaBox<'_, [T]> {
        let mut uninit_slice = self.insert_uninit_slice(len);
        for i in uninit_slice.iter_mut() {
            i.write(T::default());
        }
        // SAFETY: we wrote `T::default()` to each item of the slice.
        unsafe { uninit_slice.assume_init_slice() }
    }

    /// Returns an [`ArenaBox`] for the pointed to object, assuming that it is
    /// part of this arena.
    ///
    /// # Safety
    ///
    /// This does not verify that the pointer came from this arena,
    /// so the caller is responsible for verifying that.
    pub unsafe fn assume_unchecked<T: ?Sized>(&self, ptr: NonNull<T>) -> ArenaBox<'_, T> {
        // SAFETY: Caller is responsible for ensuring this per safety doc section.
        unsafe { ArenaBox::new(ptr) }
    }

    /// Returns an [`ArenaBox`] for the pointed to object, verifying that it
    /// is a part of this arena in the process.
    ///
    /// # Panics
    ///
    /// This function panics if the given pointer is not in this [`Arena`].
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that only one [`ArenaBox`] is constructed
    /// for a given pointer, and that the pointer originated from an `ArenaBox<T>` or
    /// a direct allocation with the arena through [`fdf_arena_allocate`], and is:
    /// - initialized to a value of `T`.
    /// - properly aligned for `T`.
    /// - pointing to the beginning of the object, and not to a subfield of another
    ///   [`ArenaBox`]ed object.
    pub unsafe fn assume<T: ?Sized>(&self, ptr: NonNull<T>) -> ArenaBox<'_, T> {
        // SAFETY: caller promises the pointer is initialized and valid.
        assert!(
            self.contains_ptr(unsafe { ptr.as_ref() }),
            "Arena can't assume ownership over a pointer not allocated from within it"
        );
        // SAFETY: we verified above that the pointer is contained in this arena.
        unsafe { self.assume_unchecked(ptr) }
    }

    /// Moves the given [`ArenaBox`] into an [`ArenaRc`] with an owned
    /// reference to this [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Panics
    ///
    /// Panics if the given [`ArenaBox`] is not allocated from this arena.
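    ///
    /// # Example
    ///
    /// A sketch of sharing an arena allocation (assumes a driver runtime
    /// environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let rc = arena.make_rc(arena.insert(1));
    /// let rc2 = rc.clone();
    /// assert_eq!(*rc, *rc2);
    /// ```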
    pub fn make_rc<T: ?Sized>(&self, data: ArenaBox<'_, T>) -> ArenaRc<T> {
        assert!(self.contains(&data), "Arena doesn't own the ArenaBox");
        // SAFETY: we just checked the box is owned by this arena.
        unsafe { ArenaRc::new_unchecked(self.clone(), data) }
    }

    /// Moves the given [`ArenaBox`] into an [`ArenaStaticBox`] with an owned
    /// reference to this [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Panics
    ///
    /// Panics if the given [`ArenaBox`] is not allocated from this arena.
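    ///
    /// # Example
    ///
    /// A sketch showing that the static box keeps its arena alive (assumes a
    /// driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let boxed = arena.make_static(arena.insert(1));
    /// // `boxed` holds its own arena reference, so it may outlive `arena`.
    /// drop(arena);
    /// assert_eq!(*boxed, 1);
    /// ```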
    pub fn make_static<T: ?Sized>(&self, data: ArenaBox<'_, T>) -> ArenaStaticBox<T> {
        assert!(self.contains(&data), "Arena doesn't own the ArenaBox");
        // SAFETY: we just checked the box is owned by this arena.
        unsafe { ArenaStaticBox::new_unchecked(self.clone(), data) }
    }

    /// Creates an [`ArenaBox`]ed slice from an iterator implementing [`ExactSizeIterator`]. Note
    /// that if [`ExactSizeIterator::len`] returns an incorrect value, the returned [`ArenaBox`]
    /// will be no more than the length returned, and may be less.
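    ///
    /// # Example
    ///
    /// A sketch of collecting an iterator into the arena (assumes a driver
    /// runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let val = arena.insert_from_iter(0..4);
    /// assert_eq!(&*val, &[0, 1, 2, 3]);
    /// ```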
    pub fn insert_from_iter<I: IntoIterator>(&self, source: I) -> ArenaBox<'_, [I::Item]>
    where
        I::IntoIter: ExactSizeIterator,
    {
        let iter = source.into_iter();
        let len = iter.len();
        let mut actual_len = 0;
        let mut storage = self.insert_uninit_slice(len);
        for (output, input) in storage.iter_mut().zip(iter) {
            output.write(input);
            actual_len += 1;
        }
        // SAFETY: we wrote to `actual_len` elements of the storage.
        unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) }
    }

    /// Tries to create an [`ArenaBox`]ed slice from an iterator implementing [`ExactSizeIterator`].
    /// Note that if [`ExactSizeIterator::len`] returns an incorrect value, the returned
    /// [`ArenaBox`] will be no more than the length returned, and may be less.
    ///
    /// If any item yielded by the iterator is an `Err` result, the items
    /// written so far are dropped and the error is returned.
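    ///
    /// # Example
    ///
    /// A sketch of the early-exit behavior on an `Err` item (assumes a driver
    /// runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let res = arena.try_insert_from_iter([Ok(1), Err("nope"), Ok(3)]);
    /// assert_eq!(res.err(), Some("nope"));
    /// ```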
    pub fn try_insert_from_iter<I, T, E>(&self, source: I) -> Result<ArenaBox<'_, [T]>, E>
    where
        I: IntoIterator<Item = Result<T, E>>,
        I::IntoIter: ExactSizeIterator,
    {
        let iter = source.into_iter();
        let len = iter.len();
        let mut actual_len = 0;
        let mut storage = self.insert_uninit_slice(len);
        for (output, input) in storage.iter_mut().zip(iter) {
            match input {
                Ok(input) => {
                    output.write(input);
                    actual_len += 1;
                }
                Err(e) => {
                    // `assume_init` the slice so far so that drop handlers are properly called on the
                    // items already moved. This will be dropped immediately.
                    // SAFETY: `actual_len` will be the length of moved values into the slice so far.
                    unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) };
                    return Err(e);
                }
            }
        }
        // SAFETY: we wrote to `actual_len` elements of the storage.
        Ok(unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) })
    }

    /// Transforms this [`Arena`] into a raw pointer to the underlying
    /// [`fdf_arena_t`] without dropping the reference.
    ///
    /// If the caller never releases the returned pointer (for example by
    /// reconstituting it with [`Arena::from_raw`]), the memory allocated by
    /// the arena will never be freed.
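    ///
    /// # Example
    ///
    /// A sketch of a raw round trip (assumes a driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let ptr = arena.into_raw();
    /// // SAFETY: `ptr` came from `into_raw` and is reconstituted exactly once.
    /// let arena = unsafe { Arena::from_raw(ptr) };
    /// ```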
    pub fn into_raw(self) -> NonNull<fdf_arena_t> {
        let res = self.0;
        core::mem::forget(self);
        res
    }
}

impl Clone for Arena {
    fn clone(&self) -> Self {
        // SAFETY: We own this arena reference and so we can add ref it.
        unsafe { fdf_arena_add_ref(self.0.as_ptr()) }
        Self(self.0)
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        // SAFETY: We own this arena reference and so we can drop it.
        unsafe { fdf_arena_drop_ref(self.0.as_ptr()) }
    }
}

/// Holds a reference to data of type `T` in an [`Arena`] with lifetime `'a`,
/// and ensures that the object is properly dropped before the [`Arena`] goes
/// out of scope.
#[derive(Debug)]
pub struct ArenaBox<'a, T: ?Sized>(NonNull<T>, PhantomData<&'a Arena>);

// SAFETY: [`ArenaBox`] impls [`Send`] and [`Sync`] if `T` impls them.
unsafe impl<'a, T: ?Sized> Send for ArenaBox<'a, T> where T: Send {}
unsafe impl<'a, T: ?Sized> Sync for ArenaBox<'a, T> where T: Sync {}

impl<'a, T> ArenaBox<'a, T> {
    /// Moves the inner value of this [`ArenaBox`] out to owned storage.
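    ///
    /// # Example
    ///
    /// A sketch of moving a value back out of the arena (assumes a driver
    /// runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let val = arena.insert(String::from("hello"));
    /// let owned: String = ArenaBox::take(val);
    /// assert_eq!(owned, "hello");
    /// ```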
    pub fn take(value: Self) -> T {
        // SAFETY: `Self::into_ptr` will forget `value` and prevent
        // calling its `drop`.
        unsafe { core::ptr::read(Self::into_ptr(value).as_ptr()) }
    }

    /// Moves the inner value of this [`ArenaBox`] out into a [`Box`] using the
    /// global allocator. Using this instead of `Box::new(ArenaBox::take(v))`
    /// helps to avoid any additional copies of the storage on its way to the
    /// box.
    ///
    /// Note: if you want to take a slice, you will need to use
    /// [`Self::take_boxed_slice`].
    pub fn take_boxed(value: Self) -> Box<T> {
        // SAFETY: we are allocating space for `T` with the layout of `T`, so
        // this is simple.
        let storage = unsafe { global_alloc(Layout::for_value(&*value)) };
        // SAFETY: storage is sufficiently large to store the value in `value`
        // and we used Layout to make sure that Box will be happy with its
        // layout.
        unsafe {
            core::ptr::write(storage.as_ptr(), Self::take(value));
            Box::from_raw(storage.as_ptr())
        }
    }
}

impl<'a, T> ArenaBox<'a, MaybeUninit<T>> {
    /// Assumes the contents of this [`MaybeUninit`] box are initialized now.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init(self) -> ArenaBox<'a, T> {
        // SAFETY: This pointer came from an `ArenaBox` we just leaked,
        // and casting `*MaybeUninit<T>` to `*T` is safe.
        unsafe { ArenaBox::new(ArenaBox::into_ptr(self).cast()) }
    }
}

impl<'a, T> ArenaBox<'a, [MaybeUninit<T>]> {
    /// Assumes the contents of this box of `[MaybeUninit<T>]` are initialized now.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init_slice(self) -> ArenaBox<'a, [T]> {
        let len = self.len();
        // SAFETY: We are about to reconstitute this pointer back into
        // a new `ArenaBox` with the same lifetime, and casting
        // `MaybeUninit<T>` to `T` is safe.
        let data: NonNull<T> = unsafe { ArenaBox::into_ptr(self) }.cast();
        let slice_ptr = NonNull::slice_from_raw_parts(data, len);

        // SAFETY: We just got this pointer from an `ArenaBox` we decomposed.
        unsafe { ArenaBox::new(slice_ptr) }
    }

    /// Assumes the contents of this box of `[MaybeUninit<T>]` are initialized now,
    /// up to `len` elements and ignores the rest.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init_slice_len(self, len: usize) -> ArenaBox<'a, [T]> {
        // only use up to `len` elements of the slice.
        let len = self.len().min(len);
        // SAFETY: We are about to reconstitute this pointer back into
        // a new `ArenaBox` with the same lifetime, and casting
        // `MaybeUninit<T>` to `T` is safe.
        let data: NonNull<T> = unsafe { ArenaBox::into_ptr(self) }.cast();
        let slice_ptr = NonNull::slice_from_raw_parts(data, len);

        // SAFETY: We just got this pointer from an `ArenaBox` we decomposed.
        unsafe { ArenaBox::new(slice_ptr) }
    }
}

impl<'a, T> ArenaBox<'a, [T]> {
    /// Like [`Self::take_boxed`], this moves the inner value of this [`ArenaBox`]
    /// out into a [`Box`] using the global allocator, and using it avoids
    /// additional copies of the data, but this function works on slices of `T`,
    /// which are unsized and so require special handling.
    pub fn take_boxed_slice(value: Self) -> Box<[T]> {
        let len = value.len();
        // SAFETY: we are using the layout of the slice value of type `[T]` to
        // allocate a pointer to the first element of the storage for the new
        // slice, which is of type `T`.
        let storage = unsafe { global_alloc(Layout::for_value(&*value)) };
        // SAFETY: storage is sufficiently large to store the slice in `value`
        let slice_ptr = unsafe {
            core::ptr::copy_nonoverlapping(
                Self::into_ptr(value).as_ptr() as *mut T,
                storage.as_ptr(),
                len,
            );
            core::ptr::slice_from_raw_parts_mut(storage.as_ptr(), len)
        };
        // SAFETY: we used Layout to make sure that Box will be happy with the
        // layout of the stored value.
        unsafe { Box::from_raw(slice_ptr) }
    }
}

impl<'a, T: ?Sized> ArenaBox<'a, T> {
    /// Creates a new [`ArenaBox`] from the given non-null pointer to an object of type `T`.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the object pointed to came from an [`Arena`]
    /// and that the lifetime of this box is less than the lifetime of that [`Arena`].
    pub unsafe fn new(obj: NonNull<T>) -> ArenaBox<'a, T> {
        Self(obj, PhantomData)
    }

    /// Decomposes this [`ArenaBox`] into its pointer.
    ///
    /// # Safety
    ///
    /// This is unsafe because it loses the lifetime of the [`Arena`] it
    /// came from. The caller must make sure to not let the pointer outlive the
    /// arena. The caller is also responsible for making sure the object is
    /// dropped before the [`Arena`], or it may leak resources.
    pub unsafe fn into_ptr(value: Self) -> NonNull<T> {
        let res = value.0;
        core::mem::forget(value);
        res
    }

    /// Turns this [`ArenaBox`] into one with the given lifetime.
    ///
    /// # Safety
    ///
    /// This is unsafe because it loses the lifetime of the [`Arena`] it
    /// came from. The caller must make sure to not let the
    /// [`ArenaBox`] outlive the [`Arena`] it was created from. The caller
    /// is also responsible for making sure the object is dropped before
    /// the [`Arena`], or it may leak resources.
    pub unsafe fn erase_lifetime(value: Self) -> ArenaBox<'static, T> {
        // SAFETY: the caller promises to ensure this object does not
        // outlive the arena.
        unsafe { ArenaBox::new(ArenaBox::into_ptr(value)) }
    }

    /// Consumes and leaks this [`ArenaBox`], returning a mutable reference
    /// to its contents.
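    ///
    /// # Example
    ///
    /// A sketch of leaking a box for the rest of the arena's lifetime
    /// (assumes a driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let leaked: &mut u32 = ArenaBox::leak(arena.insert(1));
    /// *leaked = 2;
    /// // The value is not dropped until the arena itself goes away.
    /// ```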
    pub fn leak(mut this: Self) -> &'a mut T {
        // SAFETY: this box logically owns the value, and the returned
        // reference is bound to the arena's lifetime `'a`, so it cannot
        // outlive the arena backing it.
        let res = unsafe { this.0.as_mut() };
        core::mem::forget(this);
        res
    }
}

impl<'a> ArenaBox<'a, [MaybeUninit<u8>]> {
    /// Transforms the [`ArenaBox`] into an `ArenaBox<T>`.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the contents of this
    /// [`ArenaBox`] originated from a source with a properly allocated `T` with correct
    /// alignment.
    pub unsafe fn cast_unchecked<T>(this: Self) -> ArenaBox<'a, T> {
        let ptr = this.0.cast();
        // Ensure we don't drop the original `ArenaBox`.
        core::mem::forget(this);
        // SAFETY: caller promises this is the correct type.
        unsafe { ArenaBox::new(ptr) }
    }
}

impl<'a, T: ?Sized> Drop for ArenaBox<'a, T> {
    fn drop(&mut self) {
        // SAFETY: Since this value is allocated in the arena, and the arena
        // will not drop the value, and ArenaBox can't be cloned, this ArenaBox
        // owns the value and can drop it.
        unsafe { core::ptr::drop_in_place(self.0.as_ptr()) }
    }
}

impl<T: ?Sized> Deref for ArenaBox<'_, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: As these methods are the only way to get a reference to the
        // contents of the ArenaBox, rust will enforce the aliasing rules
        // of the contents of the inner `NonNull` object.
        unsafe { self.0.as_ref() }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<'_, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: As these methods are the only way to get a reference to the
        // contents of the ArenaBox, rust will enforce the aliasing rules
        // of the contents of the inner `NonNull` object.
        unsafe { self.0.as_mut() }
    }
}

impl<'a, T: 'a> IntoIterator for ArenaBox<'a, [T]> {
    type IntoIter = IntoIter<T, PhantomData<&'a Arena>>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let len = self.len();
        let ptr = self.0.cast();
        // SAFETY: we will never dereference `end`.
        let end = unsafe { ptr.add(len) };
        // the IntoIter now owns the data, so we don't want to drop them here.
        core::mem::forget(self);
        IntoIter { ptr, end, _arena: PhantomData }
    }
}

/// The implementation for an [`IntoIterator`] of an [`ArenaBox`] of a slice that manages the
/// memory behind it and ensures that it's cleaned up.
pub struct IntoIter<T, A> {
    ptr: NonNull<T>,
    end: NonNull<T>,
    _arena: A,
}

impl<T, A> Iterator for IntoIter<T, A> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr == self.end {
            return None;
        }
        // SAFETY: all items from `ptr` to `end-1` are valid until moved out.
        unsafe {
            let res = self.ptr.read();
            self.ptr = self.ptr.add(1);
            Some(res)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

impl<T, A> ExactSizeIterator for IntoIter<T, A> {
    fn len(&self) -> usize {
        // SAFETY: end is always >= ptr.
        unsafe { self.end.offset_from(self.ptr) as usize }
    }
}

impl<T, A> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // go through and read all remaining items to drop them
        while self.ptr != self.end {
            // SAFETY: all items from `ptr` to `end-1` are valid until moved out.
            unsafe {
                drop(self.ptr.read());
                self.ptr = self.ptr.add(1);
            }
        }
    }
}

/// An equivalent to [`ArenaBox`] that holds onto a reference to the
/// arena to allow it to have static lifetime.
#[derive(Debug)]
pub struct ArenaStaticBox<T: ?Sized> {
    data: ArenaBox<'static, T>,
    // Safety Note: it is important that this be last in the struct so that it is
    // guaranteed to be freed after the [`ArenaBox`].
    arena: Arena,
}

// SAFETY: [`ArenaStaticBox`] impls [`Send`] and [`Sync`] if `T` impls them.
unsafe impl<T: ?Sized> Send for ArenaStaticBox<T> where T: Send {}
unsafe impl<T: ?Sized> Sync for ArenaStaticBox<T> where T: Sync {}

impl<T: ?Sized> ArenaStaticBox<T> {
    /// Transforms the given [`ArenaBox`] into an [`ArenaStaticBox`] with an owned
    /// reference to the given [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given [`ArenaBox`] is owned by this
    /// arena, or it may result in use-after-free.
    pub unsafe fn new_unchecked(arena: Arena, data: ArenaBox<'_, T>) -> ArenaStaticBox<T> {
        // SAFETY: The `ArenaBox` will not outlive the `Arena` as it is owned
        // by the current struct and can't be moved out.
        let data = unsafe { ArenaBox::erase_lifetime(data) };
        Self { data, arena }
    }

    /// Takes ownership over the arena and data backing the given
    /// [`ArenaStaticBox`].
    ///
    /// This returns an [`ArenaBox`] tied to the lifetime of the `&mut Option<Arena>`
    /// given, and places the arena in that space.
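    ///
    /// # Example
    ///
    /// A sketch of recovering the arena and a lifetime-bound box (assumes a
    /// driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let static_box = arena.make_static(arena.insert(1));
    /// let mut slot = None;
    /// let val = ArenaStaticBox::unwrap(static_box, &mut slot);
    /// assert_eq!(*val, 1);
    /// drop(val);
    /// assert!(slot.is_some());
    /// ```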
    pub fn unwrap(this: Self, arena: &mut Option<Arena>) -> ArenaBox<'_, T> {
        let ArenaStaticBox { data, arena: inner_arena } = this;
        arena.replace(inner_arena);
        data
    }

    /// Takes ownership of the arena and data backing the given
    /// [`ArenaStaticBox`] as raw pointers.
    ///
    /// Note that while this is safe, care must be taken to ensure that
    /// the raw pointer to the data is not accessed after the arena pointer has
    /// been released.
    pub fn into_raw(this: Self) -> (NonNull<fdf_arena_t>, NonNull<T>) {
        let res = (this.arena.0, this.data.0);
        // make sure that drop handlers aren't called for the arena
        // or box
        core::mem::forget(this);
        res
    }
}

impl<T: 'static> IntoIterator for ArenaStaticBox<[T]> {
    type IntoIter = IntoIter<T, Arena>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let len = self.len();
        let ptr = self.data.0.cast();
        // SAFETY: we will never dereference `end`.
        let end = unsafe { ptr.add(len) };
        IntoIter { ptr, end, _arena: self.arena }
    }
}

impl<T: ?Sized> Deref for ArenaStaticBox<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        ArenaBox::deref(&self.data)
    }
}

impl<T: ?Sized> DerefMut for ArenaStaticBox<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        ArenaBox::deref_mut(&mut self.data)
    }
}

/// An equivalent to [`ArenaBox`] that holds onto a reference to the
/// arena to allow it to have static lifetime, and implements [`Clone`]
/// allowing it to be shared. Since it's shared, you can't get a mutable
/// reference to it back without using [`Self::try_unwrap`] to get the
/// inner [`ArenaStaticBox`].
#[derive(Clone, Debug)]
pub struct ArenaRc<T: ?Sized>(Arc<ArenaStaticBox<T>>);

/// A weak reference to an [`ArenaRc`].
#[derive(Clone, Debug)]
pub struct ArenaWeak<T: ?Sized>(Weak<ArenaStaticBox<T>>);

impl<T: ?Sized> ArenaRc<T> {
    /// Transforms the given [`ArenaBox`] into an [`ArenaRc`] with an owned
    /// reference to the given [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given [`ArenaBox`] is owned by this
    /// arena, or it may result in use-after-free.
    pub unsafe fn new_unchecked(arena: Arena, data: ArenaBox<'_, T>) -> ArenaRc<T> {
        // SAFETY: The `ArenaBox` will not outlive the `Arena` as it is owned
        // by the current struct and can't be moved out.
        let data = unsafe { ArenaBox::erase_lifetime(data) };
        Self(Arc::new(ArenaStaticBox { arena, data }))
    }

    /// Downgrades the given [`ArenaRc`] into an [`ArenaWeak`].
    pub fn downgrade(this: &Self) -> ArenaWeak<T> {
        ArenaWeak(Arc::downgrade(&this.0))
    }

    /// Attempts to take ownership over the arena and data backing the given
    /// [`ArenaRc`] if there is only one strong reference held to it.
    ///
    /// If this is the only strong reference, this returns the inner
    /// [`ArenaStaticBox`]. Otherwise, the original [`ArenaRc`] is returned
    /// back as the error value.
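    ///
    /// # Example
    ///
    /// A sketch of unwrapping once all other references are gone (assumes a
    /// driver runtime environment):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let rc = arena.make_rc(arena.insert(1));
    /// let rc2 = rc.clone();
    /// // Two strong references, so unwrapping fails.
    /// let rc = ArenaRc::try_unwrap(rc).unwrap_err();
    /// drop(rc2);
    /// // Now only one reference remains, so it succeeds.
    /// let static_box = ArenaRc::try_unwrap(rc).unwrap();
    /// ```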
    pub fn try_unwrap(this: Self) -> Result<ArenaStaticBox<T>, Self> {
        Arc::try_unwrap(this.0).map_err(Self)
    }
}

impl<T: ?Sized> From<ArenaStaticBox<T>> for ArenaRc<T> {
    fn from(value: ArenaStaticBox<T>) -> Self {
        Self(Arc::new(value))
    }
}

impl<T: ?Sized> Deref for ArenaRc<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        ArenaBox::deref(&self.0.data)
    }
}

impl<T: ?Sized> ArenaWeak<T> {
    /// Converts this [`ArenaWeak`] into a strong reference [`ArenaRc`] if there are still any
    /// outstanding strong references to it.
    pub fn upgrade(&self) -> Option<ArenaRc<T>> {
        self.0.upgrade().map(ArenaRc)
    }
}

/// Helper for allocating storage on the global heap for the given layout.
/// This returns a pointer typed as `T` rather than as raw bytes so that it
/// can also be used to allocate storage for unsized slices of `T`, where the
/// layout describes the whole slice but the pointer is to its first element.
///
/// This also correctly handles a zero sized layout by returning
/// [`NonNull::dangling`].
///
/// # Safety
///
/// In addition to all the safety requirements of [`std::alloc::alloc`], the
/// caller must ensure that the given layout is appropriate for storing one
/// or more values of type `T`.
unsafe fn global_alloc<T>(layout: Layout) -> NonNull<T> {
    if layout.size() == 0 {
        NonNull::dangling()
    } else {
        let ptr = unsafe { std::alloc::alloc(layout) };
        if ptr.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        unsafe { NonNull::new_unchecked(ptr as *mut T) }
    }
}

#[cfg(test)]
mod tests {
    use std::sync::mpsc;

    use super::*;
    use crate::test_utils::*;

    #[test]
    fn arena_allocations() {
        let arena = Arena::new();
        let _val = arena.insert(());
        let val = arena.insert(1);
        assert_eq!(*val, 1);
        let val = arena.insert(2);
        assert_eq!(*val, 2);
        let val = arena.insert_boxed_slice(Box::new([1, 2, 3, 4]));
        assert_eq!(&*val, &[1, 2, 3, 4]);
        let val: ArenaBox<'_, [()]> = arena.insert_boxed_slice(Box::new([]));
        assert_eq!(&*val, &[]);
        let val = arena.insert_slice(&[5, 6, 7, 8]);
        assert_eq!(&*val, &[5, 6, 7, 8]);
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        assert_eq!(&*val, &[]);
        let val: ArenaBox<'_, [u8]> = arena.insert_default_slice(10);
        assert_eq!(&*val, &[0; 10]);
    }

    #[test]
    #[allow(clippy::unit_cmp)]
    fn arena_take() {
        let arena = Arena::new();
        let val = arena.insert(());
        assert_eq!(ArenaBox::take(val), ());
        let val = arena.insert(1);
        assert_eq!(ArenaBox::take(val), 1);
    }

    #[test]
    #[allow(clippy::unit_cmp)]
    fn arena_take_boxed() {
        let arena = Arena::new();
        let val = arena.insert(());
        assert_eq!(*ArenaBox::take_boxed(val), ());
        let val = arena.insert(1);
        assert_eq!(*ArenaBox::take_boxed(val), 1);
    }

    #[test]
    fn arena_take_boxed_slice() {
        let arena = Arena::new();
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        assert_eq!(&*ArenaBox::take_boxed_slice(val), &[]);
        let val = arena.insert_slice(&[1, 2, 3, 4]);
        assert_eq!(&*ArenaBox::take_boxed_slice(val), &[1, 2, 3, 4]);
    }

    #[test]
    fn arena_drop() {
        let (tx, rx) = mpsc::channel();
        let arena = Arena::new();
        let val = arena.insert(DropSender::new(1, tx.clone()));
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 1);

        let val = arena.insert_boxed_slice(Box::new([DropSender::new(2, tx.clone())]));
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 2);

        let val = arena.insert_slice(&[DropSender::new(3, tx.clone())]);
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 3);

        rx.try_recv().expect_err("no more drops");
    }

    #[test]
    fn arena_take_drop() {
        let (tx, rx) = mpsc::channel();
        let arena = Arena::new();

        let val = arena.insert(DropSender::new(1, tx.clone()));
        let inner = ArenaBox::take(val);
        rx.try_recv().expect_err("shouldn't have dropped when taken");
        drop(inner);
        assert_eq!(rx.try_recv().unwrap(), 1);

        let val = arena.insert_slice(&[DropSender::new(2, tx.clone())]);
        let inner = ArenaBox::take_boxed_slice(val);
        rx.try_recv().expect_err("shouldn't have dropped when taken");
        drop(inner);
        assert_eq!(rx.try_recv().unwrap(), 2);

        rx.try_recv().expect_err("no more drops");
    }

    #[test]
    fn arena_contains() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val1 = arena1.insert(1);
        let val2 = arena2.insert(2);

        assert!(arena1.contains(&val1));
        assert!(arena2.contains(&val2));
        assert!(!arena1.contains(&val2));
        assert!(!arena2.contains(&val1));
    }

    #[test]
    fn arena_assume() {
        let arena = Arena::new();

        let val = arena.insert(1);
        let val_leaked = unsafe { ArenaBox::into_ptr(val) };
        let val = unsafe { arena.assume(val_leaked) };

        assert!(arena.contains(&val));
    }

    #[test]
    #[should_panic]
    fn arena_bad_assume() {
        let arena = Arena::new();

        unsafe { arena.assume(NonNull::<()>::dangling()) };
    }

    #[test]
    #[should_panic]
    fn bad_static_box_ownership() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val = arena1.insert(1);
        arena2.make_static(val);
    }

    #[test]
    #[should_panic]
    fn bad_rc_ownership() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val = arena1.insert(1);
        arena2.make_rc(val);
    }

    #[test]
    fn box_lifecycle() {
        let arena = Arena::new();

        // create the initial value and modify it
        let mut val = arena.insert(1);
        *val = 2;
        assert_eq!(*val, 2);

        // make it a static box and modify it
        let mut val = arena.make_static(val);
        *val = 3;
        assert_eq!(*val, 3);

        // make it into a refcounted shared pointer and check the value is still the
        // same
        let val = ArenaRc::from(val);
        assert_eq!(*val, 3);

        // clone the refcount and verify that we can't unwrap it back to a static box.
        let val_copied = val.clone();
        assert_eq!(*val_copied, 3);
        let val = ArenaRc::try_unwrap(val).expect_err("Double strong count should fail to unwrap");
        assert_eq!(*val, 3);
        drop(val_copied);

        // now that the cloned rc is gone, unwrap it back to a static box and modify it
        let mut val =
            ArenaRc::try_unwrap(val).expect("strong count should be one so this should unwrap now");
        *val = 4;
        assert_eq!(*val, 4);

        // bring it back to a normal arena box and modify it
        let mut shared_arena = None;
        let mut val = ArenaStaticBox::unwrap(val, &mut shared_arena);
        *val = 5;
        assert_eq!(*val, 5);

        // make it back into an rc but directly rather than from a static box
        let val = arena.make_rc(val);
        assert_eq!(*val, 5);
    }

    #[test]
    fn static_raw_roundtrip() {
        let arena = Arena::new();
        let val = arena.make_static(arena.insert(1));

        // turn it into raw pointers and modify it
        let (arena_ptr, mut data_ptr) = ArenaStaticBox::into_raw(val);
        *unsafe { data_ptr.as_mut() } = 2;
        assert_eq!(*unsafe { data_ptr.as_ref() }, 2);

        // reconstitute it back to an `ArenaBox` and then transform it
        let arena = unsafe { Arena::from_raw(arena_ptr) };
        let val = unsafe { arena.assume(data_ptr) };

        assert_eq!(*val, 2);
    }

    #[test]
    fn arena_into_and_from_iter() {
        let arena = Arena::new();

        // empty slice to vec
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        let vec_val = Vec::from_iter(val);
        assert!(vec_val.is_empty());

        // filled slice to vec
        let val = arena.insert_slice(&[1, 2, 3, 4]);
        let vec_val = Vec::from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*vec_val);

        // filled static slice to vec
        let val = arena.make_static(arena.insert_slice(&[1, 2, 3, 4]));
        let vec_val = Vec::from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*vec_val);

        // empty vec to arena box
        let val: Vec<()> = vec![];
        let arena_val = arena.insert_from_iter(val.clone());
        assert_eq!(val, &*arena_val);

        // filled vec to arena box
        let val = vec![1, 2, 3, 4];
        let arena_val = arena.insert_from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*arena_val);
    }

    #[test]
    fn arena_try_from_iter() {
        let arena = Arena::new();

        let val: Vec<Result<_, ()>> = vec![Ok(1), Ok(2), Ok(3), Ok(4)];
        let arena_val = arena.try_insert_from_iter(val).unwrap();
        assert_eq!(&[1, 2, 3, 4], &*arena_val);

        let (tx, rx) = mpsc::channel();
        let val = vec![Ok(DropSender::new(0, tx.clone())), Err(-1), Ok(DropSender::new(1, tx))];
        let Err(-1) = arena.try_insert_from_iter(val) else {
            panic!("early exit from try_insert_from_iter")
        };
        let Ok(0) = rx.try_recv() else {
            panic!("expected drop of leading ok value to have happened")
        };
        let Ok(1) = rx.try_recv() else {
            panic!("expected drop of trailing ok value to have happened")
        };
    }
}