fdf/arena.rs

// Copyright 2024 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! Safe bindings for the driver runtime arena stable ABI
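//!
//! # Example
//!
//! A minimal usage sketch (assuming this library is available as the `fdf` crate):
//!
//! ```
//! use fdf::Arena;
//!
//! let arena = Arena::new();
//! let greeting = arena.insert("hello");
//! assert_eq!(*greeting, "hello");
//! ```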

use core::alloc::Layout;
use core::cmp::max;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::{null_mut, slice_from_raw_parts_mut, NonNull};
use std::sync::{Arc, Weak};

use zx::Status;

use fdf_sys::*;

pub use fdf_sys::fdf_arena_t;

/// Implements a memory arena allocator to be used with the Fuchsia Driver
/// Runtime when sending and receiving from channels.
#[derive(Debug)]
pub struct Arena(pub(crate) NonNull<fdf_arena_t>);

// SAFETY: The api for `fdf_arena_t` is thread safe
unsafe impl Send for Arena {}
unsafe impl Sync for Arena {}

impl Arena {
    /// Allocates a new arena for use with the driver runtime
    pub fn new() -> Self {
        let mut arena = null_mut();
        // SAFETY: the address we pass to fdf_arena_create is allocated on
        // the stack and appropriately sized.
        // This call cannot fail as the only reason it would fail is due to invalid
        // option flags, and 0 is a valid option.
        Status::ok(unsafe { fdf_arena_create(0, 0, &mut arena) }).expect("Failed to create arena");
        // SAFETY: if fdf_arena_create returned ZX_OK, it will have placed
        // a non-null pointer.
        Arena(unsafe { NonNull::new_unchecked(arena) })
    }

    /// Creates an arena from a raw pointer to the arena object.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that only one [`Arena`]
    /// is constructed from this pointer, and that it has not previously
    /// been freed.
    pub unsafe fn from_raw(ptr: NonNull<fdf_arena_t>) -> Self {
        Self(ptr)
    }

    /// Returns true if the allocation pointed to was made by this arena
    pub fn contains_ptr<T: ?Sized>(&self, ptr: &T) -> bool {
        // SAFETY: self.0 is valid as constructed, and `fdf_arena_contains` does not access data at the
        // pointer but just compares its pointer value to the buffers in the arena.
        unsafe {
            fdf_arena_contains(self.0.as_ptr(), ptr as *const _ as *const _, size_of_val(ptr))
        }
    }

    /// Returns true if the allocation was made by this arena
    pub fn contains<T: ?Sized>(&self, item: &ArenaBox<'_, T>) -> bool {
        self.contains_ptr(ArenaBox::deref(item))
    }

    /// Allocates the appropriate amount of memory for the given layout and
    /// returns a pointer to `T` at the start of that memory.
    ///
    /// # Safety
    ///
    /// The caller is responsible for making sure that the `Layout` describes
    /// storage for one or more values of type `T`: it may be a single object
    /// or a slice of them, but its size must be a multiple of `size_of::<T>()`.
    unsafe fn alloc_bytes_for<T>(&self, layout: Layout) -> NonNull<T> {
        // We make sure we allocate at least one byte so we return a unique
        // pointer that is within the arena, which will ensure that subsequent
        // verifications that the memory location is in the arena will pass.
        let bytes = max(layout.size(), 1);
        // SAFETY: Allocating a block of memory in the arena large enough to store
        // the object we're allocating.
        let storage =
            unsafe { NonNull::new_unchecked(fdf_arena_allocate(self.0.as_ptr(), bytes) as *mut T) };
        // TODO(b/352119228): when the arena allocator allows specifying alignment, use that
        // instead of asserting the alignment after the fact.
        assert_eq!(
            storage.align_offset(layout.align()),
            0,
            "Arena returned an improperly aligned pointer: {}",
            core::any::type_name::<T>(),
        );
        storage
    }

    /// Inserts a [`MaybeUninit`] object and returns the [`ArenaBox`] of it.
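    ///
    /// # Example
    ///
    /// A sketch of manually initializing the storage, mirroring what
    /// [`Arena::insert`] does internally:
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let mut val = arena.insert_uninit();
    /// val.write(1);
    /// // SAFETY: we just wrote a value to the storage.
    /// let val = unsafe { val.assume_init() };
    /// assert_eq!(*val, 1);
    /// ```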
    pub fn insert_uninit<T: Sized>(&self) -> ArenaBox<'_, MaybeUninit<T>> {
        let layout = Layout::new::<MaybeUninit<T>>();
        // SAFETY: The layout we're passing to `alloc_bytes_for` is for zero or
        // more objects of type `T`, which is the pointer type we get back from
        // it.
        unsafe { ArenaBox::new(self.alloc_bytes_for(layout)) }
    }

    /// Inserts a slice of [`MaybeUninit`] objects of length `len`.
    ///
    /// # Panics
    ///
    /// Panics if an array `[T; n]` is too large to be allocated.
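    ///
    /// # Example
    ///
    /// A sketch of initializing every element and then asserting the slice is
    /// initialized:
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let mut slice = arena.insert_uninit_slice(3);
    /// for (i, item) in slice.iter_mut().enumerate() {
    ///     item.write(i);
    /// }
    /// // SAFETY: we just wrote to every element of the slice.
    /// let slice = unsafe { slice.assume_init_slice() };
    /// assert_eq!(&*slice, &[0, 1, 2]);
    /// ```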
    pub fn insert_uninit_slice<T: Sized>(&self, len: usize) -> ArenaBox<'_, [MaybeUninit<T>]> {
        let layout = Layout::array::<MaybeUninit<T>>(len).expect("allocation too large");
        // SAFETY: The layout we're passing to `alloc_bytes_for` is for zero or
        // more objects of type `T`, which is the pointer type we get back from
        // it.
        let storage = unsafe { self.alloc_bytes_for(layout) };
        // At this point we have a `*mut T` but we need to return a `[T]`,
        // which is unsized. We need to use [`slice_from_raw_parts_mut`]
        // to construct the unsized pointer from the data and its length.
        let ptr = slice_from_raw_parts_mut(storage.as_ptr(), len);
        // SAFETY: alloc_bytes_for is expected to return a valid pointer.
        unsafe { ArenaBox::new(NonNull::new_unchecked(ptr)) }
    }

    /// Moves `obj` of type `T` into the arena and returns an [`ArenaBox`]
    /// containing the moved value.
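    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let val = arena.insert(1);
    /// assert_eq!(*val, 1);
    /// ```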
    pub fn insert<T: Sized>(&self, obj: T) -> ArenaBox<'_, T> {
        let mut uninit = self.insert_uninit();
        uninit.write(obj);
        // SAFETY: we wrote `obj` to the object
        unsafe { uninit.assume_init() }
    }

    /// Moves a [`Box`]ed slice into the arena and returns an [`ArenaBox`]
    /// containing the moved value.
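    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let val = arena.insert_boxed_slice(Box::new([1, 2, 3, 4]));
    /// assert_eq!(&*val, &[1, 2, 3, 4]);
    /// ```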
    pub fn insert_boxed_slice<T: Sized>(&self, slice: Box<[T]>) -> ArenaBox<'_, [T]> {
        let layout = Layout::for_value(&*slice);
        let len = slice.len();
        // SAFETY: The layout we give `alloc_bytes_for` is for storing 0 or more
        // objects of type `T`, which is the pointer type we get from it.
        let storage = unsafe { self.alloc_bytes_for(layout) };
        let original_storage = Box::into_raw(slice);
        // SAFETY: Moving the object into the arena memory we just allocated by
        // first copying the bytes over and then deallocating the raw memory
        // we took from the box.
        let slice_box = unsafe {
            core::ptr::copy_nonoverlapping(original_storage as *mut T, storage.as_ptr(), len);
            let slice_ptr = slice_from_raw_parts_mut(storage.as_ptr(), len);
            ArenaBox::new(NonNull::new_unchecked(slice_ptr))
        };
        if layout.size() != 0 {
            // SAFETY: Since we have decomposed the Box we have to deallocate it,
            // but only if it's not dangling.
            unsafe {
                std::alloc::dealloc(original_storage as *mut u8, layout);
            }
        }
        slice_box
    }

    /// Copies the slice into the arena and returns an [`ArenaBox`] containing
    /// the copied values.
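    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let val = arena.insert_slice(&[5, 6, 7, 8]);
    /// assert_eq!(&*val, &[5, 6, 7, 8]);
    /// ```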
    pub fn insert_slice<T: Sized + Clone>(&self, slice: &[T]) -> ArenaBox<'_, [T]> {
        let len = slice.len();
        let mut uninit_slice = self.insert_uninit_slice(len);
        for (from, to) in slice.iter().zip(uninit_slice.iter_mut()) {
            to.write(from.clone());
        }

        // SAFETY: we wrote `from.clone()` to each item of the slice.
        unsafe { uninit_slice.assume_init_slice() }
    }

    /// Inserts a slice of [`Default`]-initialized objects of type `T` to the
    /// arena and returns an [`ArenaBox`] of it.
    ///
    /// # Panics
    ///
    /// Panics if an array `[T; n]` is too large to be allocated.
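    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::{Arena, ArenaBox};
    /// let arena = Arena::new();
    /// let val: ArenaBox<'_, [u8]> = arena.insert_default_slice(10);
    /// assert_eq!(&*val, &[0; 10]);
    /// ```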
    pub fn insert_default_slice<T: Sized + Default>(&self, len: usize) -> ArenaBox<'_, [T]> {
        let mut uninit_slice = self.insert_uninit_slice(len);
        for i in uninit_slice.iter_mut() {
            i.write(T::default());
        }
        // SAFETY: we wrote `T::default()` to each item of the slice.
        unsafe { uninit_slice.assume_init_slice() }
    }

    /// Returns an [`ArenaBox`] for the pointed-to object, assuming that it is
    /// part of this arena.
    ///
    /// # Safety
    ///
    /// This does not verify that the pointer came from this arena,
    /// so the caller is responsible for verifying that.
    pub unsafe fn assume_unchecked<T: ?Sized>(&self, ptr: NonNull<T>) -> ArenaBox<'_, T> {
        // SAFETY: Caller is responsible for ensuring this per safety doc section.
        unsafe { ArenaBox::new(ptr) }
    }

    /// Returns an [`ArenaBox`] for the pointed to object, verifying that it
    /// is a part of this arena in the process.
    ///
    /// # Panics
    ///
    /// This function panics if the given pointer is not in this [`Arena`].
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that only one [`ArenaBox`] is constructed
    /// for a given pointer, and that the pointer originated from an `ArenaBox<T>` or
    /// a direct allocation with the arena through [`fdf_arena_allocate`], and is:
    /// - initialized to a value of `T`.
    /// - properly aligned for `T`.
    /// - pointing to the beginning of the object, and not to a subfield of another
    ///   [`ArenaBox`]ed object.
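    ///
    /// # Example
    ///
    /// A sketch of a round trip through [`ArenaBox::into_ptr`]:
    ///
    /// ```
    /// # use fdf::{Arena, ArenaBox};
    /// let arena = Arena::new();
    /// let val = arena.insert(1);
    /// // SAFETY: the pointer will not outlive the arena.
    /// let ptr = unsafe { ArenaBox::into_ptr(val) };
    /// // SAFETY: the pointer came from an `ArenaBox` allocated by this arena.
    /// let val = unsafe { arena.assume(ptr) };
    /// assert_eq!(*val, 1);
    /// ```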
    pub unsafe fn assume<T: ?Sized>(&self, ptr: NonNull<T>) -> ArenaBox<'_, T> {
        // SAFETY: caller promises the pointer is initialized and valid
        assert!(
            self.contains_ptr(unsafe { ptr.as_ref() }),
            "Arena can't assume ownership over a pointer not allocated from within it"
        );
        // SAFETY: we verified just above that the pointer came from this arena.
        unsafe { self.assume_unchecked(ptr) }
    }

    /// Moves the given [`ArenaBox`] into an [`ArenaRc`] with an owned
    /// reference to this [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Panics
    ///
    /// Panics if the given [`ArenaBox`] is not allocated from this arena.
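    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let val = arena.make_rc(arena.insert(1));
    /// let val2 = val.clone();
    /// assert_eq!(*val, *val2);
    /// ```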
    pub fn make_rc<T: ?Sized>(&self, data: ArenaBox<'_, T>) -> ArenaRc<T> {
        assert!(self.contains(&data), "Arena doesn't own the ArenaBox");
        // SAFETY: we just checked the box is owned by this arena.
        unsafe { ArenaRc::new_unchecked(self.clone(), data) }
    }

    /// Moves the given [`ArenaBox`] into an [`ArenaStaticBox`] with an owned
    /// reference to this [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Panics
    ///
    /// Panics if the given [`ArenaBox`] is not allocated from this arena.
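    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let mut val = arena.make_static(arena.insert(1));
    /// *val = 2;
    /// assert_eq!(*val, 2);
    /// ```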
    pub fn make_static<T: ?Sized>(&self, data: ArenaBox<'_, T>) -> ArenaStaticBox<T> {
        assert!(self.contains(&data), "Arena doesn't own the ArenaBox");
        // SAFETY: we just checked the box is owned by this arena.
        unsafe { ArenaStaticBox::new_unchecked(self.clone(), data) }
    }

    /// Creates an [`ArenaBox`]ed slice from an iterator implementing [`ExactSizeIterator`]. Note
    /// that if [`ExactSizeIterator::len`] returns an incorrect value, the returned [`ArenaBox`]
    /// will contain no more elements than that reported length, and may contain fewer.
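    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let val = arena.insert_from_iter(vec![1, 2, 3, 4]);
    /// assert_eq!(&*val, &[1, 2, 3, 4]);
    /// ```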
    pub fn insert_from_iter<I: IntoIterator>(&self, source: I) -> ArenaBox<'_, [I::Item]>
    where
        I::IntoIter: ExactSizeIterator,
    {
        let iter = source.into_iter();
        let len = iter.len();
        let mut actual_len = 0;
        let mut storage = self.insert_uninit_slice(len);
        for (output, input) in storage.iter_mut().zip(iter) {
            output.write(input);
            actual_len += 1;
        }
        // SAFETY: we wrote to `actual_len` elements of the storage
        unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) }
    }

    /// Tries to create an [`ArenaBox`]ed slice from an iterator implementing [`ExactSizeIterator`].
    /// Note that if [`ExactSizeIterator::len`] returns an incorrect value, the returned
    /// [`ArenaBox`] will contain no more elements than that reported length, and may contain fewer.
    ///
    /// If any item yielded by the iterator is an `Err`, the values written so far are dropped
    /// and that error is returned.
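    ///
    /// # Example
    ///
    /// A sketch of both the success and the early-error paths:
    ///
    /// ```
    /// # use fdf::Arena;
    /// let arena = Arena::new();
    /// let res: Result<_, ()> = arena.try_insert_from_iter(vec![Ok(1), Ok(2)]);
    /// assert_eq!(&*res.unwrap(), &[1, 2]);
    /// let res = arena.try_insert_from_iter(vec![Ok(1), Err(-1)]);
    /// assert_eq!(res.expect_err("should return the error"), -1);
    /// ```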
    pub fn try_insert_from_iter<I, T, E>(&self, source: I) -> Result<ArenaBox<'_, [T]>, E>
    where
        I: IntoIterator<Item = Result<T, E>>,
        I::IntoIter: ExactSizeIterator,
    {
        let iter = source.into_iter();
        let len = iter.len();
        let mut actual_len = 0;
        let mut storage = self.insert_uninit_slice(len);
        for (output, input) in storage.iter_mut().zip(iter) {
            match input {
                Ok(input) => {
                    output.write(input);
                    actual_len += 1;
                }
                Err(e) => {
                    // `assume_init` the slice so far so that drop handlers are properly called on the
                    // items already moved. This will be dropped immediately.
                    // SAFETY: `actual_len` will be the length of moved values into the slice so far.
                    unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) };
                    return Err(e);
                }
            }
        }
        // SAFETY: we wrote to `actual_len` elements of the storage
        Ok(unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) })
    }

    /// Transforms this [`Arena`] into a raw [`fdf_arena_t`] pointer without
    /// dropping the reference.
    ///
    /// If the caller never releases the returned reference, the memory
    /// allocated by the arena will never be freed.
    pub fn into_raw(self) -> NonNull<fdf_arena_t> {
        let res = self.0;
        core::mem::forget(self);
        res
    }
}

impl Clone for Arena {
    fn clone(&self) -> Self {
        // SAFETY: We own this arena reference and so we can add ref it
        unsafe { fdf_arena_add_ref(self.0.as_ptr()) }
        Self(self.0)
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        // SAFETY: We own this arena reference and so we can drop it.
        unsafe { fdf_arena_drop_ref(self.0.as_ptr()) }
    }
}

/// Holds a reference to data of type `T` in an [`Arena`] with lifetime `'a`,
/// and ensures that the object is properly dropped before the [`Arena`] goes
/// out of scope.
#[derive(Debug)]
pub struct ArenaBox<'a, T: ?Sized>(NonNull<T>, PhantomData<&'a Arena>);

/// SAFETY: [`ArenaBox`] impls [`Send`] and [`Sync`] if `T` impls them.
unsafe impl<'a, T: ?Sized> Send for ArenaBox<'a, T> where T: Send {}
unsafe impl<'a, T: ?Sized> Sync for ArenaBox<'a, T> where T: Sync {}

impl<'a, T> ArenaBox<'a, T> {
    /// Moves the inner value of this ArenaBox out to owned storage.
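    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::{Arena, ArenaBox};
    /// let arena = Arena::new();
    /// let val = arena.insert(1);
    /// assert_eq!(ArenaBox::take(val), 1);
    /// ```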
    pub fn take(value: Self) -> T {
        // SAFETY: `Self::into_ptr` will forget `value` and prevent
        // calling its `drop`.
        unsafe { core::ptr::read(Self::into_ptr(value).as_ptr()) }
    }

    /// Moves the inner value of this ArenaBox out into a [`Box`] using the
    /// global allocator. Using this instead of `Box::new(ArenaBox::take(v))`
    /// helps to avoid any additional copies of the storage on its way to the
    /// box.
    ///
    /// Note: if you want to take a slice, you will need to use
    /// [`Self::take_boxed_slice`].
    pub fn take_boxed(value: Self) -> Box<T> {
        // SAFETY: we are allocating space for `T` with the layout of `T`, so
        // this is simple.
        let storage = unsafe { global_alloc(Layout::for_value(&*value)) };
        // SAFETY: storage is sufficiently large to store the value in `value`
        // and we used Layout to make sure that Box will be happy with its
        // layout.
        unsafe {
            core::ptr::write(storage.as_ptr(), Self::take(value));
            Box::from_raw(storage.as_ptr())
        }
    }
}

impl<'a, T> ArenaBox<'a, MaybeUninit<T>> {
    /// Assumes the contents of this [`MaybeUninit`] box are initialized now.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init(self) -> ArenaBox<'a, T> {
        // SAFETY: This pointer came from an `ArenaBox` we just leaked,
        // and casting `*MaybeUninit<T>` to `*T` is safe.
        unsafe { ArenaBox::new(ArenaBox::into_ptr(self).cast()) }
    }
}

impl<'a, T> ArenaBox<'a, [MaybeUninit<T>]> {
    /// Assumes the contents of this box of `[MaybeUninit<T>]` are initialized now.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init_slice(self) -> ArenaBox<'a, [T]> {
        let len = self.len();
        // SAFETY: We are about to reconstitute this pointer back into
        // a new `ArenaBox` with the same lifetime, and casting
        // `MaybeUninit<T>` to `T` is safe.
        let data: NonNull<T> = unsafe { ArenaBox::into_ptr(self) }.cast();
        let slice_ptr = NonNull::slice_from_raw_parts(data, len);

        // SAFETY: We just got this pointer from an `ArenaBox` we decomposed.
        unsafe { ArenaBox::new(slice_ptr) }
    }

    /// Assumes the contents of this box of `[MaybeUninit<T>]` are initialized now,
    /// up to `len` elements and ignores the rest.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init_slice_len(self, len: usize) -> ArenaBox<'a, [T]> {
        // only use up to `len` elements of the slice.
        let len = self.len().min(len);
        // SAFETY: We are about to reconstitute this pointer back into
        // a new `ArenaBox` with the same lifetime, and casting
        // `MaybeUninit<T>` to `T` is safe.
        let data: NonNull<T> = unsafe { ArenaBox::into_ptr(self) }.cast();
        let slice_ptr = NonNull::slice_from_raw_parts(data, len);

        // SAFETY: We just got this pointer from an `ArenaBox` we decomposed.
        unsafe { ArenaBox::new(slice_ptr) }
    }
}

impl<'a, T> ArenaBox<'a, [T]> {
    /// Like [`Self::take_boxed`], this moves the inner value of this ArenaBox
    /// out into a [`Box`] using the global allocator, avoiding additional
    /// copies of the data. Unlike that method, it works on slices of `T`,
    /// which are unsized and so require special handling.
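    ///
    /// # Example
    ///
    /// ```
    /// # use fdf::{Arena, ArenaBox};
    /// let arena = Arena::new();
    /// let val = arena.insert_slice(&[1, 2, 3, 4]);
    /// let boxed: Box<[i32]> = ArenaBox::take_boxed_slice(val);
    /// assert_eq!(&*boxed, &[1, 2, 3, 4]);
    /// ```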
    pub fn take_boxed_slice(value: Self) -> Box<[T]> {
        let len = value.len();
        // SAFETY: we are using the layout of the slice value of type `[T]` to
        // allocate a pointer to the first element of the storage for the new
        // slice, which is of type `T`.
        let storage = unsafe { global_alloc(Layout::for_value(&*value)) };
        // SAFETY: storage is sufficiently large to store the slice in `value`
        let slice_ptr = unsafe {
            core::ptr::copy_nonoverlapping(
                Self::into_ptr(value).as_ptr() as *mut T,
                storage.as_ptr(),
                len,
            );
            core::ptr::slice_from_raw_parts_mut(storage.as_ptr(), len)
        };
        // SAFETY: we used Layout to make sure that Box will be happy with the
        // layout of the stored value.
        unsafe { Box::from_raw(slice_ptr) }
    }
}

impl<'a, T: ?Sized> ArenaBox<'a, T> {
    pub(crate) unsafe fn new(obj: NonNull<T>) -> ArenaBox<'a, T> {
        Self(obj, PhantomData)
    }

    /// Decomposes this [`ArenaBox`] into its pointer.
    ///
    /// # Safety
    ///
    /// This is unsafe because it loses the lifetime of the [`Arena`] it
    /// came from. The caller must make sure to not let the pointer outlive the
    /// arena. The caller is also responsible for making sure the object is
    /// dropped before the [`Arena`], or it may leak resources.
    pub unsafe fn into_ptr(value: Self) -> NonNull<T> {
        let res = value.0;
        core::mem::forget(value);
        res
    }

    /// Turns this [`ArenaBox`] into one with the given lifetime.
    ///
    /// # Safety
    ///
    /// This is unsafe because it loses the lifetime of the [`Arena`] it
    /// came from. The caller must make sure to not let the
    /// [`ArenaBox`] outlive the [`Arena`] it was created from. The caller
    /// is also responsible for making sure the object is dropped before
    /// the [`Arena`], or it may leak resources.
    pub unsafe fn erase_lifetime(value: Self) -> ArenaBox<'static, T> {
        // SAFETY: the caller promises to ensure this object does not
        // outlive the arena.
        unsafe { ArenaBox::new(ArenaBox::into_ptr(value)) }
    }

    /// Consumes and leaks this [`ArenaBox`], returning a mutable reference
    /// to its contents.
    pub fn leak(mut this: Self) -> &'a mut T {
        // SAFETY: we forget `this` immediately after taking the reference, so
        // no other reference to the contents can be created through this box.
        let res = unsafe { this.0.as_mut() };
        core::mem::forget(this);
        res
    }
}

impl<'a> ArenaBox<'a, [MaybeUninit<u8>]> {
    /// Transforms the [`ArenaBox`] into an `ArenaBox<T>`.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the contents of this
    /// [`ArenaBox`] originated from a source with a properly allocated `T`
    /// with correct alignment.
    pub unsafe fn cast_unchecked<T>(this: Self) -> ArenaBox<'a, T> {
        let ptr = this.0.cast();
        // Ensure we don't drop the original `ArenaBox`.
        core::mem::forget(this);
        // SAFETY: caller promises this is the correct type
        unsafe { ArenaBox::new(ptr) }
    }
}

impl<'a, T: ?Sized> Drop for ArenaBox<'a, T> {
    fn drop(&mut self) {
        // SAFETY: Since this value is allocated in the arena, and the arena
        // will not drop the value, and ArenaBox can't be cloned, this ArenaBox
        // owns the value and can drop it.
        unsafe { core::ptr::drop_in_place(self.0.as_ptr()) }
    }
}

impl<T: ?Sized> Deref for ArenaBox<'_, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: As these methods are the only way to get a reference to the
        // contents of the ArenaBox, rust will enforce the aliasing rules
        // of the contents of the inner `NonNull` object.
        unsafe { self.0.as_ref() }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<'_, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: As these methods are the only way to get a reference to the
        // contents of the ArenaBox, rust will enforce the aliasing rules
        // of the contents of the inner `NonNull` object.
        unsafe { self.0.as_mut() }
    }
}

impl<'a, T: 'a> IntoIterator for ArenaBox<'a, [T]> {
    type IntoIter = IntoIter<T, PhantomData<&'a Arena>>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let len = self.len();
        let ptr = self.0.cast();
        // SAFETY: we will never dereference `end`
        let end = unsafe { ptr.add(len) };
        // the IntoIter now owns the data, so we don't want to drop them here.
        core::mem::forget(self);
        IntoIter { ptr, end, _arena: PhantomData }
    }
}

/// An iterator that moves elements out of an arena-allocated slice, holding a
/// value of type `A` to keep the backing arena alive for as long as needed.
pub struct IntoIter<T, A> {
    ptr: NonNull<T>,
    end: NonNull<T>,
    _arena: A,
}

impl<T, A> Iterator for IntoIter<T, A> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr == self.end {
            return None;
        }
        // SAFETY: all items from `ptr` to `end-1` are valid until moved out.
        unsafe {
            let res = self.ptr.read();
            self.ptr = self.ptr.add(1);
            Some(res)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

impl<T, A> ExactSizeIterator for IntoIter<T, A> {
    fn len(&self) -> usize {
        // SAFETY: end is always >= ptr
        unsafe { self.end.offset_from(self.ptr) as usize }
    }
}

impl<T, A> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // go through and read all remaining items to drop them
        while self.ptr != self.end {
            // SAFETY: all items from `ptr` to `end-1` are valid until moved out.
            unsafe {
                drop(self.ptr.read());
                self.ptr = self.ptr.add(1);
            }
        }
    }
}

/// An equivalent to [`ArenaBox`] that holds onto a reference to the
/// arena to allow it to have static lifetime.
#[derive(Debug)]
pub struct ArenaStaticBox<T: ?Sized> {
    data: ArenaBox<'static, T>,
    // Safety Note: it is important that this be last in the struct so that it is
    // guaranteed to be freed after the [`ArenaBox`].
    arena: Arena,
}

/// SAFETY: [`ArenaStaticBox`] impls [`Send`] and [`Sync`] if `T` impls them.
unsafe impl<T: ?Sized> Send for ArenaStaticBox<T> where T: Send {}
unsafe impl<T: ?Sized> Sync for ArenaStaticBox<T> where T: Sync {}

impl<T: ?Sized> ArenaStaticBox<T> {
    /// Transforms the given [`ArenaBox`] into an [`ArenaStaticBox`] with an owned
    /// reference to the given [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given [`ArenaBox`] is owned by this
    /// arena, or it may result in use-after-free.
    pub unsafe fn new_unchecked(arena: Arena, data: ArenaBox<'_, T>) -> ArenaStaticBox<T> {
        // SAFETY: The `ArenaBox` will not outlive the `Arena` as it is owned
        // by the current struct and can't be moved out.
        let data = unsafe { ArenaBox::erase_lifetime(data) };
        Self { data, arena }
    }

    /// Takes ownership over the arena and data backing the given
    /// [`ArenaStaticBox`].
    ///
    /// This returns an [`ArenaBox`] tied to the lifetime of the `&mut Option<Arena>`
    /// given, and places the arena in that space.
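    ///
    /// # Example
    ///
    /// A sketch of recovering the [`Arena`] and the [`ArenaBox`] from an
    /// [`ArenaStaticBox`]:
    ///
    /// ```
    /// # use fdf::{Arena, ArenaStaticBox};
    /// let arena = Arena::new();
    /// let val = arena.make_static(arena.insert(1));
    /// let mut shared_arena = None;
    /// let val = ArenaStaticBox::unwrap(val, &mut shared_arena);
    /// assert!(shared_arena.is_some());
    /// assert_eq!(*val, 1);
    /// ```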
    pub fn unwrap(this: Self, arena: &mut Option<Arena>) -> ArenaBox<'_, T> {
        let ArenaStaticBox { data, arena: inner_arena } = this;
        arena.replace(inner_arena);
        data
    }

    /// Takes ownership of the arena and data backing the given
    /// [`ArenaStaticBox`] as raw pointers.
    ///
    /// Note that while this is safe, care must be taken to ensure that
    /// the raw pointer to the data is not accessed after the arena pointer has
    /// been released.
    pub fn into_raw(this: Self) -> (NonNull<fdf_arena_t>, NonNull<T>) {
        let res = (this.arena.0, this.data.0);
        // make sure that drop handlers aren't called for the arena
        // or box
        core::mem::forget(this);
        res
    }
}

impl<T: 'static> IntoIterator for ArenaStaticBox<[T]> {
    type IntoIter = IntoIter<T, Arena>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let len = self.len();
        let ArenaStaticBox { data, arena } = self;
        let ptr = data.0.cast();
        // SAFETY: we will never dereference `end`
        let end = unsafe { ptr.add(len) };
        // the IntoIter now owns the items, so forget the `ArenaBox` so that
        // its drop impl doesn't also drop them here.
        core::mem::forget(data);
        IntoIter { ptr, end, _arena: arena }
    }
}

impl<T: ?Sized> Deref for ArenaStaticBox<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        ArenaBox::deref(&self.data)
    }
}

impl<T: ?Sized> DerefMut for ArenaStaticBox<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        ArenaBox::deref_mut(&mut self.data)
    }
}

/// An equivalent to [`ArenaBox`] that holds onto a reference to the
/// arena to allow it to have static lifetime, and implements [`Clone`]
/// allowing it to be shared. Since it's shared, you can't get a mutable
/// reference to it back without using [`Self::try_unwrap`] to get the
/// inner [`ArenaStaticBox`].
#[derive(Clone, Debug)]
pub struct ArenaRc<T: ?Sized>(Arc<ArenaStaticBox<T>>);
/// A [`Weak`] version of [`ArenaRc`] that does not keep the data or arena
/// alive, but can be upgraded back to an [`ArenaRc`] while the data is still
/// live.
#[derive(Clone, Debug)]
pub struct ArenaWeak<T: ?Sized>(Weak<ArenaStaticBox<T>>);

impl<T: ?Sized> ArenaRc<T> {
    /// Transforms the given [`ArenaBox`] into an [`ArenaRc`] with an owned
    /// reference to the given [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given [`ArenaBox`] is owned by this
    /// arena, or it may result in use-after-free.
    pub unsafe fn new_unchecked(arena: Arena, data: ArenaBox<'_, T>) -> ArenaRc<T> {
        // SAFETY: The `ArenaBox` will not outlive the `Arena` as it is owned
        // by the current struct and can't be moved out.
        let data = unsafe { ArenaBox::erase_lifetime(data) };
        Self(Arc::new(ArenaStaticBox { arena, data }))
    }

    /// Downgrades the given [`ArenaRc`] into an [`ArenaWeak`].
    pub fn downgrade(this: &Self) -> ArenaWeak<T> {
        ArenaWeak(Arc::downgrade(&this.0))
    }

    /// Attempts to take ownership over the arena and data backing the given
    /// [`ArenaRc`] if there is only one strong reference held to it.
    ///
    /// If there is only one strong reference, this returns the inner
    /// [`ArenaStaticBox`]. Otherwise it returns the original [`ArenaRc`]
    /// as the error value.
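    ///
    /// # Example
    ///
    /// A sketch of unwrapping failing while a second strong reference is
    /// held, then succeeding once it is dropped:
    ///
    /// ```
    /// # use fdf::{Arena, ArenaRc};
    /// let arena = Arena::new();
    /// let val = arena.make_rc(arena.insert(1));
    /// let val2 = val.clone();
    /// let val = ArenaRc::try_unwrap(val).expect_err("two strong refs, should fail");
    /// drop(val2);
    /// let inner = ArenaRc::try_unwrap(val).expect("one strong ref, should succeed");
    /// assert_eq!(*inner, 1);
    /// ```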
    pub fn try_unwrap(this: Self) -> Result<ArenaStaticBox<T>, Self> {
        Arc::try_unwrap(this.0).map_err(Self)
    }
}

impl<T: ?Sized> From<ArenaStaticBox<T>> for ArenaRc<T> {
    fn from(value: ArenaStaticBox<T>) -> Self {
        Self(Arc::new(value))
    }
}

impl<T: ?Sized> Deref for ArenaRc<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        ArenaBox::deref(&self.0.data)
    }
}

impl<T: ?Sized> ArenaWeak<T> {
    /// Attempts to upgrade this weak reference to a strong [`ArenaRc`],
    /// returning [`None`] if the backing data has already been dropped.
    pub fn upgrade(&self) -> Option<ArenaRc<T>> {
        self.0.upgrade().map(ArenaRc)
    }
}

/// Helper for allocating storage on the global heap for the given `layout`.
/// The layout may describe a single `T` or an unsized slice of `T`, in which
/// case the returned pointer addresses the first element of the storage.
///
/// This also correctly handles a zero sized type by returning
/// [`NonNull::dangling`].
///
/// # Safety
///
/// In addition to all the safety requirements of [`std::alloc::alloc`], the
/// caller must ensure that `T` is the element type of the storage described
/// by `layout`.
unsafe fn global_alloc<T>(layout: Layout) -> NonNull<T> {
    let storage = if layout.size() == 0 {
        NonNull::dangling()
    } else {
        // SAFETY: we just checked that the layout is not zero-sized.
        let ptr = unsafe { std::alloc::alloc(layout) };
        if ptr.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        unsafe { NonNull::new_unchecked(ptr as *mut T) }
    };
    storage
}

#[cfg(test)]
pub(crate) mod tests {
    use std::cell::Cell;
    use std::sync::mpsc;

    use super::*;

    /// Implements a cloneable object that will send only one message
    /// on an [`mpsc::Sender`] when its 'last' clone is dropped. It will assert
    /// if an attempt to re-clone an already cloned [`DropSender`] happens,
    /// ensuring that the object is only cloned in a linear path.
    pub struct DropSender<T: Clone>(pub T, Cell<Option<mpsc::Sender<T>>>);
    impl<T: Clone> DropSender<T> {
        pub fn new(val: T, sender: mpsc::Sender<T>) -> Self {
            Self(val, Cell::new(Some(sender)))
        }
    }
    impl<T: Clone> Drop for DropSender<T> {
        fn drop(&mut self) {
            match self.1.get_mut() {
                Some(sender) => {
                    println!("dropping a drop sender");
                    sender.send(self.0.clone()).unwrap();
                }
                _ => {}
            }
        }
    }
    impl<T: Clone> Clone for DropSender<T> {
        fn clone(&self) -> Self {
            Self(
                self.0.clone(),
                Cell::new(Some(self.1.take().expect("Attempted to re-clone a `DropSender`"))),
            )
        }
    }

    #[test]
    fn arena_allocations() {
        let arena = Arena::new();
        let _val = arena.insert(());
        let val = arena.insert(1);
        assert_eq!(*val, 1);
        let val = arena.insert(2);
        assert_eq!(*val, 2);
        let val = arena.insert_boxed_slice(Box::new([1, 2, 3, 4]));
        assert_eq!(&*val, &[1, 2, 3, 4]);
        let val: ArenaBox<'_, [()]> = arena.insert_boxed_slice(Box::new([]));
        assert_eq!(&*val, &[]);
        let val = arena.insert_slice(&[5, 6, 7, 8]);
        assert_eq!(&*val, &[5, 6, 7, 8]);
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        assert_eq!(&*val, &[]);
        let val: ArenaBox<'_, [u8]> = arena.insert_default_slice(10);
        assert_eq!(&*val, &[0; 10]);
    }

    #[test]
    #[allow(clippy::unit_cmp)]
    fn arena_take() {
        let arena = Arena::new();
        let val = arena.insert(());
        assert_eq!(ArenaBox::take(val), ());
        let val = arena.insert(1);
        assert_eq!(ArenaBox::take(val), 1);
    }

    #[test]
    #[allow(clippy::unit_cmp)]
    fn arena_take_boxed() {
        let arena = Arena::new();
        let val = arena.insert(());
        assert_eq!(*ArenaBox::take_boxed(val), ());
        let val = arena.insert(1);
        assert_eq!(*ArenaBox::take_boxed(val), 1);
    }

    #[test]
    fn arena_take_boxed_slice() {
        let arena = Arena::new();
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        assert_eq!(&*ArenaBox::take_boxed_slice(val), &[]);
        let val = arena.insert_slice(&[1, 2, 3, 4]);
        assert_eq!(&*ArenaBox::take_boxed_slice(val), &[1, 2, 3, 4]);
    }

    #[test]
    fn arena_drop() {
        let (tx, rx) = mpsc::channel();
        let arena = Arena::new();
        let val = arena.insert(DropSender::new(1, tx.clone()));
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 1);

        let val = arena.insert_boxed_slice(Box::new([DropSender::new(2, tx.clone())]));
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 2);

        let val = arena.insert_slice(&[DropSender::new(3, tx.clone())]);
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 3);

        rx.try_recv().expect_err("no more drops");
    }

    #[test]
    fn arena_take_drop() {
        let (tx, rx) = mpsc::channel();
        let arena = Arena::new();

        let val = arena.insert(DropSender::new(1, tx.clone()));
        let inner = ArenaBox::take(val);
        rx.try_recv().expect_err("shouldn't have dropped when taken");
        drop(inner);
        assert_eq!(rx.try_recv().unwrap(), 1);

        let val = arena.insert_slice(&[DropSender::new(2, tx.clone())]);
        let inner = ArenaBox::take_boxed_slice(val);
        rx.try_recv().expect_err("shouldn't have dropped when taken");
        drop(inner);
        assert_eq!(rx.try_recv().unwrap(), 2);

        rx.try_recv().expect_err("no more drops");
    }

    #[test]
    fn arena_contains() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val1 = arena1.insert(1);
        let val2 = arena2.insert(2);

        assert!(arena1.contains(&val1));
        assert!(arena2.contains(&val2));
        assert!(!arena1.contains(&val2));
        assert!(!arena2.contains(&val1));
    }

    #[test]
    fn arena_assume() {
        let arena = Arena::new();

        let val = arena.insert(1);
        let val_leaked = unsafe { ArenaBox::into_ptr(val) };
        let val = unsafe { arena.assume(val_leaked) };

        assert!(arena.contains(&val));
    }

    #[test]
    #[should_panic]
    fn arena_bad_assume() {
        let arena = Arena::new();

        unsafe { arena.assume(NonNull::<()>::dangling()) };
    }

    #[test]
    #[should_panic]
    fn bad_static_box_ownership() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val = arena1.insert(1);
        arena2.make_static(val);
    }

    #[test]
    #[should_panic]
    fn bad_rc_ownership() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val = arena1.insert(1);
        arena2.make_rc(val);
    }

    #[test]
    fn box_lifecycle() {
        let arena = Arena::new();

        // create the initial value and modify it
        let mut val = arena.insert(1);
        *val = 2;
        assert_eq!(*val, 2);

        // make it a static box and modify it
        let mut val = arena.make_static(val);
        *val = 3;
        assert_eq!(*val, 3);

        // make it into a refcounted shared pointer and check the value is still the
        // same
        let val = ArenaRc::from(val);
        assert_eq!(*val, 3);

        // clone the refcount and verify that we can't unwrap it back to a static box.
        let val_copied = val.clone();
        assert_eq!(*val_copied, 3);
        let val = ArenaRc::try_unwrap(val).expect_err("Double strong count should fail to unwrap");
        assert_eq!(*val, 3);
        drop(val_copied);

        // now that the cloned rc is gone, unwrap it back to a static box and modify it
        let mut val =
            ArenaRc::try_unwrap(val).expect("strong count should be one so this should unwrap now");
        *val = 4;
        assert_eq!(*val, 4);

        // bring it back to a normal arena box and modify it
        let mut shared_arena = None;
        let mut val = ArenaStaticBox::unwrap(val, &mut shared_arena);
        *val = 5;
        assert_eq!(*val, 5);

        // make it back into an rc but directly rather than from a static box
        let val = arena.make_rc(val);
        assert_eq!(*val, 5);
    }

    #[test]
    fn static_raw_roundtrip() {
        let arena = Arena::new();
        let val = arena.make_static(arena.insert(1));

        // turn it into raw pointers and modify it
        let (arena_ptr, mut data_ptr) = ArenaStaticBox::into_raw(val);
        *unsafe { data_ptr.as_mut() } = 2;
        assert_eq!(*unsafe { data_ptr.as_ref() }, 2);

        // reconstitute it back to an `ArenaBox` and then transform it
        let arena = unsafe { Arena::from_raw(arena_ptr) };
        let val = unsafe { arena.assume(data_ptr) };

        assert_eq!(*val, 2);
    }

    #[test]
    fn arena_into_and_from_iter() {
        let arena = Arena::new();

        // empty slice to vec
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        let vec_val = Vec::from_iter(val);
        assert!(vec_val.is_empty());

        // filled slice to vec
        let val = arena.insert_slice(&[1, 2, 3, 4]);
        let vec_val = Vec::from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*vec_val);

        // filled static slice to vec
        let val = arena.make_static(arena.insert_slice(&[1, 2, 3, 4]));
        let vec_val = Vec::from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*vec_val);

        // empty vec to arena box
        let val: Vec<()> = vec![];
        let arena_val = arena.insert_from_iter(val.clone());
        assert_eq!(val, &*arena_val);

        // filled vec to arena box
        let val = vec![1, 2, 3, 4];
        let arena_val = arena.insert_from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*arena_val);
    }

    #[test]
    fn arena_try_from_iter() {
        let arena = Arena::new();

        let val: Vec<Result<_, ()>> = vec![Ok(1), Ok(2), Ok(3), Ok(4)];
        let arena_val = arena.try_insert_from_iter(val).unwrap();
        assert_eq!(&[1, 2, 3, 4], &*arena_val);

        let (tx, rx) = mpsc::channel();
        let val = vec![Ok(DropSender::new(0, tx.clone())), Err(-1), Ok(DropSender::new(1, tx))];
        let Err(-1) = arena.try_insert_from_iter(val) else {
            panic!("early exit from try_insert_from_iter")
        };
        let Ok(0) = rx.try_recv() else {
            panic!("expected drop of leading ok value to have happened")
        };
        let Ok(1) = rx.try_recv() else {
            panic!("expected drop of trailing ok value to have happened")
        };
    }
}