// rkyv/ser/allocator/alloc.rs

use core::{
    alloc::Layout,
    marker::PhantomData,
    mem::{align_of, size_of, ManuallyDrop},
    ptr::{slice_from_raw_parts_mut, NonNull},
};

use crate::{
    alloc::alloc::{alloc, dealloc, handle_alloc_error},
    ser::Allocator,
};

/// A node in an intrusive, circular, singly-linked list of allocated blocks.
///
/// Each block stores a pointer to the next block in the loop along with that
/// block's size. A freshly-allocated block points to itself.
struct Block {
    next_ptr: NonNull<Block>,
    next_size: usize,
}

impl Block {
    fn alloc(size: usize) -> NonNull<Self> {
        debug_assert!(size >= size_of::<Self>());
        let layout = Layout::from_size_align(size, align_of::<Self>()).unwrap();
        let ptr = unsafe { alloc(layout).cast::<Self>() };
        let Some(ptr) = NonNull::new(ptr) else {
            handle_alloc_error(layout)
        };

        // A new block starts as a one-element loop: it is its own next block.
        unsafe {
            ptr.as_ptr().write(Self {
                next_ptr: ptr,
                next_size: layout.size(),
            });
        }

        ptr
    }

    unsafe fn dealloc(ptr: NonNull<Self>, size: usize) {
        // SAFETY: `size` and the alignment of `Block` were already valid for
        // `Layout::from_size_align` when the block was allocated.
        let layout = unsafe {
            Layout::from_size_align(size, align_of::<Self>()).unwrap_unchecked()
        };
        unsafe {
            dealloc(ptr.as_ptr().cast(), layout);
        }
    }

    /// Splices `new_ptr` into the loop immediately after `tail_ptr`.
    ///
    /// # Safety
    ///
    /// `tail_ptr` and `new_ptr` must point to valid `Block`s and `new_ptr` must
    /// be the only block in its loop.
    unsafe fn push_next(
        mut tail_ptr: NonNull<Self>,
        mut new_ptr: NonNull<Self>,
    ) {
        let tail = unsafe { tail_ptr.as_mut() };
        let new = unsafe { new_ptr.as_mut() };

        debug_assert!(new.next_ptr == new_ptr);

        // Link the new block in after the tail, making it point onward to the
        // old head of the loop.
        let head = tail.next_ptr;
        let head_cap = tail.next_size;
        tail.next_ptr = new_ptr;
        tail.next_size = new.next_size;
        new.next_ptr = head;
        new.next_size = head_cap;
    }
}
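
// A sketch of how `push_next` splices blocks: the blocks form a circular list
// in which the tail points back around to the head. Given
// `... -> tail -> head -> ...` and a fresh one-block loop `new -> new`,
// splicing produces `... -> tail -> new -> head -> ...`. The cached sizes
// travel with the pointers, so each block still records the size of the block
// it now points to.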

/// An arena allocator for serializer allocations.
///
/// Reusing the same arena for multiple serializations reduces the number of
/// global allocations, which can save a considerable amount of time.
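///
/// # Example
///
/// A minimal sketch of reuse, mirroring the `reuse_arena` test at the bottom
/// of this file:
///
/// ```
/// use rancor::Panic;
/// use rkyv::{
///     api::high::to_bytes_in_with_alloc, ser::allocator::Arena,
///     util::AlignedVec,
/// };
///
/// let mut arena = Arena::new();
/// // Each serialization borrows the arena through a fresh handle and reuses
/// // its blocks instead of hitting the global allocator.
/// for _ in 0..10 {
///     to_bytes_in_with_alloc::<_, _, Panic>(
///         &"hello world".to_string(),
///         AlignedVec::<16>::new(),
///         arena.acquire(),
///     )
///     .unwrap();
/// }
/// ```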
pub struct Arena {
    head_ptr: NonNull<Block>,
}

// SAFETY: `Arena` exclusively owns its blocks, so it is safe to send to other
// threads.
unsafe impl Send for Arena {}

impl Drop for Arena {
    fn drop(&mut self) {
        // Shrinking first frees every block except the retained one, which is
        // then deallocated directly.
        self.shrink();
        let head_size = unsafe { self.head_ptr.as_ref().next_size };
        unsafe {
            Block::dealloc(self.head_ptr, head_size);
        }
    }
}

impl Arena {
    /// The default capacity for arenas.
    pub const DEFAULT_CAPACITY: usize = 1024;

    /// Creates a new `Arena` with the default capacity.
    pub fn new() -> Self {
        Self::with_capacity(Self::DEFAULT_CAPACITY)
    }

    /// Creates a new `Arena` with at least the requested capacity.
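    ///
    /// # Example
    ///
    /// A minimal sketch; `1024` here is an arbitrary request, and the arena
    /// may round the allocation up:
    ///
    /// ```
    /// use rkyv::ser::allocator::Arena;
    ///
    /// let arena = Arena::with_capacity(1024);
    /// assert!(arena.capacity() >= 1024);
    /// ```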
    pub fn with_capacity(cap: usize) -> Self {
        // Round up so that the block header and the requested capacity fit in
        // a power-of-two allocation.
        let head_size = (cap + size_of::<Block>()).next_power_of_two();
        let head_ptr = Block::alloc(head_size);
        Self { head_ptr }
    }

    /// Cleans up allocated blocks which are no longer in use, returning the
    /// capacity of the retained block.
    ///
    /// The arena is automatically shrunk by [`acquire`](Self::acquire).
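    ///
    /// # Example
    ///
    /// A minimal sketch; with only one block in the arena, `shrink` has
    /// nothing to free and returns the arena's current capacity:
    ///
    /// ```
    /// use rkyv::ser::allocator::Arena;
    ///
    /// let mut arena = Arena::with_capacity(256);
    /// assert_eq!(arena.shrink(), arena.capacity());
    /// ```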
    pub fn shrink(&mut self) -> usize {
        let (mut current_ptr, mut current_size) = {
            let head = unsafe { self.head_ptr.as_ref() };
            (head.next_ptr, head.next_size)
        };

        loop {
            let current = unsafe { current_ptr.as_mut() };

            if current.next_ptr == current_ptr {
                // There was only one block in the loop. No deallocation
                // needed.
                break;
            }

            let next_ptr = current.next_ptr;
            let next_size = current.next_size;

            if next_ptr == self.head_ptr {
                // End of the loop. Free the head block.
                unsafe {
                    Block::dealloc(next_ptr, next_size);
                }

                // Loop the head back on itself.
                current.next_ptr = current_ptr;
                current.next_size = current_size;
                self.head_ptr = current_ptr;

                break;
            }

            unsafe {
                Block::dealloc(current_ptr, current_size);
            }

            current_ptr = next_ptr;
            current_size = next_size;
        }

        current_size - size_of::<Block>()
    }

    /// Returns the available capacity of the arena.
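    ///
    /// # Example
    ///
    /// A minimal sketch; a fresh arena reports at least
    /// [`DEFAULT_CAPACITY`](Self::DEFAULT_CAPACITY):
    ///
    /// ```
    /// use rkyv::ser::allocator::Arena;
    ///
    /// let arena = Arena::new();
    /// assert!(arena.capacity() >= Arena::DEFAULT_CAPACITY);
    /// ```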
    pub fn capacity(&self) -> usize {
        let mut current_ptr = self.head_ptr;
        loop {
            let current = unsafe { current_ptr.as_ref() };
            if current.next_ptr == self.head_ptr {
                break current.next_size - size_of::<Block>();
            }
            current_ptr = current.next_ptr;
        }
    }

    /// Acquires a handle to the arena.
    ///
    /// The returned handle has exclusive allocation rights in the arena.
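    ///
    /// # Example
    ///
    /// A minimal sketch; the handle mutably borrows the arena, so a new
    /// handle can only be acquired once the previous one is dropped:
    ///
    /// ```
    /// use rkyv::ser::allocator::Arena;
    ///
    /// let mut arena = Arena::new();
    /// let handle = arena.acquire();
    /// drop(handle);
    /// let _handle = arena.acquire();
    /// ```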
    pub fn acquire(&mut self) -> ArenaHandle<'_> {
        self.shrink();

        ArenaHandle {
            tail_ptr: self.head_ptr,
            tail_size: unsafe { self.head_ptr.as_ref().next_size },
            used: size_of::<Block>(),
            _phantom: PhantomData,
        }
    }

    /// Consumes the `Arena`, returning a raw pointer.
    pub fn into_raw(self) -> NonNull<()> {
        // Suppress `Drop` so the blocks stay live behind the raw pointer.
        let this = ManuallyDrop::new(self);
        this.head_ptr.cast()
    }

    /// Constructs an arena from a raw pointer.
    ///
    /// # Safety
    ///
    /// `raw` must have been returned from `into_raw`. `from_raw` takes
    /// ownership over the pointer, and so `from_raw` must not be called on the
    /// same pointer more than once.
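    ///
    /// # Example
    ///
    /// A minimal round-trip sketch:
    ///
    /// ```
    /// use rkyv::ser::allocator::Arena;
    ///
    /// let arena = Arena::new();
    /// let raw = arena.into_raw();
    /// // SAFETY: `raw` came from `into_raw` and is reclaimed exactly once.
    /// let _arena = unsafe { Arena::from_raw(raw) };
    /// ```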
    pub unsafe fn from_raw(raw: NonNull<()>) -> Self {
        Self {
            head_ptr: raw.cast(),
        }
    }
}

impl Default for Arena {
    fn default() -> Self {
        Self::new()
    }
}

/// A handle which can allocate within an arena.
pub struct ArenaHandle<'a> {
    /// The block currently being bump-allocated from.
    tail_ptr: NonNull<Block>,
    /// The total size of the tail block.
    tail_size: usize,
    /// The number of bytes of the tail block already in use, including its
    /// `Block` header.
    used: usize,
    _phantom: PhantomData<&'a mut Arena>,
}

// SAFETY: `ArenaHandle` holds an exclusive borrow of its `Arena`, so it is
// safe to send to other threads.
unsafe impl Send for ArenaHandle<'_> {}

unsafe impl<E> Allocator<E> for ArenaHandle<'_> {
    unsafe fn push_alloc(
        &mut self,
        layout: Layout,
    ) -> Result<NonNull<[u8]>, E> {
        // Compute the padding needed to align the next allocation. For an
        // alignment `a`, `(0 - pos) % a` is the distance from `pos` up to the
        // next multiple of `a`.
        let pos = self.tail_ptr.as_ptr() as usize + self.used;
        let pad = 0usize.wrapping_sub(pos) % layout.align();
        if pad + layout.size() <= self.tail_size - self.used {
            self.used += pad;
        } else {
            // The request doesn't fit in the tail block. Allocate a new block
            // at least twice as large and splice it into the loop.
            let size = usize::max(
                2 * self.tail_size,
                (size_of::<Block>() + layout.size() + layout.align())
                    .next_power_of_two(),
            );
            let next = Block::alloc(size);
            unsafe {
                Block::push_next(self.tail_ptr, next);
            }
            self.tail_ptr = next;
            self.tail_size = size;
            let pos = self.tail_ptr.as_ptr() as usize + size_of::<Block>();
            let pad = 0usize.wrapping_sub(pos) % layout.align();
            self.used = size_of::<Block>() + pad;
        }

        // SAFETY: `self.used` is always less than the length of the allocated
        // block that `tail_ptr` points to.
        let ptr = unsafe { self.tail_ptr.as_ptr().cast::<u8>().add(self.used) };
        let slice_ptr = slice_from_raw_parts_mut(ptr, layout.size());
        // SAFETY: `slice_ptr` is guaranteed not to be null because it is
        // offset from `self.tail_ptr` which is always non-null.
        let result = unsafe { NonNull::new_unchecked(slice_ptr) };
        self.used += layout.size();
        Ok(result)
    }

    unsafe fn pop_alloc(
        &mut self,
        ptr: NonNull<u8>,
        _: Layout,
    ) -> Result<(), E> {
        // If the popped allocation was in the current tail block, then we can
        // reduce the amount of used space.
        let start = self.tail_ptr.as_ptr() as usize;
        let end = start + self.tail_size;
        let pos = ptr.as_ptr() as usize;
        if (start..end).contains(&pos) {
            self.used = pos - start;
        }

        Ok(())
    }
}
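
// A worked example of the padding math in `push_alloc` (with hypothetical
// addresses): for `pos = 0x1009` and `layout.align() = 8`,
// `0usize.wrapping_sub(0x1009)` ends in `...ff7`, and `0x...ff7 % 8 = 7`, so
// 7 bytes of padding advance the cursor to the 8-aligned address `0x1010`.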

#[cfg(test)]
mod tests {
    use core::alloc::Layout;

    use rancor::{Panic, ResultExt};

    use crate::{
        alloc::{string::ToString, vec},
        api::high::to_bytes_in_with_alloc,
        ser::{allocator::Arena, Allocator},
        util::AlignedVec,
    };

    #[test]
    fn reuse_arena() {
        let mut arena = Arena::with_capacity(2);

        let value = vec![
            "hello".to_string(),
            "world".to_string(),
            "foo".to_string(),
            "bar".to_string(),
            "baz".to_string(),
        ];

        for _ in 0..10 {
            to_bytes_in_with_alloc::<_, _, Panic>(
                &value,
                AlignedVec::<16>::new(),
                arena.acquire(),
            )
            .unwrap();
        }
    }

    #[test]
    fn pop_non_tail() {
        let mut arena = Arena::new();
        let mut handle = arena.acquire();

        let layout =
            Layout::from_size_align(Arena::DEFAULT_CAPACITY, 1).unwrap();

        unsafe {
            let a =
                Allocator::<Panic>::push_alloc(&mut handle, layout).always_ok();
            let b =
                Allocator::<Panic>::push_alloc(&mut handle, layout).always_ok();
            Allocator::<Panic>::pop_alloc(&mut handle, b.cast(), layout)
                .always_ok();
            Allocator::<Panic>::pop_alloc(&mut handle, a.cast(), layout)
                .always_ok();
        }
    }
}
323}