typed_arena/
lib.rs

//! The arena, a fast but limited type of allocator.
//!
//! **A fast (but limited) allocation arena for values of a single type.**
//!
//! Allocated objects are destroyed all at once, when the arena itself is
//! destroyed. There is no deallocation of individual objects while the arena
//! itself is still alive. The flipside is that allocation is fast: typically
//! just a vector push.
//!
//! There is also a method `into_vec()` to recover ownership of allocated
//! objects when the arena is no longer required, instead of destroying
//! everything.
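//!
//! For instance, a quick sketch of recovering the values (they come back in
//! allocation order):
//!
//! ```
//! use typed_arena::Arena;
//!
//! let arena = Arena::new();
//! arena.alloc(1);
//! arena.alloc(2);
//!
//! assert_eq!(arena.into_vec(), vec![1, 2]);
//! ```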
//!
//! ## Example
//!
//! ```
//! use typed_arena::Arena;
//!
//! struct Monster {
//!     level: u32,
//! }
//!
//! let monsters = Arena::new();
//!
//! let vegeta = monsters.alloc(Monster { level: 9001 });
//! assert!(vegeta.level > 9000);
//! ```
//!
//! ## Safe Cycles
//!
//! All allocated objects get the same lifetime, so you can safely create cycles
//! between them. This can be useful for certain data structures, such as graphs
//! and trees with parent pointers.
//!
//! ```
//! use std::cell::Cell;
//! use typed_arena::Arena;
//!
//! struct CycleParticipant<'a> {
//!     other: Cell<Option<&'a CycleParticipant<'a>>>,
//! }
//!
//! let arena = Arena::new();
//!
//! let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
//! let b = arena.alloc(CycleParticipant { other: Cell::new(None) });
//!
//! a.other.set(Some(b));
//! b.other.set(Some(a));
//! ```

// Potential optimizations:
// 1) add and stabilize a method for in-place reallocation of vecs.
// 2) add and stabilize placement new.
// 3) use an iterator. This may add far too much unsafe code.

#![deny(missing_docs)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]
#![cfg_attr(not(feature = "std"), feature(alloc))]

#[cfg(not(feature = "std"))]
extern crate alloc;

#[cfg(any(feature = "std", test))]
extern crate core;

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

use core::cell::RefCell;
use core::cmp;
use core::iter;
use core::mem;
use core::slice;

#[cfg(test)]
mod test;

// Initial size in bytes.
const INITIAL_SIZE: usize = 1024;
// Minimum capacity. Must be larger than 0.
const MIN_CAPACITY: usize = 1;

/// An arena of objects of type `T`.
///
/// ## Example
///
/// ```
/// use typed_arena::Arena;
///
/// struct Monster {
///     level: u32,
/// }
///
/// let monsters = Arena::new();
///
/// let vegeta = monsters.alloc(Monster { level: 9001 });
/// assert!(vegeta.level > 9000);
/// ```
pub struct Arena<T> {
    chunks: RefCell<ChunkList<T>>,
}

struct ChunkList<T> {
    current: Vec<T>,
    rest: Vec<Vec<T>>,
}

impl<T> Arena<T> {
    /// Construct a new arena.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// # arena.alloc(1);
    /// ```
    pub fn new() -> Arena<T> {
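        // Aim for roughly INITIAL_SIZE bytes in the first chunk; the
        // `max(1, ...)` treats zero-sized types as one byte to avoid
        // dividing by zero.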
        let size = cmp::max(1, mem::size_of::<T>());
        Arena::with_capacity(INITIAL_SIZE / size)
    }

    /// Construct a new arena with capacity for `n` values pre-allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(1337);
    /// # arena.alloc(1);
    /// ```
    pub fn with_capacity(n: usize) -> Arena<T> {
        let n = cmp::max(MIN_CAPACITY, n);
        Arena {
            chunks: RefCell::new(ChunkList {
                current: Vec::with_capacity(n),
                rest: Vec::new(),
            }),
        }
    }

    /// Returns the number of values allocated in the arena.
    ///
    /// This is useful when you want to size a new arena based on how many
    /// values a previous arena ended up holding.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(0);
    /// arena.alloc(1);
    /// arena.alloc(2);
    ///
    /// assert_eq!(arena.len(), 2);
    /// ```
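    ///
    /// For instance, a small sketch of the sizing pattern mentioned above:
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let first = Arena::new();
    /// first.alloc("a");
    /// first.alloc("b");
    ///
    /// // Pre-size a second arena from the first one's length.
    /// let second = Arena::with_capacity(first.len());
    /// # second.alloc("c");
    /// ```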
    pub fn len(&self) -> usize {
        let chunks = self.chunks.borrow();

        let mut res = 0;
        for vec in chunks.rest.iter() {
            res += vec.len()
        }

        res + chunks.current.len()
    }

    /// Allocates a value in the arena, and returns a mutable reference
    /// to that value.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let x = arena.alloc(42);
    /// assert_eq!(*x, 42);
    /// ```
    #[inline]
    pub fn alloc(&self, value: T) -> &mut T {
        self.alloc_fast_path(value)
            .unwrap_or_else(|value| self.alloc_slow_path(value))
    }

    #[inline]
    fn alloc_fast_path(&self, value: T) -> Result<&mut T, T> {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.len();
        if len < chunks.current.capacity() {
            chunks.current.push(value);
            // Avoid going through `Vec::deref_mut`, which overlaps
            // other references we have already handed out!
            debug_assert!(len < chunks.current.len()); // bounds check
            Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) })
        } else {
            Err(value)
        }
    }

    fn alloc_slow_path(&self, value: T) -> &mut T {
        &mut self.alloc_extend(iter::once(value))[0]
    }

    /// Uses the contents of an iterator to allocate values in the arena.
    /// Returns a mutable slice that contains these values.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let abc = arena.alloc_extend("abcdefg".chars().take(3));
    /// assert_eq!(abc, ['a', 'b', 'c']);
    /// ```
    pub fn alloc_extend<I>(&self, iterable: I) -> &mut [T]
    where
        I: IntoIterator<Item = T>,
    {
        let mut iter = iterable.into_iter();

        let mut chunks = self.chunks.borrow_mut();

        let iter_min_len = iter.size_hint().0;
        let mut next_item_index;
        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if iter_min_len > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(iter_min_len);
            chunks.current.extend(iter);
            next_item_index = 0;
        } else {
            next_item_index = chunks.current.len();
            let mut i = 0;
            while let Some(elem) = iter.next() {
                if chunks.current.len() == chunks.current.capacity() {
                    // The iterator was larger than we could fit into the current chunk.
                    let chunks = &mut *chunks;
                    // Create a new chunk into which we can freely push the entire iterator.
                    chunks.reserve(i + 1);
                    let previous_chunk = chunks.rest.last_mut().unwrap();
                    let previous_chunk_len = previous_chunk.len();
                    // Move any elements we put into the previous chunk into this new chunk
                    chunks
                        .current
                        .extend(previous_chunk.drain(previous_chunk_len - i..));
                    chunks.current.push(elem);
                    // And the remaining elements in the iterator
                    chunks.current.extend(iter);
                    next_item_index = 0;
                    break;
                } else {
                    chunks.current.push(elem);
                }
                i += 1;
            }
        }
        let new_slice_ref = &mut chunks.current[next_item_index..];

        // Extend the lifetime from that of `chunks_borrow` to that of `self`.
        // This is OK because we’re careful to never move items
        // by never pushing to inner `Vec`s beyond their initial capacity.
        // The returned reference is unique (`&mut`):
        // the `Arena` never gives away references to existing items.
        unsafe { mem::transmute::<&mut [T], &mut [T]>(new_slice_ref) }
    }

    /// Allocates space for a given number of values, but doesn't initialize it.
    ///
    /// ## Unsafety and Undefined Behavior
    ///
    /// The same caveats that apply to
    /// [`std::mem::uninitialized`](https://doc.rust-lang.org/nightly/std/mem/fn.uninitialized.html)
    /// apply here:
    ///
    /// > **This is incredibly dangerous and should not be done lightly. Deeply
    /// consider initializing your memory with a default value instead.**
    ///
    /// In particular, it is easy to trigger undefined behavior by allocating
    /// uninitialized values, failing to properly initialize them, and then
    /// letting the `Arena` drop them when it is dropped. Initializing an
    /// uninitialized value is trickier than it might seem: a normal assignment
    /// to a field will attempt to drop the old, uninitialized value, which
    /// almost certainly also triggers undefined behavior. You must also
    /// consider all the places where your code might "unexpectedly" drop values
    /// earlier than it "should" because of unwinding during panics.
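    ///
    /// ## Example
    ///
    /// A minimal sketch of one correct way to use this (every slot is
    /// initialized with `ptr::write` before the arena is dropped); the
    /// element type and values here are arbitrary:
    ///
    /// ```
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u32> = Arena::with_capacity(8);
    /// unsafe {
    ///     let uninit: *mut [u32] = arena.alloc_uninitialized(3);
    ///     // Casting away the slice metadata gives a pointer to the first slot.
    ///     let base = uninit as *mut u32;
    ///     for i in 0..3 {
    ///         // `ptr::write` does not drop the (uninitialized) old contents.
    ///         ptr::write(base.add(i), i as u32 * 10);
    ///     }
    ///     assert_eq!(*base.add(2), 20);
    /// }
    /// ```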
    pub unsafe fn alloc_uninitialized(&self, num: usize) -> *mut [T] {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }

        // At this point, the current chunk must have free capacity.
        let next_item_index = chunks.current.len();
        chunks.current.set_len(next_item_index + num);
        // Extend the lifetime...
        &mut chunks.current[next_item_index..] as *mut _
    }

    /// Returns unused space.
    ///
    /// *This unused space is still not considered "allocated".* Therefore, it
    /// won't be dropped unless there are further calls to `alloc`,
    /// `alloc_uninitialized`, or `alloc_extend`, which is why this method is
    /// safe.
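    ///
    /// ## Example
    ///
    /// A minimal sketch (nothing here writes through the pointer, since that
    /// would require `unsafe` and careful initialization):
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u8> = Arena::with_capacity(16);
    /// arena.alloc(1);
    ///
    /// // Obtaining the pointer is safe; the space it covers is not yet
    /// // considered allocated, so the arena's length is unchanged.
    /// let _unused: *mut [u8] = arena.uninitialized_array();
    /// assert_eq!(arena.len(), 1);
    /// ```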
    pub fn uninitialized_array(&self) -> *mut [T] {
        let chunks = self.chunks.borrow();
        let len = chunks.current.capacity() - chunks.current.len();
        let next_item_index = chunks.current.len();
        let slice = &chunks.current[next_item_index..];
        unsafe { slice::from_raw_parts_mut(slice.as_ptr() as *mut T, len) as *mut _ }
    }

    /// Convert this `Arena` into a `Vec<T>`.
    ///
    /// Items in the resulting `Vec<T>` appear in the order that they were
    /// allocated in.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    ///
    /// arena.alloc("a");
    /// arena.alloc("b");
    /// arena.alloc("c");
    ///
    /// let easy_as_123 = arena.into_vec();
    ///
    /// assert_eq!(easy_as_123, vec!["a", "b", "c"]);
    /// ```
    pub fn into_vec(self) -> Vec<T> {
        let mut chunks = self.chunks.into_inner();
        // keep order of allocation in the resulting Vec
        let n = chunks
            .rest
            .iter()
            .fold(chunks.current.len(), |a, v| a + v.len());
        let mut result = Vec::with_capacity(n);
        for mut vec in chunks.rest {
            result.append(&mut vec);
        }
        result.append(&mut chunks.current);
        result
    }

    /// Returns an iterator that allows modifying each value.
    ///
    /// Items are yielded in the order that they were allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// #[derive(Debug, PartialEq, Eq)]
    /// struct Point { x: i32, y: i32 }
    ///
    /// let mut arena = Arena::new();
    ///
    /// arena.alloc(Point { x: 0, y: 0 });
    /// arena.alloc(Point { x: 1, y: 1 });
    ///
    /// for point in arena.iter_mut() {
    ///     point.x += 10;
    /// }
    ///
    /// let points = arena.into_vec();
    ///
    /// assert_eq!(points, vec![Point { x: 10, y: 0 }, Point { x: 11, y: 1 }]);
    /// ```
    ///
    /// ## Immutable Iteration
    ///
    /// Note that there is no corresponding `iter` method. Access to the arena's contents
    /// requires mutable access to the arena itself.
    ///
    /// ```compile_fail
    /// use typed_arena::Arena;
    ///
    /// let mut arena = Arena::new();
    /// let x = arena.alloc(1);
    ///
    /// // borrow error!
    /// for i in arena.iter_mut() {
    ///     println!("i: {}", i);
    /// }
    ///
    /// // borrow error!
    /// *x = 2;
    /// ```
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        let chunks = self.chunks.get_mut();
        let position = if !chunks.rest.is_empty() {
            let index = 0;
            let inner_iter = chunks.rest[index].iter_mut();
            // Extend the lifetime of the individual elements to that of the arena.
            // This is OK because we borrow the arena mutably to prevent new allocations
            // and we take care here to never move items inside the arena while the
            // iterator is alive.
            let inner_iter = unsafe { mem::transmute(inner_iter) };
            IterMutState::ChunkListRest { index, inner_iter }
        } else {
            // Extend the lifetime of the individual elements to that of the arena.
            let iter = unsafe { mem::transmute(chunks.current.iter_mut()) };
            IterMutState::ChunkListCurrent { iter }
        };
        IterMut {
            chunks,
            state: position,
        }
    }
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T> ChunkList<T> {
    #[inline(never)]
    #[cold]
    fn reserve(&mut self, additional: usize) {
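        // Grow by at least doubling the current chunk's capacity, and by at
        // least enough to hold `additional` more values. The old chunk is
        // moved into `rest` rather than reallocated, so references already
        // handed out stay valid.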
        let double_cap = self
            .current
            .capacity()
            .checked_mul(2)
            .expect("capacity overflow");
        let required_cap = additional
            .checked_next_power_of_two()
            .expect("capacity overflow");
        let new_capacity = cmp::max(double_cap, required_cap);
        let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity));
        self.rest.push(chunk);
    }
}

enum IterMutState<'a, T> {
    ChunkListRest {
        index: usize,
        inner_iter: slice::IterMut<'a, T>,
    },
    ChunkListCurrent {
        iter: slice::IterMut<'a, T>,
    },
}

/// Mutable arena iterator.
///
/// This struct is created by the [`iter_mut`](struct.Arena.html#method.iter_mut) method on [Arenas](struct.Arena.html).
pub struct IterMut<'a, T: 'a> {
    chunks: &'a mut ChunkList<T>,
    state: IterMutState<'a, T>,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<&'a mut T> {
        loop {
            self.state = match self.state {
                IterMutState::ChunkListRest {
                    mut index,
                    ref mut inner_iter,
                } => {
                    match inner_iter.next() {
                        Some(item) => return Some(item),
                        None => {
                            index += 1;
                            if index < self.chunks.rest.len() {
                                let inner_iter = self.chunks.rest[index].iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let inner_iter = unsafe { mem::transmute(inner_iter) };
                                IterMutState::ChunkListRest { index, inner_iter }
                            } else {
                                let iter = self.chunks.current.iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let iter = unsafe { mem::transmute(iter) };
                                IterMutState::ChunkListCurrent { iter }
                            }
                        }
                    }
                }
                IterMutState::ChunkListCurrent { ref mut iter } => return iter.next(),
            };
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
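        // These values are only hints: `min` counts the current chunk plus
        // the last retired chunk, and `max` pads `min` by roughly one
        // current chunk's worth of capacity.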
        let current_len = self.chunks.current.len();
        let current_cap = self.chunks.current.capacity();
        if self.chunks.rest.is_empty() {
            (current_len, Some(current_len))
        } else {
            let rest_len = self.chunks.rest.len();
            let last_chunk_len = self
                .chunks
                .rest
                .last()
                .map(|chunk| chunk.len())
                .unwrap_or(0);

            let min = current_len + last_chunk_len;
            let max = min + (rest_len * current_cap / rest_len);

            (min, Some(max))
        }
    }
}