surpass/layout/slice_cache.rs
// Copyright 2022 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// IMPORTANT: Upon any code-related modification to this file, please ensure that all commented-out
// tests that start with `fails_` actually fail to compile *independently* from one
// another.

use std::marker::PhantomData;
use std::ops::{Bound, Deref, DerefMut, RangeBounds};
use std::ptr::NonNull;
use std::{fmt, hint, mem, slice};

use crossbeam_utils::atomic::AtomicCell;

// `SliceCache` is virtually identical to a `Vec<Range<usize>>` whose ranges are statically
// guaranteed not to overlap or overflow the initial `slice` that the object has been made with.
//
// This is achieved by forcing the user to produce `Span`s in a closure provided to the constructor
// from a root `Span` that cannot escape the closure.
//
// In `SliceCache::access`, we make sure that the slice is at least as long as the `len` passed to
// `SliceCache::new` and then save the pointer to the global `ROOT` that is then guarded until the
// `Ref` is dropped.
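//
// For example, a cache created with `SliceCache::new(8, f)` whose closure `f` splits the root
// span into `0..4` and `4..8` will, for any slice of length at least 8 passed to `access`,
// hand out exactly the sub-slices `&mut slice[0..4]` and `&mut slice[4..8]`.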

#[repr(transparent)]
#[derive(Clone, Copy, Eq, PartialEq)]
struct SendNonNull<T> {
    ptr: NonNull<T>,
}

unsafe impl<T> Send for SendNonNull<T> {}

impl<T> From<NonNull<T>> for SendNonNull<T> {
    fn from(ptr: NonNull<T>) -> Self {
        Self { ptr }
    }
}

static ROOT: AtomicCell<Option<SendNonNull<()>>> = AtomicCell::new(None);

/// A [`prim@slice`] wrapper produced by [`SliceCache::access`].
#[repr(C)]
pub struct Slice<'a, T> {
    offset: isize,
    len: usize,
    _phantom: PhantomData<&'a mut [T]>,
}

// Since this type is equivalent to `&mut [T]`, it also implements `Send`.
unsafe impl<'a, T: Send> Send for Slice<'a, T> {}

// Since this type is equivalent to `&mut [T]`, it also implements `Sync`.
unsafe impl<'a, T: Sync> Sync for Slice<'a, T> {}

impl<'a, T> Deref for Slice<'a, T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &'a Self::Target {
        let root: NonNull<T> = ROOT.load().unwrap().ptr.cast();

        // `Slice`s should only be dereferenced when tainted with the `'s` lifetime from the
        // `SliceCache::access` method. This ensures that the slice that results from dereferencing
        // here will also be constrained by the same lifetime.
        //
        // This also expects the `ROOT` pointer to be correctly set up in `SliceCache::access`.
        unsafe { slice::from_raw_parts(root.as_ptr().offset(self.offset), self.len) }
    }
}

impl<'a, T> DerefMut for Slice<'a, T> {
    #[inline]
    fn deref_mut(&mut self) -> &'a mut Self::Target {
        let root: NonNull<T> = ROOT.load().unwrap().ptr.cast();

        // `Slice`s should only be dereferenced when tainted with the `'s` lifetime from the
        // `SliceCache::access` method. This ensures that the slice that results from dereferencing
        // here will also be constrained by the same lifetime.
        //
        // This also expects the `ROOT` pointer to be correctly set up in `SliceCache::access`.
        unsafe { slice::from_raw_parts_mut(root.as_ptr().offset(self.offset), self.len) }
    }
}

impl<T: fmt::Debug> fmt::Debug for Slice<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}

/// A marker produced by [`SliceCache`] that ensures that all resulting `Span`s will be mutually
/// non-overlapping.
///
/// # Examples
///
/// ```
/// # use surpass::layout::SliceCache;
/// let mut cache = SliceCache::new(4, |span| {
///     Box::new([span])
/// });
/// ```
#[repr(transparent)]
pub struct Span<'a>(Slice<'a, ()>);

impl<'a> Span<'a> {
    fn from_slice(slice: &Slice<'a, ()>) -> Self {
        Self(Slice { offset: slice.offset, len: slice.len, _phantom: PhantomData })
    }

    /// Returns the sub-span covered by `range`, or `None` if the range is out of bounds.
    /// Analogous to [`slice::get`].
    ///
    /// # Examples
    ///
    /// ```
    /// # use surpass::layout::SliceCache;
    /// let mut cache = SliceCache::new(4, |span| {
    ///     Box::new([span.slice(1..3).unwrap()])
    /// });
    /// ```
    #[inline]
    pub fn slice<R: RangeBounds<usize>>(&self, range: R) -> Option<Self> {
        let start = match range.start_bound() {
            Bound::Included(&i) => i,
            Bound::Excluded(&i) => i + 1,
            Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            Bound::Included(&i) => i + 1,
            Bound::Excluded(&i) => i,
            Bound::Unbounded => self.0.len,
        };

        (start <= end && end <= self.0.len).then(|| {
            Span(Slice { offset: self.0.offset + start as isize, len: end - start, ..self.0 })
        })
    }

    /// Splits the span into two at `mid`. Analogous to [`slice::split_at`].
    ///
    /// # Examples
    ///
    /// ```
    /// # use surpass::layout::SliceCache;
    /// let mut cache = SliceCache::new(4, |span| {
    ///     let (left, right) = span.split_at(2);
    ///     Box::new([left, right])
    /// });
    /// ```
    #[inline]
    pub fn split_at(&self, mid: usize) -> (Self, Self) {
        assert!(mid <= self.0.len);

        (
            Span(Slice { len: mid, ..self.0 }),
            Span(Slice { offset: self.0.offset + mid as isize, len: self.0.len - mid, ..self.0 }),
        )
    }

    /// Returns an [iterator](Chunks) over `chunk_size` elements of the span at a time. Analogous
    /// to [`slice::chunks`].
    ///
    /// # Examples
    ///
    /// ```
    /// # use surpass::layout::SliceCache;
    /// let mut cache = SliceCache::new(4, |span| {
    ///     span.chunks(2).collect()
    /// });
    /// ```
    #[inline]
    pub fn chunks(self, chunk_size: usize) -> Chunks<'a> {
        Chunks { slice: self.0, size: chunk_size }
    }
}

impl fmt::Debug for Span<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.offset.fmt(f)?;
        write!(f, "..")?;
        self.0.len.fmt(f)?;

        Ok(())
    }
}

/// An [iterator](std::iter::Iterator) returned by [`Span::chunks`].
pub struct Chunks<'a> {
    slice: Slice<'a, ()>,
    size: usize,
}

impl<'a> Iterator for Chunks<'a> {
    type Item = Span<'a>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        (self.slice.len > 0).then(|| {
            let span = Span(Slice { len: self.size.min(self.slice.len), ..self.slice });

            self.slice.offset += self.size as isize;
            self.slice.len = self.slice.len.saturating_sub(self.size);

            span
        })
    }
}

impl fmt::Debug for Chunks<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Chunks")
            .field("span", &Span::from_slice(&self.slice))
            .field("size", &self.size)
            .finish()
    }
}

/// A [reference] wrapper returned by [`SliceCache::access`].
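///
/// Dereferences to the slice of [`Slice`]s set up by the closure in [`SliceCache::new`]:
///
/// ```
/// # use surpass::layout::SliceCache;
/// let mut array = [1, 2, 3];
///
/// let mut cache = SliceCache::new(3, |span| {
///     let (left, right) = span.split_at(1);
///     Box::new([left, right])
/// });
///
/// let slices = cache.access(&mut array).unwrap();
/// assert_eq!(slices.len(), 2);
/// ```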
#[repr(transparent)]
#[derive(Debug)]
pub struct Ref<'a, T: ?Sized>(&'a mut T);

impl<'a, T: ?Sized> Ref<'a, T> {
    pub fn get(&'a mut self) -> &'a mut T {
        self.0
    }
}

impl<T: ?Sized> Deref for Ref<'_, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.0
    }
}

impl<T: ?Sized> DerefMut for Ref<'_, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0
    }
}

impl<T: ?Sized> Drop for Ref<'_, T> {
    #[inline]
    fn drop(&mut self) {
        ROOT.store(None);
    }
}

/// A cache of non-overlapping mutable sub-slices that enforces lifetimes dynamically.
///
/// This type is useful when you have to give up on the mutable reference to a slice but need
/// a way to cache mutable sub-slices deriving from it.
///
/// # Examples
///
/// ```
/// # use surpass::layout::SliceCache;
/// let mut array = [1, 2, 3];
///
/// let mut cache = SliceCache::new(3, |span| {
///     let (left, right) = span.split_at(1);
///     Box::new([left, right])
/// });
///
/// for slice in cache.access(&mut array).unwrap().iter_mut() {
///     for val in slice.iter_mut() {
///         *val += 1;
///     }
/// }
///
/// assert_eq!(array, [2, 3, 4]);
/// ```
pub struct SliceCache {
    len: usize,
    slices: Box<[Slice<'static, ()>]>,
}

impl SliceCache {
    /// Creates a new slice cache by storing sub-spans created from a root span passed to the
    /// closure `f`. `len` is the minimum slice length that can then be passed to
    /// [`access`](Self::access).
    ///
    /// # Examples
    ///
    /// ```
    /// # use surpass::layout::SliceCache;
    /// let mut cache = SliceCache::new(3, |span| {
    ///     let (left, right) = span.split_at(1);
    ///     // All returned sub-spans stem from the span passed above.
    ///     Box::new([left, right])
    /// });
    /// ```
    #[inline]
    pub fn new<F>(len: usize, f: F) -> Self
    where
        F: Fn(Span<'_>) -> Box<[Span<'_>]> + 'static,
    {
        let span = Span(Slice { offset: 0, len, _phantom: PhantomData });

        // `Span<'_>` is transparent over `Slice<'_, ()>`. Since the `'_` above is used just to
        // trap the span inside the closure, transmuting to `Slice<'static, ()>` does not make any
        // difference.
        Self { len, slices: unsafe { mem::transmute(f(span)) } }
    }

    /// Accesses the `slice` by returning all the sub-slices equivalent to the previously created
    /// [spans](Span) in the closure passed to [`new`](Self::new).
    ///
    /// If the `slice` does not have a length at least as large as the one passed to
    /// [`new`](Self::new), this function returns `None`.
    ///
    /// Note: this method should not be called concurrently with any other `access` calls since it
    /// will wait for the previously returned [`Ref`] to be dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// # use surpass::layout::SliceCache;
    /// let mut array = [1, 2, 3];
    ///
    /// let mut cache = SliceCache::new(3, |span| {
    ///     let (left, right) = span.split_at(1);
    ///     Box::new([left, right])
    /// });
    ///
    /// let copy = array;
    /// let skipped_one = cache.access(&mut array).unwrap();
    ///
    /// assert_eq!(&*skipped_one[1], &copy[1..]);
    /// ```
    #[inline]
    pub fn access<'c, 's, T>(&'c mut self, slice: &'s mut [T]) -> Option<Ref<'c, [Slice<'s, T>]>> {
        if slice.len() >= self.len {
            while ROOT
                .compare_exchange(
                    None,
                    Some(NonNull::new(slice.as_mut_ptr()).unwrap().cast().into()),
                )
                .is_err()
            {
                // This spin lock here is mostly for being able to run tests in parallel. Being
                // able to render to `forma::Composition`s in parallel is currently not supported
                // and might cause poor performance due to priority inversion.
                hint::spin_loop();
            }

            // The generic `Slice<'static, ()>`s are transmuted to `Slice<'s, T>`, enforcing the
            // original `slice`'s lifetime. Since slices are simply pairs of `(offset, len)`,
            // transmuting `()` to `T` relies on the `ROOT` being set up above with the correct
            // pointer.
            return Some(unsafe { mem::transmute(&mut *self.slices) });
        }

        None
    }

    #[cfg(test)]
    fn try_access<'c, 's, T>(&'c mut self, slice: &'s mut [T]) -> Option<Ref<'c, [Slice<'s, T>]>> {
        if slice.len() >= self.len
            && ROOT
                .compare_exchange(
                    None,
                    Some(NonNull::new(slice.as_mut_ptr()).unwrap().cast().into()),
                )
                .is_ok()
        {
            // The generic `Slice<'static, ()>`s are transmuted to `Slice<'s, T>`, enforcing the
            // original `slice`'s lifetime. Since slices are simply pairs of `(offset, len)`,
            // transmuting `()` to `T` relies on the `ROOT` being set up above with the correct
            // pointer.
            return Some(unsafe { mem::transmute(&mut *self.slices) });
        }

        None
    }
}

impl fmt::Debug for SliceCache {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.slices.iter().map(Span::from_slice)).finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn split_at() {
        let mut cache = SliceCache::new(5, |span| {
            let (left, right) = span.split_at(2);
            Box::new([left, right])
        });
        let mut array = [1, 2, 3, 4, 5];

        for slice in cache.access(&mut array).unwrap().iter_mut() {
            for val in slice.iter_mut() {
                *val += 1;
            }
        }

        assert_eq!(array, [2, 3, 4, 5, 6]);
    }

    #[test]
    fn chunks() {
        let mut cache = SliceCache::new(5, |span| span.chunks(2).collect());
        let mut array = [1, 2, 3, 4, 5];

        for slice in cache.access(&mut array).unwrap().iter_mut() {
            for val in slice.iter_mut() {
                *val += 1;
            }
        }

        assert_eq!(array, [2, 3, 4, 5, 6]);
    }
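
    // Covers `Span::slice` in the same way `split_at` and `chunks` are covered above; the two
    // sub-spans span indices 1..5, leaving index 0 untouched.
    #[test]
    fn slice_ranges() {
        let mut cache = SliceCache::new(5, |span| {
            Box::new([span.slice(1..3).unwrap(), span.slice(3..).unwrap()])
        });
        let mut array = [1, 2, 3, 4, 5];

        for slice in cache.access(&mut array).unwrap().iter_mut() {
            for val in slice.iter_mut() {
                *val += 1;
            }
        }

        assert_eq!(array, [1, 3, 4, 5, 6]);
    }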

    #[test]
    fn ref_twice() {
        let mut cache = SliceCache::new(5, |span| {
            let (left, right) = span.split_at(2);
            Box::new([left, right])
        });
        let mut array = [1, 2, 3, 4, 5];

        for slice in cache.access(&mut array).unwrap().iter_mut() {
            for val in slice.iter_mut() {
                *val += 1;
            }
        }

        for slice in cache.access(&mut array).unwrap().iter_mut() {
            for val in slice.iter_mut() {
                *val += 1;
            }
        }

        assert_eq!(array, [3, 4, 5, 6, 7]);
    }

    #[test]
    fn access_twice() {
        let mut cache0 = SliceCache::new(5, |span| Box::new([span]));
        let mut cache1 = SliceCache::new(5, |span| Box::new([span]));

        let mut array0 = [1, 2, 3, 4, 5];
        let mut array1 = [1, 2, 3, 4, 5];

        let _slices = cache0.access(&mut array0).unwrap();

        assert!(matches!(cache1.try_access(&mut array1), None));
    }

    // #[test]
    // fn fails_due_to_too_short_lifetime() {
    //     let mut cache = SliceCache::new(16, |span| Box::new([span]));

    //     let slices = {
    //         let mut buffer = [0u8; 16];

    //         let slices = cache.access(&mut buffer).unwrap();
    //         let slice = &mut *slices[0];

    //         slice
    //     };

    //     &slices[0];
    // }

    // #[test]
    // fn fails_due_to_mixed_spans() {
    //     SliceCache::new(16, |span0| {
    //         let (left, right) = span0.split_at(2);

    //         SliceCache::new(4, |span1| {
    //             Box::new([left])
    //         });

    //         Box::new([right])
    //     });
    // }

    // #[test]
    // fn fails_due_to_t_not_being_send() {
    //     use std::rc::Rc;

    //     use rayon::prelude::*;

    //     let mut array = [Rc::new(1), Rc::new(2), Rc::new(3)];

    //     let mut cache = SliceCache::new(3, |span| {
    //         let (left, right) = span.split_at(1);
    //         Box::new([left, right])
    //     });

    //     cache.access(&mut array).unwrap().par_iter_mut().for_each(|slice| {
    //         for val in slice.iter_mut() {
    //             *val += 1;
    //         }
    //     });
    // }

    // #[test]
    // fn fails_to_export_span() {
    //     let mut leaked = None;

    //     let mut cache0 = SliceCache::new(1, |span| {
    //         leaked = Some(span);
    //         Box::new([])
    //     });

    //     let mut cache1 = SliceCache::new(1, |span| {
    //         Box::new([leaked.take().unwrap()])
    //     });
    // }

    // #[test]
    // fn fails_due_to_dropped_slice() {
    //     let mut array = [1, 2, 3];

    //     let mut cache = SliceCache::new(3, |span| {
    //         let (left, right) = span.split_at(1);
    //         Box::new([left, right])
    //     });

    //     let slices = cache.access(&mut array).unwrap();

    //     std::mem::drop(array);

    //     slices[0][0] = 0;
    // }
}