1#![allow(dead_code)]
29
30use core::hash::Hasher;
34
35use std::hash::Hash;
36use std::ops::Deref;
37use std::sync::atomic::{AtomicUsize, Ordering, fence};
38use std::sync::{Arc, Weak};
39
/// A value that must be explicitly torn down with a caller-supplied context
/// instead of relying on `Drop`.
pub trait Releasable {
    /// Data needed to perform the release, borrowed for the duration of the call.
    type Context<'a>;

    /// Consumes the value and performs any cleanup that requires `c`.
    fn release<'a>(self, c: Self::Context<'a>);
}
49
50impl<T: Releasable> Releasable for Option<T> {
52 type Context<'a> = T::Context<'a>;
53
54 fn release<'a>(self: Self, c: Self::Context<'a>) {
55 if let Some(v) = self {
56 v.release(c);
57 }
58 }
59}
60
61impl<T: Releasable> Releasable for Vec<T>
63where
64 for<'a> T::Context<'a>: Clone,
65{
66 type Context<'a> = T::Context<'a>;
67
68 fn release<'a>(self: Self, c: Self::Context<'a>) {
69 for v in self {
70 v.release(c.clone());
71 }
72 }
73}
74
75impl<T: Releasable, E> Releasable for Result<T, E> {
77 type Context<'a> = T::Context<'a>;
78
79 fn release<'a>(self: Self, c: Self::Context<'a>) {
80 if let Ok(v) = self {
81 v.release(c);
82 }
83 }
84}
85
86impl<T: Releasable> Releasable for ReleaseGuard<T> {
87 type Context<'a> = T::Context<'a>;
88
89 fn release<'a>(self: Self, c: Self::Context<'a>) {
90 self.drop_guard.disarm();
91 self.value.release(c);
92 }
93}
94
/// A value that can hand out additional owning handles to itself. Similar to
/// `Clone`, but the distinct name makes the "new co-owner" semantics explicit.
pub trait Share {
    /// Returns a new handle to the same underlying value.
    fn share(&self) -> Self;
}
100
101impl<T: Share> Share for Option<T> {
102 fn share(&self) -> Self {
103 match self {
104 None => None,
105 Some(t) => Some(t.share()),
106 }
107 }
108}
109
/// Owning handle to a shared value that must be explicitly released (via
/// `release`/`take`) rather than simply dropped.
#[must_use = "OwnedRef must be released"]
pub struct OwnedRef<T> {
    // `None` once this reference has been released/taken.
    inner: Option<Arc<RefInner<T>>>,

    // Debug/test aid: panics on drop unless disarmed by a release/take.
    drop_guard: DropGuard,
}
121
impl<T> OwnedRef<T> {
    /// Wraps `value` with an owned-ref count of 1.
    pub fn new(value: T) -> Self {
        Self { inner: Some(Arc::new(RefInner::new(value))), drop_guard: Default::default() }
    }

    /// Like `Arc::new_cyclic`: `data_fn` receives a `WeakRef` to the value
    /// being constructed, allowing self-referential structures.
    pub fn new_cyclic<F>(data_fn: F) -> Self
    where
        F: FnOnce(WeakRef<T>) -> T,
    {
        let inner = Arc::new_cyclic(|weak_inner| {
            let weak = WeakRef(weak_inner.clone());
            RefInner::new(data_fn(weak))
        });
        Self { inner: Some(inner), drop_guard: Default::default() }
    }

    /// Address of the wrapped value. Panics if the ref has been released.
    pub fn as_ptr(this: &Self) -> *const T {
        &Self::inner(this).value.value as *const T
    }

    /// Whether both refs point at the same underlying value.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Self::as_ptr(this) == Self::as_ptr(other)
    }

    /// Creates a non-owning `WeakRef` to the same value.
    pub fn downgrade(this: &Self) -> WeakRef<T> {
        WeakRef(Arc::downgrade(Self::inner(this)))
    }

    /// Creates a `TempRef` borrowing this ref; it raises the temp-ref count so
    /// a concurrent release blocks until the `TempRef` is dropped.
    pub fn temp(this: &Self) -> TempRef<'_, T> {
        TempRef::new(Arc::clone(Self::inner(this)))
    }

    // Accessor that panics with a descriptive message once the ref is released.
    fn inner(this: &Self) -> &Arc<RefInner<T>> {
        this.inner.as_ref().unwrap_or_else(|| {
            panic!("OwnedRef<{}> has been released.", std::any::type_name::<T>())
        })
    }

    /// Attempts to mint a new `OwnedRef` from `inner`; fails if the owned-ref
    /// count already reached 0, so a released value can never be resurrected.
    fn re_own(inner: Arc<RefInner<T>>) -> Option<Self> {
        let mut owned_refs = inner.owned_refs_count.load(Ordering::Relaxed);
        // CAS loop: increment only while the count is observed non-zero.
        loop {
            if owned_refs == 0 {
                return None;
            }
            match inner.owned_refs_count.compare_exchange(
                owned_refs,
                owned_refs + 1,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    return Some(Self { inner: Some(inner), drop_guard: Default::default() });
                }
                Err(v) => {
                    // Lost the race; retry with the freshly observed count.
                    owned_refs = v;
                }
            }
        }
    }
}
189
impl<T: Releasable> OwnedRef<T> {
    /// Drops this owned reference. If it was the last owner, waits for all
    /// outstanding `TempRef`s to go away and returns the value so the caller
    /// can release it; otherwise returns `None`.
    pub fn take(this: &mut Self) -> Option<ReleaseGuard<T>> {
        this.drop_guard.disarm();
        let inner = this.inner.take().unwrap_or_else(|| {
            panic!("OwnedRef<{}> has been released.", std::any::type_name::<T>())
        });
        let previous_count = inner.owned_refs_count.fetch_sub(1, Ordering::Release);
        if previous_count == 1 {
            // Pairs with the Release decrement above so this thread observes
            // all writes made by the other (former) owners.
            fence(Ordering::Acquire);
            Some(Self::wait_and_take_value(inner))
        } else {
            None
        }
    }

    /// Loops until this `Arc` is the only strong reference left (all
    /// `TempRef`s dropped), then unwraps and returns the value.
    fn wait_and_take_value(mut inner: Arc<RefInner<T>>) -> ReleaseGuard<T> {
        loop {
            debug_assert_eq!(inner.owned_refs_count.load(Ordering::Acquire), 0);
            match Arc::try_unwrap(inner) {
                Ok(value) => return value.value,
                Err(value) => inner = value,
            }
            // Other strong refs remain: block once on the temp-ref futex, then
            // retry (a wake-up does not guarantee the count is still 0).
            inner.wait_for_no_ref_once();
        }
    }
}
222
223impl<T: std::fmt::Debug> std::fmt::Debug for OwnedRef<T> {
224 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
225 Self::inner(self).value.fmt(f)
226 }
227}
228
impl<T: Releasable> Share for OwnedRef<T> {
    /// Creates an additional owner of the same value.
    fn share(&self) -> Self {
        let inner = Self::inner(self);
        // Relaxed suffices: this handle already holds an owner, so the count is
        // > 0 and cannot concurrently reach 0 while we increment.
        let previous_count = inner.owned_refs_count.fetch_add(1, Ordering::Relaxed);
        debug_assert!(previous_count > 0, "OwnedRef should not be used after being released.");
        Self { inner: Some(Arc::clone(inner)), drop_guard: Default::default() }
    }
}
238
impl<T: Releasable> Releasable for OwnedRef<T> {
    type Context<'a> = T::Context<'a>;

    #[allow(unused_mut)]
    fn release<'a>(mut self, c: Self::Context<'a>) {
        // `take` yields Some only for the last owner; releasing the resulting
        // Option is a no-op otherwise.
        OwnedRef::take(&mut self).release(c);
    }
}
249
250impl<T: Default> Default for OwnedRef<T> {
251 fn default() -> Self {
252 Self::new(T::default())
253 }
254}
255
256impl<T> std::ops::Deref for OwnedRef<T> {
257 type Target = T;
258
259 fn deref(&self) -> &Self::Target {
260 &Self::inner(self).deref().value
261 }
262}
263
264impl<T> std::borrow::Borrow<T> for OwnedRef<T> {
265 fn borrow(&self) -> &T {
266 self.deref()
267 }
268}
269
270impl<T> std::convert::AsRef<T> for OwnedRef<T> {
271 fn as_ref(&self) -> &T {
272 self.deref()
273 }
274}
275
276impl<T: PartialEq> PartialEq<TempRef<'_, T>> for OwnedRef<T> {
277 fn eq(&self, other: &TempRef<'_, T>) -> bool {
278 Arc::ptr_eq(Self::inner(self), &other.0)
279 }
280}
281
282impl<T: PartialEq> PartialEq for OwnedRef<T> {
283 fn eq(&self, other: &OwnedRef<T>) -> bool {
284 Arc::ptr_eq(Self::inner(self), Self::inner(other)) || **self == **other
285 }
286}
287
288impl<T: Eq> Eq for OwnedRef<T> {}
289
290impl<T: PartialOrd> PartialOrd for OwnedRef<T> {
291 fn partial_cmp(&self, other: &OwnedRef<T>) -> Option<std::cmp::Ordering> {
292 (**self).partial_cmp(&**other)
293 }
294}
295
296impl<T: Ord> Ord for OwnedRef<T> {
297 fn cmp(&self, other: &OwnedRef<T>) -> std::cmp::Ordering {
298 (**self).cmp(&**other)
299 }
300}
301
302impl<T: Hash> Hash for OwnedRef<T> {
303 fn hash<H: Hasher>(&self, state: &mut H) {
304 (**self).hash(state)
305 }
306}
307
308impl<T> From<&OwnedRef<T>> for WeakRef<T> {
309 fn from(owner: &OwnedRef<T>) -> Self {
310 OwnedRef::downgrade(owner)
311 }
312}
313
314impl<'a, T> From<&'a OwnedRef<T>> for TempRef<'a, T> {
315 fn from(owner: &'a OwnedRef<T>) -> Self {
316 OwnedRef::temp(owner)
317 }
318}
319
320impl<'a, T> From<&'a mut OwnedRef<T>> for TempRef<'a, T> {
321 fn from(owner: &'a mut OwnedRef<T>) -> Self {
322 OwnedRef::temp(owner)
323 }
324}
325
/// Non-owning handle: keeps the allocation reachable via `Weak` but neither
/// keeps the value "alive" (see `upgrade`) nor delays its release.
#[derive(Debug)]
pub struct WeakRef<T>(Weak<RefInner<T>>);
330
impl<T> WeakRef<T> {
    /// A weak ref with no associated value; `upgrade` always fails.
    pub fn new() -> Self {
        Self(Weak::new())
    }

    /// Upgrades to a `TempRef` if the value is still alive *and* still owned.
    pub fn upgrade(&self) -> Option<TempRef<'_, T>> {
        if let Some(value) = self.0.upgrade() {
            // Build the TempRef first so the temp-ref count is raised before
            // the owned-ref count is inspected.
            let temp_ref = TempRef::new(value);
            // An owned-ref count of 0 means the value is being (or has been)
            // released: treat it as dead even though the Arc still exists.
            if temp_ref.0.owned_refs_count.load(Ordering::Acquire) > 0 {
                return Some(temp_ref);
            }
        }
        None
    }

    /// Attempts to recover a full `OwnedRef`; fails once the value is released.
    pub fn re_own(&self) -> Option<OwnedRef<T>> {
        self.0.upgrade().and_then(OwnedRef::re_own)
    }

    /// Address of the wrapped value, computed without upgrading.
    pub fn as_ptr(&self) -> *const T {
        let base = self.0.as_ptr();
        // Project RefInner -> value (a ReleaseGuard<T>) -> inner T field.
        let value = memoffset::raw_field!(base, RefInner<T>, value);
        memoffset::raw_field!(value, ReleaseGuard<T>, value)
    }

    /// Whether both weak refs point at the same underlying value.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Self::as_ptr(this) == Self::as_ptr(other)
    }
}
374
375impl<T> Default for WeakRef<T> {
376 fn default() -> Self {
377 Self::new()
378 }
379}
380
381impl<T> Clone for WeakRef<T> {
382 fn clone(&self) -> Self {
383 Self(self.0.clone())
384 }
385}
386
387pub struct WeakRefKey<T>(pub WeakRef<T>);
389impl<T> PartialEq for WeakRefKey<T> {
390 fn eq(&self, other: &Self) -> bool {
391 WeakRef::ptr_eq(&self.0, &other.0)
392 }
393}
394impl<T> PartialOrd for WeakRefKey<T> {
395 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
396 Some(self.cmp(other))
397 }
398}
399impl<T> Ord for WeakRefKey<T> {
400 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
401 WeakRef::as_ptr(&self.0).cmp(&WeakRef::as_ptr(&other.0))
402 }
403}
404impl<T> From<WeakRef<T>> for WeakRefKey<T> {
405 fn from(weak_ref: WeakRef<T>) -> Self {
406 Self(weak_ref)
407 }
408}
409impl<'a, T> From<&TempRef<'a, T>> for WeakRefKey<T> {
410 fn from(temp_ref: &TempRef<'a, T>) -> Self {
411 Self(WeakRef::from(temp_ref))
412 }
413}
414impl<'a, T> From<&OwnedRef<T>> for WeakRefKey<T> {
415 fn from(owned_ref: &OwnedRef<T>) -> Self {
416 Self(WeakRef::from(owned_ref))
417 }
418}
419impl<T> Clone for WeakRefKey<T> {
420 fn clone(&self) -> Self {
421 Self(self.0.clone())
422 }
423}
424impl<T> Eq for WeakRefKey<T> {}
425impl<T> Hash for WeakRefKey<T> {
426 fn hash<H: Hasher>(&self, state: &mut H) {
427 WeakRef::as_ptr(&self.0).hash(state);
428 }
429}
430impl<T> std::ops::Deref for WeakRefKey<T> {
431 type Target = WeakRef<T>;
432 fn deref(&self) -> &Self::Target {
433 &self.0
434 }
435}
436impl<T> std::fmt::Debug for WeakRefKey<T> {
437 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
438 fmt.debug_tuple(std::any::type_name::<Self>()).field(&self.0.as_ptr()).finish()
439 }
440}
441
/// Short-lived strong reference that delays a concurrent release until it is
/// dropped. The `*mut u8` inside `PhantomData` makes the type `!Send`/`!Sync`,
/// pinning each `TempRef` to its creating thread (see `TEMP_REF_LOCAL_COUNT`).
pub struct TempRef<'a, T>(Arc<RefInner<T>>, std::marker::PhantomData<(&'a T, *mut u8)>);
449
450impl<'a, T> std::fmt::Debug for TempRef<'a, T>
451where
452 T: std::fmt::Debug,
453{
454 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
455 self.deref().fmt(f)
456 }
457}
458
459impl<'a, T> Drop for TempRef<'a, T> {
460 fn drop(&mut self) {
461 self.0.dec_temp_ref();
462 }
463}
464
impl<'a, T> TempRef<'a, T> {
    // Raises the temp-ref count; `Drop` performs the matching decrement.
    fn new(inner: Arc<RefInner<T>>) -> Self {
        inner.inc_temp_ref();
        Self(inner, Default::default())
    }

    /// Address of the wrapped value.
    pub fn as_ptr(this: &Self) -> *const T {
        &this.0.value.value as *const T
    }

    /// Whether both refs point at the same underlying value.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Self::as_ptr(this) == Self::as_ptr(other)
    }

    /// Converts to a `'static` `TempRef`. The result still delays release of
    /// the value for as long as it is kept alive, so use sparingly.
    pub fn into_static(this: Self) -> TempRef<'static, T> {
        // `new` takes its own count; dropping `this` gives back the old one.
        TempRef::new(this.0.clone())
    }

    /// Attempts to recover a full `OwnedRef`; fails once the value is released.
    pub fn re_own(&self) -> Option<OwnedRef<T>> {
        OwnedRef::re_own(Arc::clone(&self.0))
    }
}
499
500impl<'a, T> From<&TempRef<'a, T>> for WeakRef<T> {
501 fn from(temp_ref: &TempRef<'a, T>) -> Self {
502 Self(Arc::downgrade(&temp_ref.0))
503 }
504}
505
506impl<'a, T> From<TempRef<'a, T>> for WeakRef<T> {
507 fn from(temp_ref: TempRef<'a, T>) -> Self {
508 Self(Arc::downgrade(&temp_ref.0))
509 }
510}
511
512impl<'a, T> std::ops::Deref for TempRef<'a, T> {
513 type Target = T;
514
515 fn deref(&self) -> &Self::Target {
516 &self.0.deref().value
517 }
518}
519
520impl<'a, T> std::borrow::Borrow<T> for TempRef<'a, T> {
521 fn borrow(&self) -> &T {
522 &self.0.deref().value
523 }
524}
525
526impl<'a, T> std::convert::AsRef<T> for TempRef<'a, T> {
527 fn as_ref(&self) -> &T {
528 &self.0.deref().value
529 }
530}
531
532impl<'a, T: PartialEq> PartialEq for TempRef<'a, T> {
533 fn eq(&self, other: &TempRef<'_, T>) -> bool {
534 Arc::ptr_eq(&self.0, &other.0) || **self == **other
535 }
536}
537
538impl<'a, T: Eq> Eq for TempRef<'a, T> {}
539
540impl<'a, T: PartialOrd> PartialOrd for TempRef<'a, T> {
541 fn partial_cmp(&self, other: &TempRef<'_, T>) -> Option<std::cmp::Ordering> {
542 (**self).partial_cmp(&**other)
543 }
544}
545
546impl<'a, T: Ord> Ord for TempRef<'a, T> {
547 fn cmp(&self, other: &TempRef<'_, T>) -> std::cmp::Ordering {
548 (**self).cmp(&**other)
549 }
550}
551
552impl<'a, T: Hash> Hash for TempRef<'a, T> {
553 fn hash<H: Hasher>(&self, state: &mut H) {
554 (**self).hash(state)
555 }
556}
557
558pub struct TempRefKey<'a, T>(pub TempRef<'a, T>);
560impl<'a, T> PartialEq for TempRefKey<'a, T> {
561 fn eq(&self, other: &Self) -> bool {
562 TempRef::ptr_eq(&self.0, &other.0)
563 }
564}
565impl<'a, T> Eq for TempRefKey<'a, T> {}
566impl<'a, T> Hash for TempRefKey<'a, T> {
567 fn hash<H: Hasher>(&self, state: &mut H) {
568 TempRef::as_ptr(&self.0).hash(state);
569 }
570}
571impl<'a, T> std::ops::Deref for TempRefKey<'a, T> {
572 type Target = T;
573 fn deref(&self) -> &Self::Target {
574 self.0.deref()
575 }
576}
577
/// Wrapper that asserts (in debug/test builds, via `DropGuard`) that the value
/// is explicitly released rather than silently dropped.
#[must_use = "ReleaseGuard must be released"]
pub struct ReleaseGuard<T> {
    value: T,

    // Panics on drop unless disarmed by `take`/`release`.
    drop_guard: DropGuard,
}
588
#[cfg(test)]
impl<T> ReleaseGuard<T> {
    /// Test helper: builds a guard that is already disarmed, so dropping it
    /// without an explicit release will not panic.
    pub fn new_released(value: T) -> Self {
        let guard = Self::from(value);
        guard.drop_guard.disarm();
        guard
    }
}
597
598impl<T> ReleaseGuard<T> {
599 pub fn take(this: ReleaseGuard<T>) -> T {
603 this.drop_guard.disarm();
604 this.value
605 }
606}
607
#[cfg(test)]
impl<T: Default> ReleaseGuard<T> {
    /// Test helper: a disarmed guard around `T::default()`.
    pub fn default_released() -> Self {
        Self::new_released(Default::default())
    }
}
614
615impl<T: std::fmt::Debug> std::fmt::Debug for ReleaseGuard<T> {
616 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
617 self.value.fmt(f)
618 }
619}
620
621impl<T: Default> Default for ReleaseGuard<T> {
622 fn default() -> Self {
623 T::default().into()
624 }
625}
626
627impl<T: Clone> Clone for ReleaseGuard<T> {
628 fn clone(&self) -> Self {
629 self.value.clone().into()
630 }
631}
632
633impl<T> From<T> for ReleaseGuard<T> {
634 fn from(value: T) -> Self {
635 Self { value, drop_guard: Default::default() }
636 }
637}
638
639impl<T> std::ops::Deref for ReleaseGuard<T> {
640 type Target = T;
641
642 fn deref(&self) -> &Self::Target {
643 &self.value
644 }
645}
646
647impl<T> std::ops::DerefMut for ReleaseGuard<T> {
648 fn deref_mut(&mut self) -> &mut Self::Target {
649 &mut self.value
650 }
651}
652
653impl<T> std::borrow::Borrow<T> for ReleaseGuard<T> {
654 fn borrow(&self) -> &T {
655 self.deref()
656 }
657}
658
659impl<T> std::convert::AsRef<T> for ReleaseGuard<T> {
660 fn as_ref(&self) -> &T {
661 self.deref()
662 }
663}
664
665impl<T: PartialEq> PartialEq for ReleaseGuard<T> {
666 fn eq(&self, other: &ReleaseGuard<T>) -> bool {
667 **self == **other
668 }
669}
670
671impl<T: Eq> Eq for ReleaseGuard<T> {}
672
673impl<T: PartialOrd> PartialOrd for ReleaseGuard<T> {
674 fn partial_cmp(&self, other: &ReleaseGuard<T>) -> Option<std::cmp::Ordering> {
675 (**self).partial_cmp(&**other)
676 }
677}
678
679impl<T: Ord> Ord for ReleaseGuard<T> {
680 fn cmp(&self, other: &ReleaseGuard<T>) -> std::cmp::Ordering {
681 (**self).cmp(&**other)
682 }
683}
684
685impl<T: Hash> Hash for ReleaseGuard<T> {
686 fn hash<H: Hasher>(&self, state: &mut H) {
687 (**self).hash(state)
688 }
689}
690
/// Debug aid that panics when dropped before `disarm` is called.
/// In non-test release builds the flag is compiled out, making this zero-sized.
#[derive(Default, Debug)]
pub struct DropGuard {
    #[cfg(any(test, debug_assertions))]
    released: std::sync::atomic::AtomicBool,
}
696
697impl DropGuard {
698 #[inline(always)]
699 pub fn disarm(&self) {
700 #[cfg(any(test, debug_assertions))]
701 {
702 if self
703 .released
704 .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
705 .is_err()
706 {
707 panic!("Guard was disarmed twice");
708 }
709 }
710 }
711}
712
#[cfg(any(test, debug_assertions))]
impl Drop for DropGuard {
    fn drop(&mut self) {
        // Dropping an armed guard means the owner was never released.
        assert!(*self.released.get_mut());
    }
}
#[cfg(any(test, debug_assertions))]
thread_local! {
    /// Number of `TempRef`s created and not yet dropped on the current thread
    /// (TempRef is !Send, so per-thread counting is sound).
    static TEMP_REF_LOCAL_COUNT: std::cell::RefCell<usize> = const { std::cell::RefCell::new(0) };
}
725
726pub fn debug_assert_no_local_temp_ref() {
729 #[cfg(any(test, debug_assertions))]
730 {
731 TEMP_REF_LOCAL_COUNT.with(|count| {
732 assert_eq!(*count.borrow(), 0, "Current threads owns {} TempRef", *count.borrow());
733 });
734 }
735}
736
/// Shared heap state behind every `OwnedRef`/`TempRef`/`WeakRef`.
struct RefInner<T> {
    value: ReleaseGuard<T>,
    /// Number of live `OwnedRef`s; release starts when this reaches 0.
    owned_refs_count: AtomicUsize,
    /// Number of live `TempRef`s, kept in a futex so the releasing thread can
    /// block until it drains to 0.
    temp_refs_count: zx::Futex,
}
758
impl<T> RefInner<T> {
    fn new(value: T) -> Self {
        Self {
            value: value.into(),
            // The creating OwnedRef is the first owner.
            owned_refs_count: AtomicUsize::new(1),
            temp_refs_count: zx::Futex::new(0),
        }
    }

    /// Registers a new `TempRef`, both in the shared futex count and in the
    /// per-thread debug counter.
    fn inc_temp_ref(&self) {
        self.temp_refs_count.fetch_add(1, Ordering::Relaxed);
        #[cfg(any(test, debug_assertions))]
        {
            TEMP_REF_LOCAL_COUNT.with(|count| {
                *count.borrow_mut() += 1;
            });
        }
    }

    /// Unregisters a `TempRef`; wakes a releaser blocked in
    /// `wait_for_no_ref_once` once the count reaches 0.
    fn dec_temp_ref(&self) {
        let previous_count = self.temp_refs_count.fetch_sub(1, Ordering::Release);
        if previous_count == 1 {
            // Pairs with the Release decrement so the woken thread observes
            // every write made while the TempRef was alive.
            fence(Ordering::Acquire);
            self.temp_refs_count.wake_single_owner();
        }
        #[cfg(any(test, debug_assertions))]
        {
            TEMP_REF_LOCAL_COUNT.with(|count| {
                *count.borrow_mut() -= 1;
            });
        }
    }

    /// Blocks once on the futex until the temp-ref count may have reached 0.
    /// Callers must loop: a wake-up does not guarantee the count is still 0.
    fn wait_for_no_ref_once(self: &Arc<Self>) {
        let current_value = self.temp_refs_count.load(Ordering::Acquire);
        if current_value == 0 {
            return;
        }
        let result = self.temp_refs_count.wait(current_value, None, zx::MonotonicInstant::INFINITE);
        // BAD_STATE means the count changed before the wait began — also fine.
        debug_assert!(
            result == Ok(()) || result == Err(zx::Status::BAD_STATE),
            "Unexpected result: {result:?}"
        );
    }
}
813
/// Runs `$body` (as a closure); if it evaluates to `Err`, releases
/// `$releasable_name` with `$context` and propagates the error from the
/// *enclosing* function. On `Ok`, evaluates to the unwrapped value without
/// releasing.
#[macro_export]
macro_rules! release_on_error {
    ($releasable_name:ident, $context:expr, $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (|| $body)() };
        match result {
            Err(e) => {
                $releasable_name.release($context);
                return Err(e);
            }
            Ok(x) => x,
        }
    }};
}
830
/// Runs `$body` (as a closure, optionally async), releases `$releasable_name`
/// with `$context` once the body has completed, then evaluates to the body's
/// result. A panic inside the body skips the release.
#[macro_export]
macro_rules! release_after {
    ($releasable_name:ident, $context:expr, async || $($output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (async || $(-> $output_type)? { $body })().await };
        $releasable_name.release($context);
        result
    }};
    ($releasable_name:ident, $context:expr, $(|| -> $output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (|| $(-> $output_type)? { $body })() };
        $releasable_name.release($context);
        result
    }};
}
847
/// Like `release_after!`, but `$releasable_iter` is an iterable whose items
/// are each released with `$context` after the body completes.
#[macro_export]
macro_rules! release_iter_after {
    ($releasable_iter:ident, $context:expr, async || $(-> $output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (async || $(-> $output_type)? { $body })().await };
        for item in $releasable_iter.into_iter() {
            item.release($context);
        }
        result
    }};
    ($releasable_iter:ident, $context:expr, $(|| -> $output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (|| $(-> $output_type)? { $body })() };
        for item in $releasable_iter.into_iter() {
            item.release($context);
        }
        result
    }};
}
869
870pub use {release_after, release_iter_after, release_on_error};
871
#[cfg(test)]
mod test {
    use super::*;

    /// Trivial releasable payload used by most tests.
    #[derive(Default)]
    struct Data;

    impl Releasable for Data {
        type Context<'a> = ();
        fn release<'a>(self, _: ()) {}
    }

    /// Payload whose release context is a mutable borrow.
    #[derive(Default)]
    struct DataWithMutableReleaseContext;

    impl Releasable for DataWithMutableReleaseContext {
        type Context<'a> = &'a mut ();
        fn release<'a>(self, _: &'a mut ()) {}
    }

    // Dropping an OwnedRef without releasing it must trip the DropGuard.
    #[::fuchsia::test]
    #[should_panic]
    fn drop_without_release() {
        let _ = OwnedRef::new(Data {});
    }

    // Weak refs upgrade while the value is owned and fail after release.
    #[::fuchsia::test]
    fn test_creation_and_reference() {
        let value = OwnedRef::new(Data {});
        let reference = WeakRef::from(&value);
        reference.upgrade().expect("upgrade");
        value.release(());
        assert!(reference.upgrade().is_none());
    }

    // Sharing and weak-cloning both leave the original releasable exactly once.
    #[::fuchsia::test]
    fn test_clone() {
        let value = OwnedRef::new(Data {});
        {
            let value2 = OwnedRef::share(&value);
            value2.release(());
        }
        #[allow(clippy::redundant_clone)]
        {
            let reference = WeakRef::from(&value);
            let _reference2 = reference.clone();
        }
        value.release(());
    }

    // A default WeakRef has no referent.
    #[::fuchsia::test]
    fn test_default() {
        let reference = WeakRef::<Data>::default();
        assert!(reference.upgrade().is_none());
    }

    // release_on_error! must release the value when the body errors out.
    #[::fuchsia::test]
    fn test_release_on_error() {
        fn release_on_error() -> Result<(), ()> {
            let value = OwnedRef::new(Data {});
            release_on_error!(value, (), {
                if true {
                    return Err(());
                }
                Ok(())
            });
            Ok(())
        }
        assert_eq!(release_on_error(), Err(()));
    }

    // A 'static TempRef outlives the WeakRef it was upgraded from.
    #[::fuchsia::test]
    fn test_into_static() {
        let value = OwnedRef::new(Data {});
        let weak = WeakRef::from(&value);
        let static_ref = TempRef::into_static(weak.upgrade().unwrap());
        std::mem::drop(weak);
        std::mem::drop(static_ref);
        value.release(());
    }

    // The thread-local TempRef counter tracks only the current thread.
    #[::fuchsia::test]
    fn test_debug_assert_no_local_temp_ref() {
        debug_assert_no_local_temp_ref();
        let value = OwnedRef::new(Data {});
        debug_assert_no_local_temp_ref();
        let _temp_ref = OwnedRef::temp(&value);
        std::thread::spawn(|| {
            debug_assert_no_local_temp_ref();
        })
        .join()
        .expect("join");
        std::mem::drop(_temp_ref);
        debug_assert_no_local_temp_ref();
        value.release(());
        debug_assert_no_local_temp_ref();
    }

    // Holding a live TempRef must make the assertion fire.
    #[::fuchsia::test]
    #[should_panic]
    fn test_debug_assert_no_local_temp_ref_aborts() {
        let value = OwnedRef::new(Data {});
        {
            let _temp_ref = OwnedRef::temp(&value);
            debug_assert_no_local_temp_ref();
        }
        value.release(());
    }

    // An armed ReleaseGuard panics on drop; a disarmed one does not.
    #[::fuchsia::test]
    #[should_panic]
    fn test_unrelease_release_guard() {
        let _value = ReleaseGuard::<Data>::default();
    }

    #[::fuchsia::test]
    fn test_released_release_guard() {
        let _value = ReleaseGuard::<Data>::default_released();
    }

    // GAT contexts allow releasing with a mutable borrow.
    #[::fuchsia::test]
    fn release_with_mutable_context() {
        let value = OwnedRef::new(DataWithMutableReleaseContext {});
        let mut context = ();
        value.release(&mut context);
    }

    // Stress: upgrades racing with release must terminate (upgrade eventually
    // observes the owned count at 0) and release must not deadlock.
    #[::fuchsia::test]
    fn upgrade_while_release() {
        let value = OwnedRef::new(Data {});
        for _ in 0..10 {
            std::thread::spawn({
                let weak = OwnedRef::downgrade(&value);
                move || loop {
                    if weak.upgrade().is_none() {
                        return;
                    }
                }
            });
        }
        std::thread::sleep(std::time::Duration::from_millis(10));
        value.release(());
    }

    // new_cyclic hands out a usable weak ref to the value under construction.
    #[::fuchsia::test]
    fn new_cyclic() {
        let mut weak_value = None;
        let value = OwnedRef::new_cyclic(|weak| {
            weak_value = Some(weak);
            Data {}
        });
        let weak_value = weak_value.expect("weak_value");
        assert!(weak_value.upgrade().is_some());
        value.release(());
        assert!(weak_value.upgrade().is_none());
    }

    // All three handle types agree on the referent address.
    #[::fuchsia::test]
    fn as_ptr() {
        let value = OwnedRef::new(Data {});
        let weak = OwnedRef::downgrade(&value);
        let temp = weak.upgrade().expect("upgrade");
        assert_eq!(OwnedRef::as_ptr(&value), weak.as_ptr());
        assert_eq!(OwnedRef::as_ptr(&value), TempRef::as_ptr(&temp));
        std::mem::drop(temp);
        value.release(());
    }

    // re_own succeeds while any owner remains and fails after the last release.
    #[::fuchsia::test]
    fn test_re_own() {
        let data = Data::default();
        let owned = OwnedRef::new(data);
        let weak = WeakRef::from(&owned);

        let re_owned = weak.re_own();
        assert!(re_owned.is_some());

        owned.release(());

        let re_owned_again = weak.re_own();
        assert!(re_owned_again.is_some());
        re_owned_again.release(());

        re_owned.release(());

        let re_owned_finally = weak.re_own();
        assert!(re_owned_finally.is_none());
    }

    // Stress the re_own CAS loop against a concurrent final release.
    #[::fuchsia::test]
    fn test_re_own_concurrent() {
        let owned = OwnedRef::new(Data::default());
        let weak = WeakRef::from(&owned);
        let num_threads = 10;

        let mut handles = vec![];
        for _ in 0..num_threads {
            let weak = weak.clone();
            let handle = std::thread::spawn(move || {
                loop {
                    if let Some(re_owned) = weak.re_own() {
                        re_owned.release(());
                    } else {
                        return;
                    }
                }
            });
            handles.push(handle);
        }

        owned.release(());

        for handle in handles {
            handle.join().unwrap();
        }

        assert!(weak.re_own().is_none());
    }

    // release_after! returns the body's value in both sync and async forms.
    #[::fuchsia::test]
    fn test_release_after() {
        let owned = OwnedRef::new(Data::default());
        let value = release_after!(owned, (), { 0 });
        assert_eq!(value, 0);
    }

    #[::fuchsia::test]
    async fn test_release_after_async() {
        let owned = OwnedRef::new(Data::default());
        let value = release_after!(owned, (), async || { 0 });
        assert_eq!(value, 0);
    }
}