1#![allow(dead_code)]
29
30use core::hash::Hasher;
34
35use std::hash::Hash;
36use std::ops::Deref;
37use std::sync::atomic::{AtomicUsize, Ordering, fence};
38use std::sync::{Arc, Weak};
39
/// A value that must be explicitly released with a context before it is dropped.
///
/// Types implementing this trait pair with `ReleaseGuard`/`OwnedRef`, which (in
/// test/debug builds) panic if the value is dropped without `release` having
/// been called.
pub trait Releasable {
    /// The contextual data required to release the value.
    type Context<'a>;

    /// Consumes the value, releasing any resources it holds using `c`.
    fn release<'a>(self: Self, c: Self::Context<'a>);
}
49
50impl<T: Releasable> Releasable for Option<T> {
52 type Context<'a> = T::Context<'a>;
53
54 fn release<'a>(self: Self, c: Self::Context<'a>) {
55 if let Some(v) = self {
56 v.release(c);
57 }
58 }
59}
60
61impl<T: Releasable> Releasable for Vec<T>
63where
64 for<'a> T::Context<'a>: Clone,
65{
66 type Context<'a> = T::Context<'a>;
67
68 fn release<'a>(self: Self, c: Self::Context<'a>) {
69 for v in self {
70 v.release(c.clone());
71 }
72 }
73}
74
75impl<T: Releasable, E> Releasable for Result<T, E> {
77 type Context<'a> = T::Context<'a>;
78
79 fn release<'a>(self: Self, c: Self::Context<'a>) {
80 if let Ok(v) = self {
81 v.release(c);
82 }
83 }
84}
85
86impl<T: Releasable> Releasable for ReleaseGuard<T> {
87 type Context<'a> = T::Context<'a>;
88
89 fn release<'a>(self: Self, c: Self::Context<'a>) {
90 self.drop_guard.disarm();
91 self.value.release(c);
92 }
93}
94
/// A type that can hand out another handle to the same underlying value.
pub trait Share {
    /// Returns a new handle sharing the same underlying value as `self`.
    fn share(&self) -> Self;
}
100
101impl<T: Share> Share for Option<T> {
102 fn share(&self) -> Self {
103 match self {
104 None => None,
105 Some(t) => Some(t.share()),
106 }
107 }
108}
109
/// A strong, counted reference to a value that must be explicitly released.
///
/// When the last `OwnedRef` is released, the wrapped value is taken out (after
/// waiting for all outstanding `TempRef`s) and released with its context.
#[must_use = "OwnedRef must be released"]
pub struct OwnedRef<T> {
    /// The shared state; becomes `None` once this reference has been released.
    inner: Option<Arc<RefInner<T>>>,

    /// Debug-only guard that panics if this `OwnedRef` is dropped without
    /// having been released.
    drop_guard: DropGuard,
}
121
impl<T> OwnedRef<T> {
    /// Creates a new `OwnedRef` owning `value`, with an owned-reference count of 1.
    pub fn new(value: T) -> Self {
        Self { inner: Some(Arc::new(RefInner::new(value))), drop_guard: Default::default() }
    }

    /// Like `Arc::new_cyclic`: constructs the value with access to a `WeakRef`
    /// pointing back at the allocation being built.
    pub fn new_cyclic<F>(data_fn: F) -> Self
    where
        F: FnOnce(WeakRef<T>) -> T,
    {
        let inner = Arc::new_cyclic(|weak_inner| {
            let weak = WeakRef(weak_inner.clone());
            RefInner::new(data_fn(weak))
        });
        Self { inner: Some(inner), drop_guard: Default::default() }
    }

    /// Returns a raw pointer to the wrapped value.
    ///
    /// Panics if `this` has already been released.
    pub fn as_ptr(this: &Self) -> *const T {
        &Self::inner(this).value.value as *const T
    }

    /// Returns whether `this` and `other` refer to the same allocation.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Self::as_ptr(this) == Self::as_ptr(other)
    }

    /// Creates a `WeakRef` to the same value; does not affect the owned count.
    pub fn downgrade(this: &Self) -> WeakRef<T> {
        WeakRef(Arc::downgrade(Self::inner(this)))
    }

    /// Creates a short-lived `TempRef` to the value, incrementing the
    /// temp-reference count.
    pub fn temp(this: &Self) -> TempRef<'_, T> {
        TempRef::new(Arc::clone(Self::inner(this)))
    }

    /// Accesses the inner `Arc`, panicking if this `OwnedRef` was released.
    fn inner(this: &Self) -> &Arc<RefInner<T>> {
        this.inner.as_ref().unwrap_or_else(|| {
            panic!("OwnedRef<{}> has been released.", std::any::type_name::<T>())
        })
    }

    /// Attempts to mint a new `OwnedRef` from `inner` by incrementing the
    /// owned-reference count. Returns `None` if the count has already reached
    /// 0 (the value has been, or is being, released). A CAS loop ensures the
    /// increment only happens while the count is still non-zero.
    fn re_own(inner: Arc<RefInner<T>>) -> Option<Self> {
        let mut owned_refs = inner.owned_refs_count.load(Ordering::Relaxed);
        loop {
            if owned_refs == 0 {
                return None;
            }
            match inner.owned_refs_count.compare_exchange(
                owned_refs,
                owned_refs + 1,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    return Some(Self { inner: Some(inner), drop_guard: Default::default() });
                }
                Err(v) => {
                    // Another thread changed the count concurrently; retry
                    // with the freshly observed value.
                    owned_refs = v;
                }
            }
        }
    }
}
189
impl<T: Releasable> OwnedRef<T> {
    /// Consumes this reference's share of the value, decrementing the owned
    /// count. If this was the last owned reference, waits for all outstanding
    /// `TempRef`s to drop and returns the value so the caller can release it.
    ///
    /// Panics if `this` has already been released.
    pub fn take(this: &mut Self) -> Option<ReleaseGuard<T>> {
        this.drop_guard.disarm();
        let inner = this.inner.take().unwrap_or_else(|| {
            panic!("OwnedRef<{}> has been released.", std::any::type_name::<T>())
        });
        let previous_count = inner.owned_refs_count.fetch_sub(1, Ordering::Release);
        if previous_count == 1 {
            // Pair the Release decrement above with an Acquire fence so this
            // thread observes everything that happened before the other owned
            // references were dropped.
            fence(Ordering::Acquire);
            Some(Self::wait_and_take_value(inner))
        } else {
            None
        }
    }

    /// Blocks until `inner` is the only remaining strong reference (i.e. every
    /// `TempRef` has been dropped), then unwraps the `Arc` and returns the
    /// guarded value.
    fn wait_and_take_value(mut inner: Arc<RefInner<T>>) -> ReleaseGuard<T> {
        loop {
            debug_assert_eq!(inner.owned_refs_count.load(Ordering::Acquire), 0);
            match Arc::try_unwrap(inner) {
                Ok(value) => return value.value,
                Err(value) => inner = value,
            }
            // Other strong references (TempRefs) still exist: wait on the
            // futex until the temp-ref count may have hit zero, then retry.
            inner.wait_for_no_ref_once();
        }
    }
}
222
223impl<T: std::fmt::Debug> std::fmt::Debug for OwnedRef<T> {
224 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
225 Self::inner(self).value.fmt(f)
226 }
227}
228
impl<T: Releasable> Share for OwnedRef<T> {
    /// Creates another `OwnedRef` to the same value by incrementing the owned
    /// count. Relaxed ordering suffices here because the caller already holds
    /// an owned reference, so the count cannot concurrently drop to 0.
    fn share(&self) -> Self {
        let inner = Self::inner(self);
        let previous_count = inner.owned_refs_count.fetch_add(1, Ordering::Relaxed);
        debug_assert!(previous_count > 0, "OwnedRef should not be used after being released.");
        Self { inner: Some(Arc::clone(inner)), drop_guard: Default::default() }
    }
}
238
impl<T: Releasable> Releasable for OwnedRef<T> {
    type Context<'a> = T::Context<'a>;

    /// Drops this owned reference; if it was the last one, waits for all
    /// `TempRef`s to drop and releases the wrapped value with `c`.
    #[allow(unused_mut)]
    fn release<'a>(mut self, c: Self::Context<'a>) {
        OwnedRef::take(&mut self).release(c);
    }
}
249
250impl<T: Default> Default for OwnedRef<T> {
251 fn default() -> Self {
252 Self::new(T::default())
253 }
254}
255
256impl<T> std::ops::Deref for OwnedRef<T> {
257 type Target = T;
258
259 fn deref(&self) -> &Self::Target {
260 &Self::inner(self).deref().value
261 }
262}
263
264impl<T> std::borrow::Borrow<T> for OwnedRef<T> {
265 fn borrow(&self) -> &T {
266 self.deref()
267 }
268}
269
270impl<T> std::convert::AsRef<T> for OwnedRef<T> {
271 fn as_ref(&self) -> &T {
272 self.deref()
273 }
274}
275
276impl<T: PartialEq> PartialEq<TempRef<'_, T>> for OwnedRef<T> {
277 fn eq(&self, other: &TempRef<'_, T>) -> bool {
278 Arc::ptr_eq(Self::inner(self), &other.0)
279 }
280}
281
282impl<T: PartialEq> PartialEq for OwnedRef<T> {
283 fn eq(&self, other: &OwnedRef<T>) -> bool {
284 Arc::ptr_eq(Self::inner(self), Self::inner(other)) || **self == **other
285 }
286}
287
// Equality delegates to `T`, so `T: Eq` makes `OwnedRef<T>` a full equivalence.
impl<T: Eq> Eq for OwnedRef<T> {}
289
290impl<T: PartialOrd> PartialOrd for OwnedRef<T> {
291 fn partial_cmp(&self, other: &OwnedRef<T>) -> Option<std::cmp::Ordering> {
292 (**self).partial_cmp(&**other)
293 }
294}
295
296impl<T: Ord> Ord for OwnedRef<T> {
297 fn cmp(&self, other: &OwnedRef<T>) -> std::cmp::Ordering {
298 (**self).cmp(&**other)
299 }
300}
301
302impl<T: Hash> Hash for OwnedRef<T> {
303 fn hash<H: Hasher>(&self, state: &mut H) {
304 (**self).hash(state)
305 }
306}
307
308impl<T> From<&OwnedRef<T>> for WeakRef<T> {
309 fn from(owner: &OwnedRef<T>) -> Self {
310 OwnedRef::downgrade(owner)
311 }
312}
313
314impl<'a, T> From<&'a OwnedRef<T>> for TempRef<'a, T> {
315 fn from(owner: &'a OwnedRef<T>) -> Self {
316 OwnedRef::temp(owner)
317 }
318}
319
320impl<'a, T> From<&'a mut OwnedRef<T>> for TempRef<'a, T> {
321 fn from(owner: &'a mut OwnedRef<T>) -> Self {
322 OwnedRef::temp(owner)
323 }
324}
325
/// A weak reference to a value managed by `OwnedRef`. Upgrading fails once the
/// value has been released (owned count 0) or deallocated.
#[derive(Debug)]
pub struct WeakRef<T>(Weak<RefInner<T>>);
330
impl<T> WeakRef<T> {
    /// Creates an empty `WeakRef` that can never be upgraded.
    pub fn new() -> Self {
        Self(Weak::new())
    }

    /// Attempts to upgrade to a `TempRef`. Returns `None` when the allocation
    /// is gone, or when it still exists but the owned-reference count has
    /// reached 0 (the value has been, or is being, released).
    pub fn upgrade(&self) -> Option<TempRef<'_, T>> {
        if let Some(value) = self.0.upgrade() {
            // Construct the TempRef first so the temp-ref count is already
            // incremented when we check the owned count; if the check fails,
            // dropping `temp_ref` decrements the count and can wake a thread
            // waiting to release the value.
            let temp_ref = TempRef::new(value);
            if temp_ref.0.owned_refs_count.load(Ordering::Acquire) > 0 {
                return Some(temp_ref);
            }
        }
        None
    }

    /// Attempts to recreate an `OwnedRef`, incrementing the owned count.
    /// Returns `None` once the value has been released.
    pub fn re_own(&self) -> Option<OwnedRef<T>> {
        self.0.upgrade().and_then(OwnedRef::re_own)
    }

    /// Returns a raw pointer to the wrapped value without upgrading.
    ///
    /// The pointer may be dangling and must never be dereferenced; it is only
    /// suitable for identity comparison and hashing.
    pub fn as_ptr(&self) -> *const T {
        let base = self.0.as_ptr();
        let value = memoffset::raw_field!(base, RefInner<T>, value);
        memoffset::raw_field!(value, ReleaseGuard<T>, value)
    }

    /// Returns whether `this` and `other` point at the same allocation.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Self::as_ptr(this) == Self::as_ptr(other)
    }
}
374
375impl<T> Default for WeakRef<T> {
376 fn default() -> Self {
377 Self::new()
378 }
379}
380
381impl<T> Clone for WeakRef<T> {
382 fn clone(&self) -> Self {
383 Self(self.0.clone())
384 }
385}
386
387impl<T> PartialEq for WeakRef<T> {
388 fn eq(&self, other: &Self) -> bool {
389 WeakRef::ptr_eq(self, other)
390 }
391}
392
/// Wrapper around `WeakRef` that compares, orders and hashes purely by the
/// address of the referenced value, making it usable as a map/set key even
/// after the value is gone.
pub struct WeakRefKey<T>(pub WeakRef<T>);
impl<T> PartialEq for WeakRefKey<T> {
    fn eq(&self, other: &Self) -> bool {
        WeakRef::ptr_eq(&self.0, &other.0)
    }
}
impl<T> PartialOrd for WeakRefKey<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl<T> Ord for WeakRefKey<T> {
    /// Orders by raw pointer value, consistent with `eq` and `hash`.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        WeakRef::as_ptr(&self.0).cmp(&WeakRef::as_ptr(&other.0))
    }
}
impl<T> From<WeakRef<T>> for WeakRefKey<T> {
    fn from(weak_ref: WeakRef<T>) -> Self {
        Self(weak_ref)
    }
}
impl<'a, T> From<&TempRef<'a, T>> for WeakRefKey<T> {
    fn from(temp_ref: &TempRef<'a, T>) -> Self {
        Self(WeakRef::from(temp_ref))
    }
}
impl<'a, T> From<&OwnedRef<T>> for WeakRefKey<T> {
    fn from(owned_ref: &OwnedRef<T>) -> Self {
        Self(WeakRef::from(owned_ref))
    }
}
impl<T> Clone for WeakRefKey<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
impl<T> Eq for WeakRefKey<T> {}
impl<T> Hash for WeakRefKey<T> {
    /// Hashes the address of the referenced value, matching `eq`'s identity
    /// semantics.
    fn hash<H: Hasher>(&self, state: &mut H) {
        WeakRef::as_ptr(&self.0).hash(state);
    }
}
impl<T> std::ops::Deref for WeakRefKey<T> {
    type Target = WeakRef<T>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<T> std::fmt::Debug for WeakRefKey<T> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        fmt.debug_tuple(std::any::type_name::<Self>()).field(&self.0.as_ptr()).finish()
    }
}
447
/// A short-lived strong reference to a value managed by `OwnedRef`. While a
/// `TempRef` is alive, release of the value blocks (see
/// `OwnedRef::wait_and_take_value`). The `*mut u8` in the `PhantomData` makes
/// `TempRef` neither `Send` nor `Sync`, keeping it thread-local.
pub struct TempRef<'a, T>(Arc<RefInner<T>>, std::marker::PhantomData<(&'a T, *mut u8)>);
456impl<'a, T> std::fmt::Debug for TempRef<'a, T>
457where
458 T: std::fmt::Debug,
459{
460 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
461 self.deref().fmt(f)
462 }
463}
464
impl<'a, T> Drop for TempRef<'a, T> {
    fn drop(&mut self) {
        // Decrements the temp-ref count; when it reaches 0 this wakes a thread
        // that may be blocked waiting to release the value.
        self.0.dec_temp_ref();
    }
}
470
impl<'a, T> TempRef<'a, T> {
    /// Builds a `TempRef`, incrementing the temp-ref count on `inner`.
    fn new(inner: Arc<RefInner<T>>) -> Self {
        inner.inc_temp_ref();
        Self(inner, Default::default())
    }

    /// Returns a raw pointer to the referenced value.
    pub fn as_ptr(this: &Self) -> *const T {
        &this.0.value.value as *const T
    }

    /// Returns whether `this` and `other` refer to the same allocation.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        Self::as_ptr(this) == Self::as_ptr(other)
    }

    /// Extends this reference's lifetime to `'static`. The returned `TempRef`
    /// takes its own count (and `this` is dropped, releasing its count). Note
    /// that a `'static` `TempRef` still blocks release of the value while it
    /// is alive.
    pub fn into_static(this: Self) -> TempRef<'static, T> {
        TempRef::new(this.0.clone())
    }

    /// Attempts to recreate an `OwnedRef`; returns `None` once the value has
    /// been released.
    pub fn re_own(&self) -> Option<OwnedRef<T>> {
        OwnedRef::re_own(Arc::clone(&self.0))
    }
}
505
506impl<'a, T> From<&TempRef<'a, T>> for WeakRef<T> {
507 fn from(temp_ref: &TempRef<'a, T>) -> Self {
508 Self(Arc::downgrade(&temp_ref.0))
509 }
510}
511
512impl<'a, T> From<TempRef<'a, T>> for WeakRef<T> {
513 fn from(temp_ref: TempRef<'a, T>) -> Self {
514 Self(Arc::downgrade(&temp_ref.0))
515 }
516}
517
518impl<'a, T> std::ops::Deref for TempRef<'a, T> {
519 type Target = T;
520
521 fn deref(&self) -> &Self::Target {
522 &self.0.deref().value
523 }
524}
525
526impl<'a, T> std::borrow::Borrow<T> for TempRef<'a, T> {
527 fn borrow(&self) -> &T {
528 &self.0.deref().value
529 }
530}
531
532impl<'a, T> std::convert::AsRef<T> for TempRef<'a, T> {
533 fn as_ref(&self) -> &T {
534 &self.0.deref().value
535 }
536}
537
538impl<'a, T: PartialEq> PartialEq for TempRef<'a, T> {
539 fn eq(&self, other: &TempRef<'_, T>) -> bool {
540 Arc::ptr_eq(&self.0, &other.0) || **self == **other
541 }
542}
543
// Equality delegates to `T`, so `T: Eq` makes `TempRef<T>` a full equivalence.
impl<'a, T: Eq> Eq for TempRef<'a, T> {}
545
546impl<'a, T: PartialOrd> PartialOrd for TempRef<'a, T> {
547 fn partial_cmp(&self, other: &TempRef<'_, T>) -> Option<std::cmp::Ordering> {
548 (**self).partial_cmp(&**other)
549 }
550}
551
552impl<'a, T: Ord> Ord for TempRef<'a, T> {
553 fn cmp(&self, other: &TempRef<'_, T>) -> std::cmp::Ordering {
554 (**self).cmp(&**other)
555 }
556}
557
558impl<'a, T: Hash> Hash for TempRef<'a, T> {
559 fn hash<H: Hasher>(&self, state: &mut H) {
560 (**self).hash(state)
561 }
562}
563
/// Wrapper around `TempRef` that compares and hashes by pointer identity,
/// making it usable as a map/set key regardless of `T`'s own equality.
pub struct TempRefKey<'a, T>(pub TempRef<'a, T>);
impl<'a, T> PartialEq for TempRefKey<'a, T> {
    fn eq(&self, other: &Self) -> bool {
        TempRef::ptr_eq(&self.0, &other.0)
    }
}
impl<'a, T> Eq for TempRefKey<'a, T> {}
impl<'a, T> Hash for TempRefKey<'a, T> {
    /// Hashes the address of the referenced value, matching `eq`'s identity
    /// semantics.
    fn hash<H: Hasher>(&self, state: &mut H) {
        TempRef::as_ptr(&self.0).hash(state);
    }
}
impl<'a, T> std::ops::Deref for TempRefKey<'a, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}
583
/// Wraps a value to ensure it is explicitly taken/released rather than being
/// silently dropped.
#[must_use = "ReleaseGuard must be released"]
pub struct ReleaseGuard<T> {
    /// The wrapped value.
    value: T,

    /// Debug-only guard that panics if this value is dropped while still armed.
    drop_guard: DropGuard,
}
594
#[cfg(test)]
impl<T> ReleaseGuard<T> {
    /// Test helper: wraps `value` with an already-disarmed guard, so dropping
    /// it without releasing does not panic.
    pub fn new_released(value: T) -> Self {
        let guard = ReleaseGuard::from(value);
        guard.drop_guard.disarm();
        guard
    }
}
603
604impl<T> ReleaseGuard<T> {
605 pub fn take(this: ReleaseGuard<T>) -> T {
609 this.drop_guard.disarm();
610 this.value
611 }
612}
613
#[cfg(test)]
impl<T: Default> ReleaseGuard<T> {
    /// Test helper: a default value wrapped in an already-disarmed guard.
    pub fn default_released() -> Self {
        ReleaseGuard::new_released(Default::default())
    }
}
620
621impl<T: std::fmt::Debug> std::fmt::Debug for ReleaseGuard<T> {
622 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
623 self.value.fmt(f)
624 }
625}
626
627impl<T: Default> Default for ReleaseGuard<T> {
628 fn default() -> Self {
629 T::default().into()
630 }
631}
632
633impl<T: Clone> Clone for ReleaseGuard<T> {
634 fn clone(&self) -> Self {
635 self.value.clone().into()
636 }
637}
638
639impl<T> From<T> for ReleaseGuard<T> {
640 fn from(value: T) -> Self {
641 Self { value, drop_guard: Default::default() }
642 }
643}
644
645impl<T> std::ops::Deref for ReleaseGuard<T> {
646 type Target = T;
647
648 fn deref(&self) -> &Self::Target {
649 &self.value
650 }
651}
652
653impl<T> std::ops::DerefMut for ReleaseGuard<T> {
654 fn deref_mut(&mut self) -> &mut Self::Target {
655 &mut self.value
656 }
657}
658
659impl<T> std::borrow::Borrow<T> for ReleaseGuard<T> {
660 fn borrow(&self) -> &T {
661 self.deref()
662 }
663}
664
665impl<T> std::convert::AsRef<T> for ReleaseGuard<T> {
666 fn as_ref(&self) -> &T {
667 self.deref()
668 }
669}
670
671impl<T: PartialEq> PartialEq for ReleaseGuard<T> {
672 fn eq(&self, other: &ReleaseGuard<T>) -> bool {
673 **self == **other
674 }
675}
676
// Equality delegates to `T`, so `T: Eq` makes `ReleaseGuard<T>` a full equivalence.
impl<T: Eq> Eq for ReleaseGuard<T> {}
678
679impl<T: PartialOrd> PartialOrd for ReleaseGuard<T> {
680 fn partial_cmp(&self, other: &ReleaseGuard<T>) -> Option<std::cmp::Ordering> {
681 (**self).partial_cmp(&**other)
682 }
683}
684
685impl<T: Ord> Ord for ReleaseGuard<T> {
686 fn cmp(&self, other: &ReleaseGuard<T>) -> std::cmp::Ordering {
687 (**self).cmp(&**other)
688 }
689}
690
691impl<T: Hash> Hash for ReleaseGuard<T> {
692 fn hash<H: Hasher>(&self, state: &mut H) {
693 (**self).hash(state)
694 }
695}
696
/// Debug-only guard that panics when dropped unless `disarm` was called first.
/// In non-test release builds it carries no state.
#[derive(Default, Debug)]
pub struct DropGuard {
    /// Whether the guard has been disarmed; checked in `Drop`.
    #[cfg(any(test, debug_assertions))]
    released: std::sync::atomic::AtomicBool,
}
702
703impl DropGuard {
704 #[inline(always)]
705 pub fn disarm(&self) {
706 #[cfg(any(test, debug_assertions))]
707 {
708 if self
709 .released
710 .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
711 .is_err()
712 {
713 panic!("Guard was disarmed twice");
714 }
715 }
716 }
717}
718
719#[cfg(any(test, debug_assertions))]
720impl Drop for DropGuard {
721 fn drop(&mut self) {
722 assert!(*self.released.get_mut());
723 }
724}
#[cfg(any(test, debug_assertions))]
thread_local! {
    /// Number of `TempRef`s created and not yet dropped on the current thread;
    /// maintained by `RefInner::{inc,dec}_temp_ref` and checked by
    /// `debug_assert_no_local_temp_ref`.
    static TEMP_REF_LOCAL_COUNT: std::cell::RefCell<usize> = const { std::cell::RefCell::new(0) };
}
731
732pub fn debug_assert_no_local_temp_ref() {
735 #[cfg(any(test, debug_assertions))]
736 {
737 TEMP_REF_LOCAL_COUNT.with(|count| {
738 assert_eq!(*count.borrow(), 0, "Current threads owns {} TempRef", *count.borrow());
739 });
740 }
741}
742
/// The shared heap state behind `OwnedRef`, `TempRef` and `WeakRef`.
struct RefInner<T> {
    /// The wrapped value, guarded so it must be explicitly released.
    value: ReleaseGuard<T>,
    /// Number of live `OwnedRef`s. When it reaches 0, the releasing thread
    /// takes the value out (after waiting for all `TempRef`s).
    owned_refs_count: AtomicUsize,
    /// Number of live `TempRef`s, stored in a futex so the releasing thread
    /// can block until it reaches 0.
    temp_refs_count: zx::Futex,
}
764
impl<T> RefInner<T> {
    /// Builds the shared state for a single new `OwnedRef` (owned count 1,
    /// temp count 0).
    fn new(value: T) -> Self {
        Self {
            value: value.into(),
            owned_refs_count: AtomicUsize::new(1),
            temp_refs_count: zx::Futex::new(0),
        }
    }

    /// Increments the temp-ref count (and, in test/debug builds, the
    /// thread-local counter used by `debug_assert_no_local_temp_ref`).
    fn inc_temp_ref(&self) {
        self.temp_refs_count.fetch_add(1, Ordering::Relaxed);
        #[cfg(any(test, debug_assertions))]
        {
            TEMP_REF_LOCAL_COUNT.with(|count| {
                *count.borrow_mut() += 1;
            });
        }
    }

    /// Decrements the temp-ref count. If it reaches 0, wakes a thread that may
    /// be blocked in `wait_for_no_ref_once` waiting to release the value.
    fn dec_temp_ref(&self) {
        let previous_count = self.temp_refs_count.fetch_sub(1, Ordering::Release);
        if previous_count == 1 {
            // Pair the Release decrement with an Acquire fence before waking,
            // so the woken releasing thread observes all prior writes.
            fence(Ordering::Acquire);
            self.temp_refs_count.wake_single_owner();
        }
        #[cfg(any(test, debug_assertions))]
        {
            TEMP_REF_LOCAL_COUNT.with(|count| {
                *count.borrow_mut() -= 1;
            });
        }
    }

    /// Blocks at most once until the temp-ref count may have reached 0.
    /// Callers must re-check their condition in a loop: the wait returns
    /// BAD_STATE if the count changed before the wait was installed, and may
    /// also return after a wake without the condition holding anymore.
    fn wait_for_no_ref_once(self: &Arc<Self>) {
        let current_value = self.temp_refs_count.load(Ordering::Acquire);
        if current_value == 0 {
            return;
        }
        let result = self.temp_refs_count.wait(current_value, None, zx::MonotonicInstant::INFINITE);
        debug_assert!(
            result == Ok(()) || result == Err(zx::Status::BAD_STATE),
            "Unexpected result: {result:?}"
        );
    }
}
819
/// Runs `$body`; on `Err`, releases `$releasable_name` with `$context` and
/// returns the error from the enclosing function. On `Ok(x)`, the macro
/// evaluates to `x` and the caller remains responsible for releasing.
#[macro_export]
macro_rules! release_on_error {
    ($releasable_name:ident, $context:expr, $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (|| $body)() };
        match result {
            Err(e) => {
                $releasable_name.release($context);
                return Err(e);
            }
            Ok(x) => x,
        }
    }};
}
836
/// Runs `$body` (sync or async), then unconditionally releases
/// `$releasable_name` with `$context`, and evaluates to the body's result.
// NOTE(review): the async arm matches an optional return type WITHOUT a
// leading `->` (`async || i32 { .. }`), while `release_iter_after`'s async arm
// requires `-> i32`. Presumably intentional for existing callers — confirm
// before unifying the grammar.
#[macro_export]
macro_rules! release_after {
    ($releasable_name:ident, $context:expr, async || $($output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (async || $(-> $output_type)? { $body })().await };
        $releasable_name.release($context);
        result
    }};
    ($releasable_name:ident, $context:expr, $(|| -> $output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (|| $(-> $output_type)? { $body })() };
        $releasable_name.release($context);
        result
    }};
}
853
/// Runs `$body` (sync or async), then releases every item yielded by
/// `$releasable_iter` with `$context`, and evaluates to the body's result.
#[macro_export]
macro_rules! release_iter_after {
    ($releasable_iter:ident, $context:expr, async || $(-> $output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (async || $(-> $output_type)? { $body })().await };
        for item in $releasable_iter.into_iter() {
            item.release($context);
        }
        result
    }};
    ($releasable_iter:ident, $context:expr, $(|| -> $output_type:ty)? $body:block ) => {{
        #[allow(clippy::redundant_closure_call)]
        let result = { (|| $(-> $output_type)? { $body })() };
        for item in $releasable_iter.into_iter() {
            item.release($context);
        }
        result
    }};
}
875
876pub use {release_after, release_iter_after, release_on_error};
877
#[cfg(test)]
mod test {
    use super::*;

    /// Minimal `Releasable` with a unit context and no-op release.
    #[derive(Default)]
    struct Data;

    impl Releasable for Data {
        type Context<'a> = ();
        fn release<'a>(self, _: ()) {}
    }

    /// `Releasable` whose context is a mutable reference, exercising release
    /// contexts with non-trivial lifetimes.
    #[derive(Default)]
    struct DataWithMutableReleaseContext;

    impl Releasable for DataWithMutableReleaseContext {
        type Context<'a> = &'a mut ();
        fn release<'a>(self, _: &'a mut ()) {}
    }

    // Dropping an OwnedRef without releasing it must panic (DropGuard).
    #[::fuchsia::test]
    #[should_panic]
    fn drop_without_release() {
        let _ = OwnedRef::new(Data {});
    }

    #[::fuchsia::test]
    fn test_creation_and_reference() {
        let value = OwnedRef::new(Data {});
        let reference = WeakRef::from(&value);
        reference.upgrade().expect("upgrade");
        value.release(());
        // After release, upgrading the weak reference must fail.
        assert!(reference.upgrade().is_none());
    }

    #[::fuchsia::test]
    fn test_clone() {
        let value = OwnedRef::new(Data {});
        {
            let value2 = OwnedRef::share(&value);
            value2.release(());
        }
        #[allow(clippy::redundant_clone)]
        {
            let reference = WeakRef::from(&value);
            let _reference2 = reference.clone();
        }
        value.release(());
    }

    #[::fuchsia::test]
    fn test_default() {
        let reference = WeakRef::<Data>::default();
        assert!(reference.upgrade().is_none());
    }

    #[::fuchsia::test]
    fn test_release_on_error() {
        fn release_on_error() -> Result<(), ()> {
            let value = OwnedRef::new(Data {});
            release_on_error!(value, (), {
                if true {
                    return Err(());
                }
                Ok(())
            });
            Ok(())
        }
        assert_eq!(release_on_error(), Err(()));
    }

    #[::fuchsia::test]
    fn test_into_static() {
        let value = OwnedRef::new(Data {});
        let weak = WeakRef::from(&value);
        // The 'static TempRef keeps its own count, independent of `weak`.
        let static_ref = TempRef::into_static(weak.upgrade().unwrap());
        std::mem::drop(weak);
        std::mem::drop(static_ref);
        value.release(());
    }

    #[::fuchsia::test]
    fn test_debug_assert_no_local_temp_ref() {
        debug_assert_no_local_temp_ref();
        let value = OwnedRef::new(Data {});
        debug_assert_no_local_temp_ref();
        let _temp_ref = OwnedRef::temp(&value);
        // The counter is thread-local, so another thread sees zero TempRefs.
        std::thread::spawn(|| {
            debug_assert_no_local_temp_ref();
        })
        .join()
        .expect("join");
        std::mem::drop(_temp_ref);
        debug_assert_no_local_temp_ref();
        value.release(());
        debug_assert_no_local_temp_ref();
    }

    #[::fuchsia::test]
    #[should_panic]
    fn test_debug_assert_no_local_temp_ref_aborts() {
        let value = OwnedRef::new(Data {});
        {
            let _temp_ref = OwnedRef::temp(&value);
            debug_assert_no_local_temp_ref();
        }
        value.release(());
    }

    #[::fuchsia::test]
    #[should_panic]
    fn test_unrelease_release_guard() {
        let _value = ReleaseGuard::<Data>::default();
    }

    #[::fuchsia::test]
    fn test_released_release_guard() {
        let _value = ReleaseGuard::<Data>::default_released();
    }

    #[::fuchsia::test]
    fn release_with_mutable_context() {
        let value = OwnedRef::new(DataWithMutableReleaseContext {});
        let mut context = ();
        value.release(&mut context);
    }

    // Stress: concurrent upgrades racing with release must all observe None
    // eventually and never upgrade after release completes.
    #[::fuchsia::test]
    fn upgrade_while_release() {
        let value = OwnedRef::new(Data {});
        for _ in 0..10 {
            std::thread::spawn({
                let weak = OwnedRef::downgrade(&value);
                move || loop {
                    if weak.upgrade().is_none() {
                        return;
                    }
                }
            });
        }
        std::thread::sleep(std::time::Duration::from_millis(10));
        value.release(());
    }

    #[::fuchsia::test]
    fn new_cyclic() {
        let mut weak_value = None;
        let value = OwnedRef::new_cyclic(|weak| {
            weak_value = Some(weak);
            Data {}
        });
        let weak_value = weak_value.expect("weak_value");
        assert!(weak_value.upgrade().is_some());
        value.release(());
        assert!(weak_value.upgrade().is_none());
    }

    #[::fuchsia::test]
    fn as_ptr() {
        let value = OwnedRef::new(Data {});
        let weak = OwnedRef::downgrade(&value);
        let temp = weak.upgrade().expect("upgrade");
        assert_eq!(OwnedRef::as_ptr(&value), weak.as_ptr());
        assert_eq!(OwnedRef::as_ptr(&value), TempRef::as_ptr(&temp));
        std::mem::drop(temp);
        value.release(());
    }

    #[::fuchsia::test]
    fn test_re_own() {
        let data = Data::default();
        let owned = OwnedRef::new(data);
        let weak = WeakRef::from(&owned);

        let re_owned = weak.re_own();
        assert!(re_owned.is_some());

        // `re_owned` still holds an owned count, so this does not release the value.
        owned.release(());

        let re_owned_again = weak.re_own();
        assert!(re_owned_again.is_some());
        re_owned_again.release(());

        // Last owned reference: the value is actually released here.
        re_owned.release(());

        let re_owned_finally = weak.re_own();
        assert!(re_owned_finally.is_none());
    }

    #[::fuchsia::test]
    fn test_re_own_concurrent() {
        let owned = OwnedRef::new(Data::default());
        let weak = WeakRef::from(&owned);
        let num_threads = 10;

        let mut handles = vec![];
        for _ in 0..num_threads {
            let weak = weak.clone();
            let handle = std::thread::spawn(move || {
                loop {
                    if let Some(re_owned) = weak.re_own() {
                        re_owned.release(());
                    } else {
                        return;
                    }
                }
            });
            handles.push(handle);
        }

        owned.release(());

        for handle in handles {
            handle.join().unwrap();
        }

        assert!(weak.re_own().is_none());
    }

    #[::fuchsia::test]
    fn test_release_after() {
        let owned = OwnedRef::new(Data::default());
        let value = release_after!(owned, (), { 0 });
        assert_eq!(value, 0);
    }

    #[::fuchsia::test]
    async fn test_release_after_async() {
        let owned = OwnedRef::new(Data::default());
        let value = release_after!(owned, (), async || { 0 });
        assert_eq!(value, 0);
    }
}