1use core::fmt::Debug;
17use core::hash::{Hash, Hasher};
18use core::ops::Deref;
19use core::panic::Location;
20use core::sync::atomic::{AtomicBool, Ordering};
21
22use derivative::Derivative;
23
/// Caller tracking for [`Strong`] references.
///
/// When the "rc-debug-names" feature is enabled, every `Strong` records the
/// source [`Location`] it was created at so leak diagnostics can name the
/// offending call sites; with the feature disabled, tracking compiles down
/// to zero-sized no-ops.
mod caller {
    use core::fmt::Debug;
    use core::panic::Location;

    /// A multiset of creation call sites for all live `Strong` references.
    ///
    /// With "rc-debug-names" disabled this struct has no fields and costs
    /// nothing.
    #[derive(Default)]
    pub(super) struct Callers {
        // Maps a creation location to the number of live `Strong`s minted
        // there.
        #[cfg(feature = "rc-debug-names")]
        pub(super) callers: std::sync::Mutex<std::collections::HashMap<Location<'static>, usize>>,
    }

    impl Debug for Callers {
        // Feature off: nothing is recorded, so say so.
        #[cfg(not(feature = "rc-debug-names"))]
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            write!(f, "(Not Tracked)")
        }
        // Feature on: render each location with its live count.
        #[cfg(feature = "rc-debug-names")]
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            let Self { callers } = self;
            let callers = callers.lock().unwrap();
            write!(f, "[\n")?;
            for (l, c) in callers.iter() {
                write!(f, " {l} => {c},\n")?;
            }
            write!(f, "]")
        }
    }

    impl Callers {
        /// Records `caller` as holding one more `Strong` reference and
        /// returns the token that must be handed back to
        /// [`TrackedCaller::release`] when that reference is dropped.
        pub(super) fn insert(&self, caller: &Location<'static>) -> TrackedCaller {
            #[cfg(not(feature = "rc-debug-names"))]
            {
                let _ = caller;
                TrackedCaller {}
            }
            #[cfg(feature = "rc-debug-names")]
            {
                let Self { callers } = self;
                let mut callers = callers.lock().unwrap();
                let count = callers.entry(caller.clone()).or_insert(0);
                *count += 1;
                TrackedCaller { location: caller.clone() }
            }
        }
    }

    /// Token held by a `Strong`, identifying the call site that created it
    /// (when "rc-debug-names" is enabled; zero-sized otherwise).
    #[derive(Debug)]
    pub(super) struct TrackedCaller {
        #[cfg(feature = "rc-debug-names")]
        pub(super) location: Location<'static>,
    }

    impl TrackedCaller {
        // No-op when tracking is compiled out.
        #[cfg(not(feature = "rc-debug-names"))]
        pub(super) fn release(&mut self, Callers {}: &Callers) {
            let Self {} = self;
        }

        /// Decrements the live count for this token's creation site,
        /// removing the map entry entirely when the count reaches zero.
        ///
        /// # Panics
        ///
        /// Panics if this token's location is absent from `callers` or
        /// already has a zero count (release without a matching insert).
        #[cfg(feature = "rc-debug-names")]
        pub(super) fn release(&mut self, Callers { callers }: &Callers) {
            let Self { location } = self;
            let mut callers = callers.lock().unwrap();
            let mut entry = match callers.entry(location.clone()) {
                std::collections::hash_map::Entry::Vacant(_) => {
                    panic!("location {location:?} was not in the callers map")
                }
                std::collections::hash_map::Entry::Occupied(o) => o,
            };

            let sub = entry
                .get()
                .checked_sub(1)
                .unwrap_or_else(|| panic!("zero-count location {location:?} in map"));
            if sub == 0 {
                let _: usize = entry.remove();
            } else {
                *entry.get_mut() = sub;
            }
        }
    }
}
122
/// Process-unique numeric identities for reference-counted resources.
mod resource_token {
    use core::fmt::Debug;
    use core::sync::atomic::{AtomicU64, Ordering};
    use std::marker::PhantomData;

    /// A borrowed view of a [`ResourceTokenValue`]'s numeric identity.
    ///
    /// The lifetime ties the token to the value it was minted from; use
    /// [`ResourceToken::extend_lifetime`] to detach it.
    #[cfg_attr(any(test, feature = "testutils"), derive(PartialEq, Eq, PartialOrd, Ord))]
    pub struct ResourceToken<'a> {
        value: u64,
        _marker: PhantomData<&'a ()>,
    }

    impl<'a> ResourceToken<'a> {
        /// Detaches the token from the lifetime of its source value.
        pub fn extend_lifetime(self) -> ResourceToken<'static> {
            let Self { value, _marker } = self;
            ResourceToken { value, _marker: PhantomData }
        }

        /// Consumes the token, yielding the raw numeric identifier.
        pub fn export_value(self) -> u64 {
            let Self { value, _marker } = self;
            value
        }
    }

    impl<'a> Debug for ResourceToken<'a> {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
            let Self { value, _marker } = self;
            write!(f, "{}", value)
        }
    }

    /// An owned, process-unique numeric identity; mint one via [`Default`].
    pub struct ResourceTokenValue(u64);

    impl ResourceTokenValue {
        /// Returns a token borrowing this value's identity.
        pub fn token(&self) -> ResourceToken<'_> {
            let Self(value) = self;
            ResourceToken { value: *value, _marker: PhantomData }
        }
    }

    impl core::fmt::Debug for ResourceTokenValue {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
            let Self(value) = self;
            write!(f, "{}", value)
        }
    }

    impl Default for ResourceTokenValue {
        fn default() -> Self {
            // Process-wide counter. Relaxed suffices: only uniqueness of the
            // fetched value matters, not ordering between threads.
            static NEXT_TOKEN: AtomicU64 = AtomicU64::new(0);
            Self(NEXT_TOKEN.fetch_add(1, Ordering::Relaxed))
        }
    }
}
209
210pub use resource_token::{ResourceToken, ResourceTokenValue};
211
212mod debug_id {
213 use super::ResourceToken;
214 use core::fmt::Debug;
215
216 pub(super) enum DebugId<T> {
221 WithToken { ptr: *const T, token: ResourceToken<'static> },
224 WithoutToken { ptr: *const T },
227 }
228
229 impl<T> Debug for DebugId<T> {
230 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
231 match self {
232 DebugId::WithToken { ptr, token } => write!(f, "{:?}:{:?}", token, ptr),
233 DebugId::WithoutToken { ptr } => write!(f, "?:{:?}", ptr),
234 }
235 }
236 }
237}
238
/// Shared heap state backing [`Primary`], [`Strong`], and [`Weak`].
#[derive(Derivative)]
#[derivative(Debug)]
struct Inner<T> {
    // Set (with Release ordering) when the `Primary` starts tearing down;
    // checked before `data` may be dropped or moved out.
    marked_for_destruction: AtomicBool,
    // Call sites currently holding `Strong` refs (populated only with the
    // "rc-debug-names" feature).
    callers: caller::Callers,
    // The user payload; `ManuallyDrop` so `Inner::unwrap` can move it out
    // without its destructor also running in `Inner::drop`.
    data: core::mem::ManuallyDrop<T>,
    // If installed, receives the payload when the last reference goes away
    // (see `Inner::drop`).
    #[derivative(Debug = "ignore")]
    notifier: crate::Mutex<Option<Box<dyn Notifier<T>>>>,
    // Process-unique identity backing debug IDs and resource tokens.
    resource_token: ResourceTokenValue,
}
252
impl<T> Inner<T> {
    /// Asserts the invariant that must hold before `data` may be dropped or
    /// moved out: destruction has been flagged.
    fn pre_drop_check(marked_for_destruction: &AtomicBool) {
        // Acquire pairs with the Release `swap` in
        // `Primary::mark_for_destruction`.
        assert!(marked_for_destruction.load(Ordering::Acquire), "Must be marked for destruction");
    }

    /// Consumes this `Inner`, returning the wrapped `data`.
    ///
    /// All other fields are dropped in place and `self` is then forgotten so
    /// `Inner::drop` (which would take `data` a second time) never runs.
    fn unwrap(mut self) -> T {
        let Inner { marked_for_destruction, data, callers: holders, notifier, resource_token } =
            &mut self;

        Inner::<T>::pre_drop_check(marked_for_destruction);

        // SAFETY: every field is destroyed exactly once — the non-`data`
        // fields via `drop_in_place` here, `data` by moving it out of its
        // `ManuallyDrop` — and the `mem::forget(self)` below prevents
        // `Inner::drop` from touching any of them again.
        let data = unsafe {
            core::ptr::drop_in_place(marked_for_destruction);
            core::ptr::drop_in_place(holders);
            core::ptr::drop_in_place(notifier);
            core::ptr::drop_in_place(resource_token);

            core::mem::ManuallyDrop::take(data)
        };
        // Skip `Inner::drop`; the fields above are already dead.
        core::mem::forget(self);

        data
    }

    /// Installs `notifier` to receive the payload on teardown.
    ///
    /// # Panics
    ///
    /// Panics if a notifier is already installed.
    fn set_notifier<N: Notifier<T> + 'static>(&self, notifier: N) {
        let Self { notifier: slot, .. } = self;

        let boxed: Box<dyn Notifier<T>> = Box::new(notifier);
        // Scope the lock guard to the replacement itself.
        let prev_notifier = { slot.lock().replace(boxed) };
        assert!(prev_notifier.is_none(), "can't have a notifier already installed");
    }
}
313
impl<T> Drop for Inner<T> {
    fn drop(&mut self) {
        let Inner { marked_for_destruction, data, callers: _, notifier, resource_token: _ } = self;
        // SAFETY: `drop` runs at most once, and `Inner::unwrap` forgets
        // `self` before this could run, so `data` is still live here and is
        // taken exactly once.
        let data = unsafe { core::mem::ManuallyDrop::take(data) };
        // Teardown must already have been flagged by the `Primary`.
        Self::pre_drop_check(marked_for_destruction);
        // Hand the payload to the installed notifier, if any; otherwise it
        // is simply dropped at the end of this scope.
        if let Some(mut notifier) = notifier.lock().take() {
            notifier.notify(data);
        }
    }
}
329
/// The owning reference to shared state of type `T`.
///
/// Dropping a `Primary` asserts that no `Strong` references remain (see the
/// `Drop` impl). The `ManuallyDrop` wrapper lets teardown paths take the
/// inner `Arc` by value without running the field's destructor twice.
#[derive(Debug)]
pub struct Primary<T> {
    inner: core::mem::ManuallyDrop<alloc::sync::Arc<Inner<T>>>,
}
342
impl<T> Drop for Primary<T> {
    fn drop(&mut self) {
        // Flag destruction first so concurrent `Weak::upgrade`s start
        // failing.
        let was_marked = self.mark_for_destruction();
        let Self { inner } = self;
        // SAFETY: `drop` runs at most once, so `inner` is taken exactly once.
        let inner = unsafe { core::mem::ManuallyDrop::take(inner) };

        // Skip the leak assertions while unwinding: a panic here would be a
        // double panic (abort); see the `double_panic_protect` test.
        if !std::thread::panicking() {
            assert_eq!(was_marked, false, "Must not be marked for destruction yet");

            let Inner {
                marked_for_destruction: _,
                callers,
                data: _,
                notifier: _,
                resource_token: _,
            } = &*inner;

            // Exclude the reference we hold ourselves.
            let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
            assert!(
                refs == 0,
                "dropped Primary with {refs} strong refs remaining, \
                Callers={callers:?}"
            );
        }
    }
}
379
380impl<T> AsRef<T> for Primary<T> {
381 fn as_ref(&self) -> &T {
382 self.deref()
383 }
384}
385
386impl<T> Deref for Primary<T> {
387 type Target = T;
388
389 fn deref(&self) -> &T {
390 let Self { inner } = self;
391 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, resource_token: _ } =
392 &***inner;
393 data
394 }
395}
396
impl<T> Primary<T> {
    /// Marks the shared state for destruction, returning the previous flag
    /// value.
    ///
    /// Release ordering pairs with the Acquire loads in `Weak::upgrade` and
    /// `Inner::pre_drop_check`.
    fn mark_for_destruction(&mut self) -> bool {
        let Self { inner } = self;
        inner.marked_for_destruction.swap(true, Ordering::Release)
    }

    /// Constructs a new `Primary` owning `data`.
    pub fn new(data: T) -> Primary<T> {
        Primary {
            inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new(Inner {
                marked_for_destruction: AtomicBool::new(false),
                callers: caller::Callers::default(),
                data: core::mem::ManuallyDrop::new(data),
                notifier: crate::Mutex::new(None),
                resource_token: ResourceTokenValue::default(),
            })),
        }
    }

    /// Like [`Primary::new`], but hands `data_fn` a `Weak` back-reference to
    /// the allocation under construction (cf. `Arc::new_cyclic`).
    pub fn new_cyclic(data_fn: impl FnOnce(Weak<T>) -> T) -> Primary<T> {
        Primary {
            inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new_cyclic(move |weak| Inner {
                marked_for_destruction: AtomicBool::new(false),
                callers: caller::Callers::default(),
                data: core::mem::ManuallyDrop::new(data_fn(Weak(weak.clone()))),
                notifier: crate::Mutex::new(None),
                resource_token: ResourceTokenValue::default(),
            })),
        }
    }

    /// Mints a new `Strong` reference, recording the caller's location when
    /// "rc-debug-names" is enabled.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    pub fn clone_strong(Self { inner }: &Self) -> Strong<T> {
        let Inner { data: _, callers, marked_for_destruction: _, notifier: _, resource_token: _ } =
            &***inner;
        let caller = callers.insert(Location::caller());
        Strong { inner: alloc::sync::Arc::clone(inner), caller }
    }

    /// Creates a `Weak` reference to the same state.
    pub fn downgrade(Self { inner }: &Self) -> Weak<T> {
        Weak(alloc::sync::Arc::downgrade(inner))
    }

    /// Returns true iff `this` and the `Strong` point at the same
    /// allocation.
    pub fn ptr_eq(
        Self { inner: this }: &Self,
        Strong { inner: other, caller: _ }: &Strong<T>,
    ) -> bool {
        alloc::sync::Arc::ptr_eq(this, other)
    }

    /// Returns a debug identifier (resource token plus pointer) for this
    /// allocation.
    pub fn debug_id(&self) -> impl Debug + '_ {
        let Self { inner } = self;

        let token = inner.resource_token.token().extend_lifetime();

        debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(inner), token }
    }

    /// Marks `this` for destruction and extracts the inner `Arc` without
    /// running `Primary::drop` (and its leak assertions).
    ///
    /// # Panics
    ///
    /// Panics if `this` was already marked for destruction.
    fn mark_for_destruction_and_take_inner(mut this: Self) -> alloc::sync::Arc<Inner<T>> {
        assert!(!this.mark_for_destruction());
        let Self { inner } = &mut this;
        // SAFETY: `this` is forgotten immediately below, so `inner` is never
        // observed (or taken) again.
        let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
        core::mem::forget(this);
        inner
    }

    /// Attempts to unwrap the data, failing with the still-shared `Arc` when
    /// other strong references remain.
    fn try_unwrap(this: Self) -> Result<T, alloc::sync::Arc<Inner<T>>> {
        let inner = Self::mark_for_destruction_and_take_inner(this);
        alloc::sync::Arc::try_unwrap(inner).map(Inner::unwrap)
    }

    /// Unwraps the contained data.
    ///
    /// # Panics
    ///
    /// Panics (reporting the tracked callers, when enabled) if `Strong`
    /// references are still alive.
    pub fn unwrap(this: Self) -> T {
        Self::try_unwrap(this).unwrap_or_else(|inner| {
            let callers = &inner.callers;
            // Don't count the reference `inner` itself holds.
            let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
            panic!("can't unwrap, still had {refs} strong refs: {callers:?}");
        })
    }

    /// Relinquishes ownership; `notifier` receives the data once the last
    /// reference is dropped.
    pub fn unwrap_with_notifier<N: Notifier<T> + 'static>(this: Self, notifier: N) {
        let inner = Self::mark_for_destruction_and_take_inner(this);
        inner.set_notifier(notifier);
        // Dropping our `Arc` may itself be the last reference, in which case
        // the notifier fires right here via `Inner::drop`.
        core::mem::drop(inner);
    }

    /// Unwraps immediately when possible; otherwise installs a notifier
    /// built by `new_notifier` and returns its auxiliary output.
    pub fn unwrap_or_notify_with<N: Notifier<T> + 'static, O, F: FnOnce() -> (N, O)>(
        this: Self,
        new_notifier: F,
    ) -> Result<T, O> {
        Self::try_unwrap(this).map_err(move |inner| {
            let (notifier, output) = new_notifier();
            inner.set_notifier(notifier);
            output
        })
    }

    /// Returns a diagnostics handle reporting live references without
    /// keeping the allocation alive.
    pub fn debug_references(this: &Self) -> DebugReferences<T> {
        let Self { inner } = this;
        DebugReferences(alloc::sync::Arc::downgrade(&*inner))
    }
}
539
/// A strongly-held reference to shared state of type `T`.
///
/// Unlike a plain `Arc`, outstanding `Strong`s when the owning [`Primary`]
/// is dropped are treated as an error (see `Drop for Primary`). The `caller`
/// token feeds the "rc-debug-names" diagnostics.
#[derive(Debug, Derivative)]
pub struct Strong<T> {
    inner: alloc::sync::Arc<Inner<T>>,
    caller: caller::TrackedCaller,
}
553
554impl<T> Drop for Strong<T> {
555 fn drop(&mut self) {
556 let Self { inner, caller } = self;
557 let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
558 &**inner;
559 caller.release(callers);
560 }
561}
562
563impl<T> AsRef<T> for Strong<T> {
564 fn as_ref(&self) -> &T {
565 self.deref()
566 }
567}
568
569impl<T> Deref for Strong<T> {
570 type Target = T;
571
572 fn deref(&self) -> &T {
573 let Self { inner, caller: _ } = self;
574 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, resource_token: _ } =
575 inner.deref();
576 data
577 }
578}
579
580impl<T> core::cmp::Eq for Strong<T> {}
581
582impl<T> core::cmp::PartialEq for Strong<T> {
583 fn eq(&self, other: &Self) -> bool {
584 Self::ptr_eq(self, other)
585 }
586}
587
588impl<T> Hash for Strong<T> {
589 fn hash<H: Hasher>(&self, state: &mut H) {
590 let Self { inner, caller: _ } = self;
591 alloc::sync::Arc::as_ptr(inner).hash(state)
592 }
593}
594
595impl<T> Clone for Strong<T> {
596 #[cfg_attr(feature = "rc-debug-names", track_caller)]
597 fn clone(&self) -> Self {
598 let Self { inner, caller: _ } = self;
599 let Inner { data: _, marked_for_destruction: _, callers, notifier: _, resource_token: _ } =
600 &**inner;
601 let caller = callers.insert(Location::caller());
602 Self { inner: alloc::sync::Arc::clone(inner), caller }
603 }
604}
605
impl<T> Strong<T> {
    /// Creates a `Weak` reference to the same state.
    pub fn downgrade(Self { inner, caller: _ }: &Self) -> Weak<T> {
        Weak(alloc::sync::Arc::downgrade(inner))
    }

    /// Returns a debug identifier (resource token plus pointer) for this
    /// allocation.
    pub fn debug_id(&self) -> impl Debug + '_ {
        let Self { inner, caller: _ } = self;

        let token = inner.resource_token.token().extend_lifetime();

        debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(inner), token }
    }

    /// Returns the resource token identifying the referenced allocation.
    pub fn resource_token(&self) -> ResourceToken<'_> {
        self.inner.resource_token.token()
    }

    /// Returns whether the owning `Primary` has begun teardown.
    ///
    /// Acquire pairs with the Release `swap` in
    /// `Primary::mark_for_destruction`.
    pub fn marked_for_destruction(Self { inner, caller: _ }: &Self) -> bool {
        let Inner { marked_for_destruction, data: _, callers: _, notifier: _, resource_token: _ } =
            inner.as_ref();
        marked_for_destruction.load(Ordering::Acquire)
    }

    /// Returns true iff `this` and the `Weak` reference the same allocation.
    pub fn weak_ptr_eq(Self { inner: this, caller: _ }: &Self, Weak(other): &Weak<T>) -> bool {
        core::ptr::eq(alloc::sync::Arc::as_ptr(this), other.as_ptr())
    }

    /// Returns true iff both `Strong`s reference the same allocation.
    pub fn ptr_eq(
        Self { inner: this, caller: _ }: &Self,
        Self { inner: other, caller: _ }: &Self,
    ) -> bool {
        alloc::sync::Arc::ptr_eq(this, other)
    }

    /// Orders two `Strong`s by the address of their allocations.
    pub fn ptr_cmp(
        Self { inner: this, caller: _ }: &Self,
        Self { inner: other, caller: _ }: &Self,
    ) -> core::cmp::Ordering {
        let this = alloc::sync::Arc::as_ptr(this);
        let other = alloc::sync::Arc::as_ptr(other);
        this.cmp(&other)
    }

    /// Returns a diagnostics handle reporting live references without
    /// keeping the allocation alive.
    pub fn debug_references(this: &Self) -> DebugReferences<T> {
        let Self { inner, caller: _ } = this;
        DebugReferences(alloc::sync::Arc::downgrade(inner))
    }
}
669
/// A weakly-held reference to shared state of type `T`.
///
/// Upgrading fails once the owning [`Primary`] has been marked for
/// destruction, even while the allocation itself is still alive.
#[derive(Debug)]
pub struct Weak<T>(alloc::sync::Weak<Inner<T>>);
682
683impl<T> core::cmp::Eq for Weak<T> {}
684
685impl<T> core::cmp::PartialEq for Weak<T> {
686 fn eq(&self, other: &Self) -> bool {
687 Self::ptr_eq(self, other)
688 }
689}
690
691impl<T> Hash for Weak<T> {
692 fn hash<H: Hasher>(&self, state: &mut H) {
693 let Self(this) = self;
694 this.as_ptr().hash(state)
695 }
696}
697
698impl<T> Clone for Weak<T> {
699 fn clone(&self) -> Self {
700 let Self(this) = self;
701 Weak(this.clone())
702 }
703}
704
impl<T> Weak<T> {
    /// Returns true iff both `Weak`s reference the same allocation.
    pub fn ptr_eq(&self, Self(other): &Self) -> bool {
        let Self(this) = self;
        this.ptr_eq(other)
    }

    /// Returns a debug identifier for the referent.
    ///
    /// If the referent is still upgradable the identifier includes its
    /// resource token; otherwise only the pointer value is shown.
    pub fn debug_id(&self) -> impl Debug + '_ {
        match self.upgrade() {
            Some(strong) => {
                let Strong { inner, caller: _ } = &strong;

                let token = inner.resource_token.token().extend_lifetime();

                debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(&inner), token }
            }
            None => {
                let Self(this) = self;
                debug_id::DebugId::WithoutToken { ptr: this.as_ptr() }
            }
        }
    }

    /// Attempts to obtain a `Strong` reference.
    ///
    /// Returns `None` if the allocation is gone or its `Primary` has been
    /// marked for destruction. On success the caller's location is recorded
    /// when "rc-debug-names" is enabled.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    pub fn upgrade(&self) -> Option<Strong<T>> {
        let Self(weak) = self;
        let arc = weak.upgrade()?;
        let Inner { marked_for_destruction, data: _, callers, notifier: _, resource_token: _ } =
            arc.deref();

        // Acquire pairs with the Release `swap` in
        // `Primary::mark_for_destruction`: once teardown has started, no new
        // `Strong`s may be minted.
        if !marked_for_destruction.load(Ordering::Acquire) {
            let caller = callers.insert(Location::caller());
            Some(Strong { inner: arc, caller })
        } else {
            None
        }
    }

    /// Returns the number of strong references to the allocation.
    pub fn strong_count(&self) -> usize {
        let Self(weak) = self;
        weak.strong_count()
    }

    /// Returns a diagnostics handle reporting live references.
    pub fn debug_references(&self) -> DebugReferences<T> {
        let Self(inner) = self;
        DebugReferences(inner.clone())
    }
}
767
768fn debug_refs(
769 refs: Option<(usize, &AtomicBool, &caller::Callers)>,
770 name: &'static str,
771 f: &mut core::fmt::Formatter<'_>,
772) -> core::fmt::Result {
773 let mut f = f.debug_struct(name);
774 match refs {
775 Some((strong_count, marked_for_destruction, callers)) => f
776 .field("strong_count", &strong_count)
777 .field("marked_for_destruction", marked_for_destruction)
778 .field("callers", callers)
779 .finish(),
780 None => {
781 let strong_count = 0_usize;
782 f.field("strong_count", &strong_count).finish_non_exhaustive()
783 }
784 }
785}
786
/// A weak, diagnostics-only handle to an allocation: formatting it reports
/// the live strong count, the destruction flag, and (feature-gated) tracked
/// callers, without keeping the allocation alive.
#[derive(Clone)]
pub struct DebugReferences<T>(alloc::sync::Weak<Inner<T>>);

impl<T> Debug for DebugReferences<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let Self(inner) = self;
        // Upgrade only for the duration of formatting.
        let inner = inner.upgrade();
        let refs = inner.as_ref().map(|inner| {
            (alloc::sync::Arc::strong_count(inner), &inner.marked_for_destruction, &inner.callers)
        });
        debug_refs(refs, "DebugReferences", f)
    }
}
802
impl<T: Send + Sync + 'static> DebugReferences<T> {
    /// Erases `T`, producing a type-erased diagnostics handle.
    pub fn into_dyn(self) -> DynDebugReferences {
        let Self(w) = self;
        // Unsizing coercion from `Weak<Inner<T>>` to `Weak<dyn ExposeRefs>`.
        DynDebugReferences(w)
    }
}
810
/// Type-erased version of [`DebugReferences`].
#[derive(Clone)]
pub struct DynDebugReferences(alloc::sync::Weak<dyn ExposeRefs>);

impl Debug for DynDebugReferences {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let Self(inner) = self;
        // Upgrade only for the duration of formatting.
        let inner = inner.upgrade();
        let refs = inner.as_ref().map(|inner| {
            let (marked_for_destruction, callers) = inner.refs_info();
            (alloc::sync::Arc::strong_count(inner), marked_for_destruction, callers)
        });
        debug_refs(refs, "DynDebugReferences", f)
    }
}
826
/// Internal trait enabling type erasure of `Inner<T>` for
/// [`DynDebugReferences`].
trait ExposeRefs: Send + Sync + 'static {
    /// Exposes the destruction flag and caller table for debug formatting.
    fn refs_info(&self) -> (&AtomicBool, &caller::Callers);
}

impl<T: Send + Sync + 'static> ExposeRefs for Inner<T> {
    fn refs_info(&self) -> (&AtomicBool, &caller::Callers) {
        (&self.marked_for_destruction, &self.callers)
    }
}
837
/// Receives the wrapped data of a torn-down [`Primary`] once its last
/// reference is dropped (see `Primary::unwrap_with_notifier`).
pub trait Notifier<T>: Send {
    /// Called with the owned data when the last reference goes away.
    fn notify(&mut self, data: T);
}
847
/// A [`Notifier`] that stashes the value in a shared `Option` so it can be
/// retrieved later via [`ArcNotifier::take`].
#[derive(Debug, Derivative)]
#[derivative(Clone(bound = ""))]
pub struct ArcNotifier<T>(alloc::sync::Arc<crate::Mutex<Option<T>>>);

impl<T> ArcNotifier<T> {
    /// Creates an empty notifier.
    pub fn new() -> Self {
        Self(alloc::sync::Arc::new(crate::Mutex::new(None)))
    }

    /// Takes the notified value, if `notify` has fired.
    pub fn take(&self) -> Option<T> {
        let Self(inner) = self;
        inner.lock().take()
    }
}
868
impl<T: Send> Notifier<T> for ArcNotifier<T> {
    /// Stores `data` for later retrieval via `take`.
    ///
    /// # Panics
    ///
    /// Panics if a previously notified value is still present.
    fn notify(&mut self, data: T) {
        let Self(inner) = self;
        assert!(inner.lock().replace(data).is_none(), "notified twice");
    }
}
875
/// A [`Notifier`] adapter that transforms the data with `map` before handing
/// it to the wrapped notifier.
pub struct MapNotifier<N, F> {
    inner: N,
    // `Option` so the one-shot `FnOnce` closure can be moved out in
    // `notify`.
    map: Option<F>,
}

impl<N, F> MapNotifier<N, F> {
    /// Wraps `notifier` so notified values pass through `map` first.
    pub fn new(notifier: N, map: F) -> Self {
        Self { inner: notifier, map: Some(map) }
    }
}
890
891impl<A, B, N: Notifier<B>, F: FnOnce(A) -> B> Notifier<A> for MapNotifier<N, F>
892where
893 Self: Send,
894{
895 fn notify(&mut self, data: A) {
896 let Self { inner, map } = self;
897 let map = map.take().expect("notified twice");
898 inner.notify(map(data))
899 }
900}
901
/// `Infallible` serves as a notifier type for callers that never install
/// one; it cannot be instantiated, so `notify` can never be reached.
impl<T> Notifier<T> for core::convert::Infallible {
    fn notify(&mut self, _data: T) {
        // Uninhabited type: this match proves the body is unreachable.
        match *self {}
    }
}
908
#[cfg(test)]
mod tests {
    use super::*;

    // Upgrading must fail once the `Primary` is gone, even though the weak
    // reference itself still exists.
    #[test]
    fn zombie_weak() {
        let primary = Primary::new(());
        let weak = {
            let strong = Primary::clone_strong(&primary);
            Strong::downgrade(&strong)
        };
        core::mem::drop(primary);

        assert!(weak.upgrade().is_none());
    }

    // All reference flavors observe writes to the shared data.
    #[test]
    fn rcs() {
        const INITIAL_VAL: u8 = 1;
        const NEW_VAL: u8 = 2;

        let primary = Primary::new(crate::sync::Mutex::new(INITIAL_VAL));
        let strong = Primary::clone_strong(&primary);
        let weak = Strong::downgrade(&strong);

        *primary.lock().unwrap() = NEW_VAL;
        assert_eq!(*primary.deref().lock().unwrap(), NEW_VAL);
        assert_eq!(*strong.deref().lock().unwrap(), NEW_VAL);
        assert_eq!(*weak.upgrade().unwrap().deref().lock().unwrap(), NEW_VAL);
    }

    #[test]
    fn unwrap_primary_without_strong_held() {
        const VAL: u16 = 6;
        let primary = Primary::new(VAL);
        assert_eq!(Primary::unwrap(primary), VAL);
    }

    #[test]
    #[should_panic(expected = "can't unwrap, still had 1 strong refs")]
    fn unwrap_primary_with_strong_held() {
        let primary = Primary::new(8);
        let _strong: Strong<_> = Primary::clone_strong(&primary);
        let _: u16 = Primary::unwrap(primary);
    }

    #[test]
    #[should_panic(expected = "dropped Primary with 1 strong refs remaining")]
    fn drop_primary_with_strong_held() {
        let primary = Primary::new(9);
        let _strong: Strong<_> = Primary::clone_strong(&primary);
        core::mem::drop(primary);
    }

    // Exercises the `std::thread::panicking()` escape hatch in
    // `Primary::drop`: dropping a `Primary` with live `Strong`s during an
    // unwind must not double-panic (which would abort).
    #[cfg(not(target_os = "fuchsia"))]
    #[test]
    #[should_panic(expected = "oopsie")]
    fn double_panic_protect() {
        let primary = Primary::new(9);
        let strong = Primary::clone_strong(&primary);
        let _tuple_to_invert_drop_order = (primary, strong);
        panic!("oopsie");
    }

    #[cfg(feature = "rc-debug-names")]
    #[test]
    fn tracked_callers() {
        let primary = Primary::new(10);
        // Baseline location: all clone sites below must follow it in this
        // file, in source order.
        let here = Location::caller();
        let strong1 = Primary::clone_strong(&primary);
        let strong2 = strong1.clone();
        let weak = Strong::downgrade(&strong2);
        let strong3 = weak.upgrade().unwrap();

        let Primary { inner } = &primary;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
            &***inner;

        let strongs = [strong1, strong2, strong3];
        // Each clone site appears once in the map, in increasing line order.
        let _: &Location<'_> = strongs.iter().enumerate().fold(here, |prev, (i, cur)| {
            let Strong { inner: _, caller: caller::TrackedCaller { location: cur } } = cur;
            assert_eq!(prev.file(), cur.file(), "{i}");
            assert!(prev.line() < cur.line(), "{prev} < {cur}, {i}");
            {
                let callers = callers.callers.lock().unwrap();
                assert_eq!(callers.get(cur).copied(), Some(1));
            }

            cur
        });

        std::mem::drop(strongs);
        // Dropping the strongs must empty the tracking table.
        {
            let callers = callers.callers.lock().unwrap();
            let callers = callers.deref();
            assert!(callers.is_empty(), "{callers:?}");
        }
    }
    // Clones created at the same source line share one map entry with a
    // count of 2.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    fn same_location_caller_tracking() {
        fn clone_in_fn<T>(p: &Primary<T>) -> Strong<T> {
            Primary::clone_strong(p)
        }

        let primary = Primary::new(10);
        let strong1 = clone_in_fn(&primary);
        let strong2 = clone_in_fn(&primary);
        assert_eq!(strong1.caller.location, strong2.caller.location);

        let Primary { inner } = &primary;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
            &***inner;

        {
            let callers = callers.callers.lock().unwrap();
            assert_eq!(callers.get(&strong1.caller.location).copied(), Some(2));
        }

        std::mem::drop(strong1);
        std::mem::drop(strong2);

        {
            let callers = callers.callers.lock().unwrap();
            let callers = callers.deref();
            assert!(callers.is_empty(), "{callers:?}");
        }
    }

    // The leak panic message must name the tracked caller's source file.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    #[should_panic(expected = "core/sync/src/rc.rs")]
    fn callers_in_panic() {
        let primary = Primary::new(10);
        let _strong = Primary::clone_strong(&primary);
        drop(primary);
    }

    #[test]
    fn unwrap_with_notifier() {
        let primary = Primary::new(10);
        let strong = Primary::clone_strong(&primary);
        let notifier = ArcNotifier::new();
        Primary::unwrap_with_notifier(primary, notifier.clone());
        // A strong ref is still alive: no notification yet.
        assert_eq!(notifier.take(), None);
        core::mem::drop(strong);
        assert_eq!(notifier.take(), Some(10));
    }

    #[test]
    fn unwrap_or_notify_with_immediate() {
        let primary = Primary::new(10);
        let result = Primary::unwrap_or_notify_with::<ArcNotifier<_>, (), _>(primary, || {
            panic!("should not try to create notifier")
        });
        assert_eq!(result, Ok(10));
    }

    #[test]
    fn unwrap_or_notify_with_deferred() {
        let primary = Primary::new(10);
        let strong = Primary::clone_strong(&primary);
        let result = Primary::unwrap_or_notify_with(primary, || {
            let notifier = ArcNotifier::new();
            (notifier.clone(), notifier)
        });
        let notifier = result.unwrap_err();
        assert_eq!(notifier.take(), None);
        core::mem::drop(strong);
        assert_eq!(notifier.take(), Some(10));
    }

    #[test]
    fn map_notifier() {
        let primary = Primary::new(10);
        let notifier = ArcNotifier::new();
        let map_notifier = MapNotifier::new(notifier.clone(), |data| (data, data + 1));
        Primary::unwrap_with_notifier(primary, map_notifier);
        assert_eq!(notifier.take(), Some((10, 11)));
    }

    #[test]
    fn new_cyclic() {
        #[derive(Debug)]
        struct Data {
            value: i32,
            weak: Weak<Data>,
        }

        let primary = Primary::new_cyclic(|weak| Data { value: 2, weak });
        assert_eq!(primary.value, 2);
        let strong = primary.weak.upgrade().unwrap();
        assert_eq!(strong.value, 2);
        assert!(Primary::ptr_eq(&primary, &strong));
    }

    macro_rules! assert_debug_id_eq {
        ($id1:expr, $id2:expr) => {
            assert_eq!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
        };
    }
    macro_rules! assert_debug_id_ne {
        ($id1:expr, $id2:expr) => {
            assert_ne!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
        };
    }

    // Every reference flavor to one allocation renders the same debug id.
    #[test]
    fn debug_ids_are_stable() {
        let primary = Primary::new(1);
        let strong = Primary::clone_strong(&primary);
        let weak_p = Primary::downgrade(&primary);
        let weak_s = Strong::downgrade(&strong);
        let weak_c = weak_p.clone();
        assert_debug_id_eq!(&primary.debug_id(), &strong.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_p.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_s.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_c.debug_id());
    }

    // Debug ids are never reused, even after an allocation dies.
    #[test]
    fn debug_ids_are_unique() {
        let primary1 = Primary::new(1);
        let primary2 = Primary::new(1);
        assert_debug_id_ne!(&primary1.debug_id(), &primary2.debug_id());

        let id1 = format!("{:?}", primary1.debug_id());
        std::mem::drop(primary1);
        let primary3 = Primary::new(1);
        assert_ne!(id1, format!("{:?}", primary3.debug_id()));
    }
}