1use core::fmt::Debug;
17use core::hash::{Hash, Hasher};
18use core::ops::Deref;
19use core::panic::Location;
20use core::sync::atomic::{AtomicBool, Ordering};
21
22use derivative::Derivative;
23
mod caller {
    //! Call-site tracking for strong references. With the `rc-debug-names`
    //! feature enabled, every clone/upgrade records its `Location` so leak
    //! panics can name the offending call sites; without the feature,
    //! everything in this module is zero-sized and does nothing.

    use core::fmt::Debug;
    use core::panic::Location;

    /// Map from creating call site to the number of live references created
    /// there. Carries no data unless `rc-debug-names` is enabled.
    #[derive(Default)]
    pub(super) struct Callers {
        #[cfg(feature = "rc-debug-names")]
        pub(super) callers: std::sync::Mutex<std::collections::HashMap<Location<'static>, usize>>,
    }

    impl Debug for Callers {
        // Tracking disabled: there is nothing useful to print.
        #[cfg(not(feature = "rc-debug-names"))]
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            write!(f, "(Not Tracked)")
        }
        // Tracking enabled: print each call site with its live count.
        #[cfg(feature = "rc-debug-names")]
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            let Self { callers } = self;
            let callers = callers.lock().unwrap();
            write!(f, "[\n")?;
            for (l, c) in callers.iter() {
                write!(f, "  {l} => {c},\n")?;
            }
            write!(f, "]")
        }
    }

    impl Callers {
        /// Records `caller` as holding one more reference, returning the
        /// [`TrackedCaller`] that must be handed back via
        /// [`TrackedCaller::release`] when that reference is dropped.
        pub(super) fn insert(&self, caller: &Location<'static>) -> TrackedCaller {
            #[cfg(not(feature = "rc-debug-names"))]
            {
                // Tracking disabled: ignore the location entirely.
                let _ = caller;
                TrackedCaller {}
            }
            #[cfg(feature = "rc-debug-names")]
            {
                let Self { callers } = self;
                let mut callers = callers.lock().unwrap();
                let count = callers.entry(caller.clone()).or_insert(0);
                *count += 1;
                TrackedCaller { location: caller.clone() }
            }
        }
    }

    /// The call site recorded for a single strong reference.
    #[derive(Debug)]
    pub(super) struct TrackedCaller {
        #[cfg(feature = "rc-debug-names")]
        pub(super) location: Location<'static>,
    }

    impl TrackedCaller {
        // Tracking disabled: releasing is a no-op.
        #[cfg(not(feature = "rc-debug-names"))]
        pub(super) fn release(&mut self, Callers {}: &Callers) {
            let Self {} = self;
        }

        /// Decrements this call site's count in `callers`, removing the
        /// entry once it reaches zero.
        ///
        /// # Panics
        ///
        /// Panics if the location is absent from the map or its count is
        /// already zero — either indicates an insert/release mismatch.
        #[cfg(feature = "rc-debug-names")]
        pub(super) fn release(&mut self, Callers { callers }: &Callers) {
            let Self { location } = self;
            let mut callers = callers.lock().unwrap();
            let mut entry = match callers.entry(location.clone()) {
                std::collections::hash_map::Entry::Vacant(_) => {
                    panic!("location {location:?} was not in the callers map")
                }
                std::collections::hash_map::Entry::Occupied(o) => o,
            };

            let sub = entry
                .get()
                .checked_sub(1)
                .unwrap_or_else(|| panic!("zero-count location {location:?} in map"));
            if sub == 0 {
                let _: usize = entry.remove();
            } else {
                *entry.get_mut() = sub;
            }
        }
    }
}
122
mod resource_token {
    //! Process-unique identifiers attached to each reference-counted value,
    //! used to build stable debug IDs.

    use core::fmt::Debug;
    use core::sync::atomic::{AtomicU64, Ordering};
    use std::marker::PhantomData;
    use std::num::NonZeroU64;

    /// A borrowed view of a [`ResourceTokenValue`]: a non-zero identifier
    /// unique within the process.
    #[cfg_attr(any(test, feature = "testutils"), derive(PartialEq, Eq, PartialOrd, Ord))]
    #[derive(Clone)]
    pub struct ResourceToken<'a> {
        value: NonZeroU64,
        // Ties the token's lifetime to the `ResourceTokenValue` it came from.
        _marker: PhantomData<&'a ()>,
    }

    impl<'a> ResourceToken<'a> {
        /// Detaches the token from its source's lifetime.
        pub fn extend_lifetime(self) -> ResourceToken<'static> {
            ResourceToken { value: self.value, _marker: PhantomData }
        }

        /// Returns the raw numeric token value.
        pub fn export_value(self) -> u64 {
            self.value.get()
        }
    }

    impl<'a> Debug for ResourceToken<'a> {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
            write!(f, "{}", self.value)
        }
    }

    /// Owner of a unique token; a fresh value is allocated by `Default`.
    pub struct ResourceTokenValue(NonZeroU64);

    impl ResourceTokenValue {
        /// Returns a token borrowing from `self`.
        pub fn token(&self) -> ResourceToken<'_> {
            let ResourceTokenValue(value) = self;
            ResourceToken { value: *value, _marker: PhantomData }
        }
    }

    impl core::fmt::Debug for ResourceTokenValue {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
            let ResourceTokenValue(value) = self;
            write!(f, "{}", value)
        }
    }

    impl Default for ResourceTokenValue {
        fn default() -> Self {
            // Process-wide counter, starting at 1 so the value is non-zero.
            // NOTE(review): the `unwrap` assumes the counter never wraps back
            // to 0 (would require 2^64 allocations) — confirm acceptable.
            static NEXT_TOKEN: AtomicU64 = AtomicU64::new(1);
            Self(NonZeroU64::new(NEXT_TOKEN.fetch_add(1, Ordering::Relaxed)).unwrap())
        }
    }
}
215
216pub use resource_token::{ResourceToken, ResourceTokenValue};
217
mod debug_id {
    //! Debug identifiers combining the allocation pointer with, when still
    //! available, the value's unique resource token.

    use super::ResourceToken;
    use core::fmt::Debug;

    /// A debug identifier for a reference-counted value.
    pub(super) enum DebugId<T> {
        // The value is alive: print `token:pointer`.
        WithToken { ptr: *const T, token: ResourceToken<'static> },
        // The value is gone (weak reference only): print `?:pointer`.
        WithoutToken { ptr: *const T },
    }

    impl<T> Debug for DebugId<T> {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
            match self {
                DebugId::WithToken { ptr, token } => write!(f, "{:?}:{:?}", token, ptr),
                DebugId::WithoutToken { ptr } => write!(f, "?:{:?}", ptr),
            }
        }
    }
}
244
/// Shared state backing [`Primary`], [`Strong`], and [`Weak`].
#[derive(Derivative)]
#[derivative(Debug)]
struct Inner<T> {
    // Set when the `Primary` is dropped or unwrapped; checked by the
    // pre-drop assertion and by `Weak::upgrade`.
    marked_for_destruction: AtomicBool,
    // Call sites holding strong references (populated only under the
    // `rc-debug-names` feature).
    callers: caller::Callers,
    // The wrapped value, dropped manually so it can instead be handed to a
    // notifier on destruction.
    data: core::mem::ManuallyDrop<T>,
    #[derivative(Debug = "ignore")]
    // Optional callback that receives `data` when the last reference drops.
    notifier: crate::Mutex<Option<Box<dyn Notifier<T>>>>,
    // Process-unique identifier backing `debug_id`.
    resource_token: ResourceTokenValue,
}
258
impl<T> Inner<T> {
    /// Asserts the pre-drop invariant: the value must have been explicitly
    /// marked for destruction before this `Inner` may be destroyed.
    fn pre_drop_check(marked_for_destruction: &AtomicBool) {
        assert!(marked_for_destruction.load(Ordering::Acquire), "Must be marked for destruction");
    }

    /// Consumes the `Inner` and returns the wrapped `data` without running
    /// `Inner`'s `Drop` impl (and therefore without invoking any notifier).
    fn unwrap(mut self) -> T {
        // Destructure by mutable reference so each field can be dropped
        // individually below.
        let Inner { marked_for_destruction, data, callers: holders, notifier, resource_token } =
            &mut self;

        Inner::<T>::pre_drop_check(marked_for_destruction);

        // SAFETY: `self` is forgotten immediately after this block, so no
        // field is dropped twice and the emptied `ManuallyDrop` in `data`
        // is never observed again.
        let data = unsafe {
            core::ptr::drop_in_place(marked_for_destruction);
            core::ptr::drop_in_place(holders);
            core::ptr::drop_in_place(notifier);
            core::ptr::drop_in_place(resource_token);

            core::mem::ManuallyDrop::take(data)
        };
        // Skip `Inner::drop`: all fields were already destroyed above.
        core::mem::forget(self);

        data
    }

    /// Installs `notifier` to receive the wrapped value when the last
    /// reference is dropped.
    ///
    /// # Panics
    ///
    /// Panics if a notifier is already installed.
    fn set_notifier<N: Notifier<T> + 'static>(&self, notifier: N) {
        let Self { notifier: slot, .. } = self;

        // Type-erase so `Inner` need not be generic over the notifier type.
        let boxed: Box<dyn Notifier<T>> = Box::new(notifier);
        let prev_notifier = { slot.lock().replace(boxed) };
        assert!(prev_notifier.is_none(), "can't have a notifier already installed");
    }
}
319
impl<T> Drop for Inner<T> {
    fn drop(&mut self) {
        let Inner { marked_for_destruction, data, callers: _, notifier, resource_token: _ } = self;
        // SAFETY: `data` is taken exactly once, here during drop, so the
        // emptied `ManuallyDrop` is never accessed again.
        let data = unsafe { core::mem::ManuallyDrop::take(data) };
        Self::pre_drop_check(marked_for_destruction);
        // If a notifier was installed, hand the value off instead of simply
        // dropping it on the floor.
        if let Some(mut notifier) = notifier.lock().take() {
            notifier.notify(data);
        }
    }
}
335
/// The owning reference to a shared value; at most one exists per value.
///
/// Dropping a `Primary` asserts that no [`Strong`] references remain.
#[derive(Debug)]
pub struct Primary<T> {
    // `ManuallyDrop` so `Drop for Primary` can move the `Arc` out.
    inner: core::mem::ManuallyDrop<alloc::sync::Arc<Inner<T>>>,
}
348
349impl<T> Drop for Primary<T> {
350 fn drop(&mut self) {
351 let was_marked = self.mark_for_destruction();
352 let Self { inner } = self;
353 let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
357
358 if !std::thread::panicking() {
364 assert_eq!(was_marked, false, "Must not be marked for destruction yet");
365
366 let Inner {
367 marked_for_destruction: _,
368 callers,
369 data: _,
370 notifier: _,
371 resource_token: _,
372 } = &*inner;
373
374 let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
377 assert!(
378 refs == 0,
379 "dropped Primary with {refs} strong refs remaining, \
380 Callers={callers:?}"
381 );
382 }
383 }
384}
385
386impl<T> AsRef<T> for Primary<T> {
387 fn as_ref(&self) -> &T {
388 self.deref()
389 }
390}
391
392impl<T> Deref for Primary<T> {
393 type Target = T;
394
395 fn deref(&self) -> &T {
396 let Self { inner } = self;
397 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, resource_token: _ } =
398 &***inner;
399 data
400 }
401}
402
impl<T> Primary<T> {
    /// Flags the wrapped value for destruction, returning the previous value
    /// of the flag.
    fn mark_for_destruction(&mut self) -> bool {
        let Self { inner } = self;
        // `Release` pairs with the `Acquire` loads in `pre_drop_check`,
        // `marked_for_destruction`, and `Weak::upgrade`.
        inner.marked_for_destruction.swap(true, Ordering::Release)
    }

    /// Creates a new `Primary` owning `data`.
    pub fn new(data: T) -> Primary<T> {
        Primary {
            inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new(Inner {
                marked_for_destruction: AtomicBool::new(false),
                callers: caller::Callers::default(),
                data: core::mem::ManuallyDrop::new(data),
                notifier: crate::Mutex::new(None),
                resource_token: ResourceTokenValue::default(),
            })),
        }
    }

    /// Like [`Primary::new`], but `data_fn` receives a [`Weak`] pointing at
    /// the value under construction, enabling self-referential values.
    pub fn new_cyclic(data_fn: impl FnOnce(Weak<T>) -> T) -> Primary<T> {
        Primary {
            inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new_cyclic(move |weak| Inner {
                marked_for_destruction: AtomicBool::new(false),
                callers: caller::Callers::default(),
                data: core::mem::ManuallyDrop::new(data_fn(Weak(weak.clone()))),
                notifier: crate::Mutex::new(None),
                resource_token: ResourceTokenValue::default(),
            })),
        }
    }

    /// Creates a [`Strong`] reference to the same value, recording the call
    /// site when the `rc-debug-names` feature is enabled.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    pub fn clone_strong(Self { inner }: &Self) -> Strong<T> {
        let Inner { data: _, callers, marked_for_destruction: _, notifier: _, resource_token: _ } =
            &***inner;
        let caller = callers.insert(Location::caller());
        Strong { inner: alloc::sync::Arc::clone(inner), caller }
    }

    /// Creates a [`Weak`] reference to the same value.
    pub fn downgrade(Self { inner }: &Self) -> Weak<T> {
        Weak(alloc::sync::Arc::downgrade(inner))
    }

    /// Returns true if `this` and the given [`Strong`] point at the same
    /// allocation.
    pub fn ptr_eq(
        Self { inner: this }: &Self,
        Strong { inner: other, caller: _ }: &Strong<T>,
    ) -> bool {
        alloc::sync::Arc::ptr_eq(this, other)
    }

    /// Returns a debug identifier combining the value's resource token with
    /// its allocation pointer.
    pub fn debug_id(&self) -> impl Debug + '_ {
        let Self { inner } = self;

        let token = inner.resource_token.token().extend_lifetime();

        debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(inner), token }
    }

    /// Marks the value for destruction and moves the `Arc` out of `this`
    /// without running `Primary`'s destructor.
    fn mark_for_destruction_and_take_inner(mut this: Self) -> alloc::sync::Arc<Inner<T>> {
        // Must not already be marked: that would imply a double release.
        assert!(!this.mark_for_destruction());
        let Self { inner } = &mut this;
        // SAFETY: `this` is forgotten immediately below, so the emptied
        // `ManuallyDrop` is never observed again (including by `Drop`).
        let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
        core::mem::forget(this);
        inner
    }

    /// Attempts to unwrap the value, returning the still-shared `Arc` if
    /// other strong references exist.
    fn try_unwrap(this: Self) -> Result<T, alloc::sync::Arc<Inner<T>>> {
        let inner = Self::mark_for_destruction_and_take_inner(this);
        alloc::sync::Arc::try_unwrap(inner).map(Inner::unwrap)
    }

    /// Unwraps the value.
    ///
    /// # Panics
    ///
    /// Panics if any [`Strong`] references are still alive, reporting the
    /// tracked callers when available.
    pub fn unwrap(this: Self) -> T {
        Self::try_unwrap(this).unwrap_or_else(|inner| {
            let callers = &inner.callers;
            // Exclude the strong count held by `inner` itself.
            let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
            panic!("can't unwrap, still had {refs} strong refs: {callers:?}");
        })
    }

    /// Releases this `Primary`; `notifier` receives the value once the last
    /// [`Strong`] reference is gone (possibly immediately, on this drop).
    pub fn unwrap_with_notifier<N: Notifier<T> + 'static>(this: Self, notifier: N) {
        let inner = Self::mark_for_destruction_and_take_inner(this);
        inner.set_notifier(notifier);
        core::mem::drop(inner);
    }

    /// Unwraps the value immediately if possible; otherwise installs the
    /// notifier built by `new_notifier` and returns its auxiliary output.
    pub fn unwrap_or_notify_with<N: Notifier<T> + 'static, O, F: FnOnce() -> (N, O)>(
        this: Self,
        new_notifier: F,
    ) -> Result<T, O> {
        Self::try_unwrap(this).map_err(move |inner| {
            let (notifier, output) = new_notifier();
            inner.set_notifier(notifier);
            output
        })
    }

    /// Returns a [`DebugReferences`] diagnostic view of this value.
    pub fn debug_references(this: &Self) -> DebugReferences<T> {
        let Self { inner } = this;
        DebugReferences(alloc::sync::Arc::downgrade(&*inner))
    }
}
545
/// A strong reference to a shared value; keeps the value alive.
#[derive(Debug, Derivative)]
pub struct Strong<T> {
    inner: alloc::sync::Arc<Inner<T>>,
    // The call site that created this reference (feature-gated tracking).
    caller: caller::TrackedCaller,
}
559
impl<T> Drop for Strong<T> {
    fn drop(&mut self) {
        let Self { inner, caller } = self;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
            &**inner;
        // Remove this reference's call site from the tracking map.
        caller.release(callers);
    }
}
568
569impl<T> AsRef<T> for Strong<T> {
570 fn as_ref(&self) -> &T {
571 self.deref()
572 }
573}
574
575impl<T> Deref for Strong<T> {
576 type Target = T;
577
578 fn deref(&self) -> &T {
579 let Self { inner, caller: _ } = self;
580 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, resource_token: _ } =
581 inner.deref();
582 data
583 }
584}
585
// Pointer identity (see `PartialEq` below) is a total equivalence relation.
impl<T> core::cmp::Eq for Strong<T> {}
587
impl<T> core::cmp::PartialEq for Strong<T> {
    // Two `Strong`s are equal iff they point at the same allocation.
    fn eq(&self, other: &Self) -> bool {
        Self::ptr_eq(self, other)
    }
}
593
594impl<T> Hash for Strong<T> {
595 fn hash<H: Hasher>(&self, state: &mut H) {
596 let Self { inner, caller: _ } = self;
597 alloc::sync::Arc::as_ptr(inner).hash(state)
598 }
599}
600
impl<T> Clone for Strong<T> {
    // `track_caller` makes `Location::caller()` report the cloning call
    // site rather than this function.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    fn clone(&self) -> Self {
        let Self { inner, caller: _ } = self;
        let Inner { data: _, marked_for_destruction: _, callers, notifier: _, resource_token: _ } =
            &**inner;
        // Record the new reference's call site for leak diagnostics.
        let caller = callers.insert(Location::caller());
        Self { inner: alloc::sync::Arc::clone(inner), caller }
    }
}
611
impl<T> Strong<T> {
    /// Creates a [`Weak`] reference to the same value.
    pub fn downgrade(Self { inner, caller: _ }: &Self) -> Weak<T> {
        Weak(alloc::sync::Arc::downgrade(inner))
    }

    /// Returns a debug identifier combining the value's resource token with
    /// its allocation pointer.
    pub fn debug_id(&self) -> impl Debug + '_ {
        let Self { inner, caller: _ } = self;

        let token = inner.resource_token.token().extend_lifetime();

        debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(inner), token }
    }

    /// Returns the value's unique resource token.
    pub fn resource_token(&self) -> ResourceToken<'_> {
        self.inner.resource_token.token()
    }

    /// Returns true if the value's `Primary` has been dropped or unwrapped.
    pub fn marked_for_destruction(Self { inner, caller: _ }: &Self) -> bool {
        let Inner { marked_for_destruction, data: _, callers: _, notifier: _, resource_token: _ } =
            inner.as_ref();
        marked_for_destruction.load(Ordering::Acquire)
    }

    /// Returns true if `this` and the [`Weak`] point at the same allocation.
    pub fn weak_ptr_eq(Self { inner: this, caller: _ }: &Self, Weak(other): &Weak<T>) -> bool {
        core::ptr::eq(alloc::sync::Arc::as_ptr(this), other.as_ptr())
    }

    /// Returns true if both `Strong`s point at the same allocation.
    pub fn ptr_eq(
        Self { inner: this, caller: _ }: &Self,
        Self { inner: other, caller: _ }: &Self,
    ) -> bool {
        alloc::sync::Arc::ptr_eq(this, other)
    }

    /// Orders two `Strong`s by allocation address.
    pub fn ptr_cmp(
        Self { inner: this, caller: _ }: &Self,
        Self { inner: other, caller: _ }: &Self,
    ) -> core::cmp::Ordering {
        let this = alloc::sync::Arc::as_ptr(this);
        let other = alloc::sync::Arc::as_ptr(other);
        this.cmp(&other)
    }

    /// Returns a [`DebugReferences`] diagnostic view of this value.
    pub fn debug_references(this: &Self) -> DebugReferences<T> {
        let Self { inner, caller: _ } = this;
        DebugReferences(alloc::sync::Arc::downgrade(inner))
    }
}
675
/// A weak reference: does not keep the value alive, and refuses to upgrade
/// once the value is marked for destruction.
#[derive(Debug)]
pub struct Weak<T>(alloc::sync::Weak<Inner<T>>);
688
// Pointer identity (see `PartialEq` below) is a total equivalence relation.
impl<T> core::cmp::Eq for Weak<T> {}
690
impl<T> core::cmp::PartialEq for Weak<T> {
    // Two `Weak`s are equal iff they point at the same allocation.
    fn eq(&self, other: &Self) -> bool {
        Self::ptr_eq(self, other)
    }
}
696
697impl<T> Hash for Weak<T> {
698 fn hash<H: Hasher>(&self, state: &mut H) {
699 let Self(this) = self;
700 this.as_ptr().hash(state)
701 }
702}
703
704impl<T> Clone for Weak<T> {
705 fn clone(&self) -> Self {
706 let Self(this) = self;
707 Weak(this.clone())
708 }
709}
710
impl<T> Weak<T> {
    /// Returns true if both weak references point at the same allocation.
    pub fn ptr_eq(&self, Self(other): &Self) -> bool {
        let Self(this) = self;
        this.ptr_eq(other)
    }

    /// Returns a debug identifier for the referenced value, falling back to
    /// the raw pointer if the value is no longer reachable.
    pub fn debug_id(&self) -> impl Debug + '_ {
        match self.upgrade() {
            Some(strong) => {
                let Strong { inner, caller: _ } = &strong;

                let token = inner.resource_token.token().extend_lifetime();

                debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(&inner), token }
            }
            None => {
                let Self(this) = self;
                debug_id::DebugId::WithoutToken { ptr: this.as_ptr() }
            }
        }
    }

    /// Attempts to upgrade to a [`Strong`]; returns `None` if the value has
    /// been marked for destruction (its `Primary` was dropped or unwrapped),
    /// even while the allocation itself is still alive.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    pub fn upgrade(&self) -> Option<Strong<T>> {
        let Self(weak) = self;
        let arc = weak.upgrade()?;
        let Inner { marked_for_destruction, data: _, callers, notifier: _, resource_token: _ } =
            arc.deref();

        // `Acquire` pairs with the `Release` swap in `mark_for_destruction`.
        if !marked_for_destruction.load(Ordering::Acquire) {
            let caller = callers.insert(Location::caller());
            Some(Strong { inner: arc, caller })
        } else {
            None
        }
    }

    /// Returns the number of strong references keeping the value alive.
    pub fn strong_count(&self) -> usize {
        let Self(weak) = self;
        weak.strong_count()
    }

    /// Returns a [`DebugReferences`] diagnostic view of this value.
    pub fn debug_references(&self) -> DebugReferences<T> {
        let Self(inner) = self;
        DebugReferences(inner.clone())
    }
}
773
774fn debug_refs(
775 refs: Option<(usize, &AtomicBool, &caller::Callers)>,
776 name: &'static str,
777 f: &mut core::fmt::Formatter<'_>,
778) -> core::fmt::Result {
779 let mut f = f.debug_struct(name);
780 match refs {
781 Some((strong_count, marked_for_destruction, callers)) => f
782 .field("strong_count", &strong_count)
783 .field("marked_for_destruction", marked_for_destruction)
784 .field("callers", callers)
785 .finish(),
786 None => {
787 let strong_count = 0_usize;
788 f.field("strong_count", &strong_count).finish_non_exhaustive()
789 }
790 }
791}
792
/// A diagnostic handle that reports a value's reference state without
/// keeping the value alive.
#[derive(Clone)]
pub struct DebugReferences<T>(alloc::sync::Weak<Inner<T>>);
797
impl<T> Debug for DebugReferences<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let Self(inner) = self;
        // Upgrade to read live state; `None` means the value is gone.
        let inner = inner.upgrade();
        let refs = inner.as_ref().map(|inner| {
            (alloc::sync::Arc::strong_count(inner), &inner.marked_for_destruction, &inner.callers)
        });
        debug_refs(refs, "DebugReferences", f)
    }
}
808
809impl<T: Send + Sync + 'static> DebugReferences<T> {
810 pub fn into_dyn(self) -> DynDebugReferences {
812 let Self(w) = self;
813 DynDebugReferences(w)
814 }
815}
816
/// Type-erased counterpart of [`DebugReferences`].
#[derive(Clone)]
pub struct DynDebugReferences(alloc::sync::Weak<dyn ExposeRefs>);
820
impl Debug for DynDebugReferences {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let Self(inner) = self;
        // Upgrade to read live state; `None` means the value is gone.
        let inner = inner.upgrade();
        let refs = inner.as_ref().map(|inner| {
            let (marked_for_destruction, callers) = inner.refs_info();
            (alloc::sync::Arc::strong_count(inner), marked_for_destruction, callers)
        });
        debug_refs(refs, "DynDebugReferences", f)
    }
}
832
/// Internal trait allowing `Inner<T>` to be type-erased behind
/// [`DynDebugReferences`].
trait ExposeRefs: Send + Sync + 'static {
    /// Returns the destruction flag and caller map for debug formatting.
    fn refs_info(&self) -> (&AtomicBool, &caller::Callers);
}
837
838impl<T: Send + Sync + 'static> ExposeRefs for Inner<T> {
839 fn refs_info(&self) -> (&AtomicBool, &caller::Callers) {
840 (&self.marked_for_destruction, &self.callers)
841 }
842}
843
/// A callback that receives ownership of the wrapped value when its last
/// reference is dropped.
pub trait Notifier<T>: Send {
    /// Called with the owned value on destruction.
    fn notify(&mut self, data: T);
}
853
/// A [`Notifier`] that stores the notified value for later retrieval via
/// [`ArcNotifier::take`]. Clones share the same slot.
#[derive(Debug, Derivative)]
#[derivative(Clone(bound = ""))]
pub struct ArcNotifier<T>(alloc::sync::Arc<crate::Mutex<Option<T>>>);
861
862impl<T> ArcNotifier<T> {
863 pub fn new() -> Self {
865 Self(alloc::sync::Arc::new(crate::Mutex::new(None)))
866 }
867
868 pub fn take(&self) -> Option<T> {
870 let Self(inner) = self;
871 inner.lock().take()
872 }
873}
874
875impl<T: Send> Notifier<T> for ArcNotifier<T> {
876 fn notify(&mut self, data: T) {
877 let Self(inner) = self;
878 assert!(inner.lock().replace(data).is_none(), "notified twice");
879 }
880}
881
/// A [`Notifier`] adapter that transforms the value with a function before
/// delegating to an inner notifier.
pub struct MapNotifier<N, F> {
    inner: N,
    // `Option` so the `FnOnce` can be moved out on its single use.
    map: Option<F>,
}
888
889impl<N, F> MapNotifier<N, F> {
890 pub fn new(notifier: N, map: F) -> Self {
893 Self { inner: notifier, map: Some(map) }
894 }
895}
896
897impl<A, B, N: Notifier<B>, F: FnOnce(A) -> B> Notifier<A> for MapNotifier<N, F>
898where
899 Self: Send,
900{
901 fn notify(&mut self, data: A) {
902 let Self { inner, map } = self;
903 let map = map.take().expect("notified twice");
904 inner.notify(map(data))
905 }
906}
907
/// `Infallible` can never be constructed, so this impl lets callers name a
/// notifier type while statically guaranteeing no notification occurs.
impl<T> Notifier<T> for core::convert::Infallible {
    fn notify(&mut self, _data: T) {
        // Uninhabited: this method can never actually run.
        match *self {}
    }
}
914
#[cfg(test)]
mod tests {
    use super::*;

    // A Weak surviving its Primary must refuse to upgrade.
    #[test]
    fn zombie_weak() {
        let primary = Primary::new(());
        let weak = {
            let strong = Primary::clone_strong(&primary);
            Strong::downgrade(&strong)
        };
        core::mem::drop(primary);

        assert!(weak.upgrade().is_none());
    }

    // All reference flavors observe mutations through the shared value.
    #[test]
    fn rcs() {
        const INITIAL_VAL: u8 = 1;
        const NEW_VAL: u8 = 2;

        let primary = Primary::new(crate::sync::Mutex::new(INITIAL_VAL));
        let strong = Primary::clone_strong(&primary);
        let weak = Strong::downgrade(&strong);

        *primary.lock().unwrap() = NEW_VAL;
        assert_eq!(*primary.deref().lock().unwrap(), NEW_VAL);
        assert_eq!(*strong.deref().lock().unwrap(), NEW_VAL);
        assert_eq!(*weak.upgrade().unwrap().deref().lock().unwrap(), NEW_VAL);
    }

    #[test]
    fn unwrap_primary_without_strong_held() {
        const VAL: u16 = 6;
        let primary = Primary::new(VAL);
        assert_eq!(Primary::unwrap(primary), VAL);
    }

    #[test]
    #[should_panic(expected = "can't unwrap, still had 1 strong refs")]
    fn unwrap_primary_with_strong_held() {
        let primary = Primary::new(8);
        let _strong: Strong<_> = Primary::clone_strong(&primary);
        let _: u16 = Primary::unwrap(primary);
    }

    #[test]
    #[should_panic(expected = "dropped Primary with 1 strong refs remaining")]
    fn drop_primary_with_strong_held() {
        let primary = Primary::new(9);
        let _strong: Strong<_> = Primary::clone_strong(&primary);
        core::mem::drop(primary);
    }

    // Dropping a leaked Primary during an unrelated panic must not panic
    // again (a double panic would abort and hide the original message).
    #[cfg(not(target_os = "fuchsia"))]
    #[test]
    #[should_panic(expected = "oopsie")]
    fn double_panic_protect() {
        let primary = Primary::new(9);
        let strong = Primary::clone_strong(&primary);
        let _tuple_to_invert_drop_order = (primary, strong);
        panic!("oopsie");
    }

    // Each clone/upgrade records its distinct call site with count 1, and
    // dropping all strongs empties the map.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    fn tracked_callers() {
        let primary = Primary::new(10);
        let here = Location::caller();
        let strong1 = Primary::clone_strong(&primary);
        let strong2 = strong1.clone();
        let weak = Strong::downgrade(&strong2);
        let strong3 = weak.upgrade().unwrap();

        let Primary { inner } = &primary;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
            &***inner;

        let strongs = [strong1, strong2, strong3];
        let _: &Location<'_> = strongs.iter().enumerate().fold(here, |prev, (i, cur)| {
            let Strong { inner: _, caller: caller::TrackedCaller { location: cur } } = cur;
            assert_eq!(prev.file(), cur.file(), "{i}");
            assert!(prev.line() < cur.line(), "{prev} < {cur}, {i}");
            {
                let callers = callers.callers.lock().unwrap();
                assert_eq!(callers.get(cur).copied(), Some(1));
            }

            cur
        });

        std::mem::drop(strongs);
        {
            let callers = callers.callers.lock().unwrap();
            let callers = callers.deref();
            assert!(callers.is_empty(), "{callers:?}");
        }
    }
    // Two clones from the same source line share one map entry with count 2.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    fn same_location_caller_tracking() {
        fn clone_in_fn<T>(p: &Primary<T>) -> Strong<T> {
            Primary::clone_strong(p)
        }

        let primary = Primary::new(10);
        let strong1 = clone_in_fn(&primary);
        let strong2 = clone_in_fn(&primary);
        assert_eq!(strong1.caller.location, strong2.caller.location);

        let Primary { inner } = &primary;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
            &***inner;

        {
            let callers = callers.callers.lock().unwrap();
            assert_eq!(callers.get(&strong1.caller.location).copied(), Some(2));
        }

        std::mem::drop(strong1);
        std::mem::drop(strong2);

        {
            let callers = callers.callers.lock().unwrap();
            let callers = callers.deref();
            assert!(callers.is_empty(), "{callers:?}");
        }
    }

    // The leak panic message names the file holding the leaked reference.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    #[should_panic(expected = "core/sync/src/rc.rs")]
    fn callers_in_panic() {
        let primary = Primary::new(10);
        let _strong = Primary::clone_strong(&primary);
        drop(primary);
    }

    // Notification is deferred until the last strong reference drops.
    #[test]
    fn unwrap_with_notifier() {
        let primary = Primary::new(10);
        let strong = Primary::clone_strong(&primary);
        let notifier = ArcNotifier::new();
        Primary::unwrap_with_notifier(primary, notifier.clone());
        assert_eq!(notifier.take(), None);
        core::mem::drop(strong);
        assert_eq!(notifier.take(), Some(10));
    }

    // With no strong refs, the value is returned and no notifier is built.
    #[test]
    fn unwrap_or_notify_with_immediate() {
        let primary = Primary::new(10);
        let result = Primary::unwrap_or_notify_with::<ArcNotifier<_>, (), _>(primary, || {
            panic!("should not try to create notifier")
        });
        assert_eq!(result, Ok(10));
    }

    #[test]
    fn unwrap_or_notify_with_deferred() {
        let primary = Primary::new(10);
        let strong = Primary::clone_strong(&primary);
        let result = Primary::unwrap_or_notify_with(primary, || {
            let notifier = ArcNotifier::new();
            (notifier.clone(), notifier)
        });
        let notifier = result.unwrap_err();
        assert_eq!(notifier.take(), None);
        core::mem::drop(strong);
        assert_eq!(notifier.take(), Some(10));
    }

    #[test]
    fn map_notifier() {
        let primary = Primary::new(10);
        let notifier = ArcNotifier::new();
        let map_notifier = MapNotifier::new(notifier.clone(), |data| (data, data + 1));
        Primary::unwrap_with_notifier(primary, map_notifier);
        assert_eq!(notifier.take(), Some((10, 11)));
    }

    // `new_cyclic` gives the constructor a usable self-referential Weak.
    #[test]
    fn new_cyclic() {
        #[derive(Debug)]
        struct Data {
            value: i32,
            weak: Weak<Data>,
        }

        let primary = Primary::new_cyclic(|weak| Data { value: 2, weak });
        assert_eq!(primary.value, 2);
        let strong = primary.weak.upgrade().unwrap();
        assert_eq!(strong.value, 2);
        assert!(Primary::ptr_eq(&primary, &strong));
    }

    // Debug IDs are compared via their formatted output.
    macro_rules! assert_debug_id_eq {
        ($id1:expr, $id2:expr) => {
            assert_eq!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
        };
    }
    macro_rules! assert_debug_id_ne {
        ($id1:expr, $id2:expr) => {
            assert_ne!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
        };
    }

    #[test]
    fn debug_ids_are_stable() {
        let primary = Primary::new(1);
        let strong = Primary::clone_strong(&primary);
        let weak_p = Primary::downgrade(&primary);
        let weak_s = Strong::downgrade(&strong);
        let weak_c = weak_p.clone();
        assert_debug_id_eq!(&primary.debug_id(), &strong.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_p.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_s.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_c.debug_id());
    }

    // IDs differ across distinct values, even when an allocation address is
    // reused after a drop (the resource token makes them unique).
    #[test]
    fn debug_ids_are_unique() {
        let primary1 = Primary::new(1);
        let primary2 = Primary::new(1);
        assert_debug_id_ne!(&primary1.debug_id(), &primary2.debug_id());

        let id1 = format!("{:?}", primary1.debug_id());
        std::mem::drop(primary1);
        let primary3 = Primary::new(1);
        assert_ne!(id1, format!("{:?}", primary3.debug_id()));
    }
}