// osom_lib_strings/immutable/weak_string.rs
1use core::sync::atomic::{Ordering, fence};
2
3use osom_lib_alloc::traits::Allocator;
4use osom_lib_reprc::macros::reprc;
5
6use crate::immutable::{ImmutableString, internal_string::InternalString};
7
8/// A weak reference to the underlying [`ImmutableString`].
9///
10/// This object cannot inspect the underlying string. But it does track
11/// weak references, and each weak reference can build a strong reference,
12/// assuming any other strong reference is alive.
13///
14/// # Examples
15///
16/// ```rust
17/// use osom_lib_strings::immutable::ImmutableStringError;
18/// use osom_lib_strings::immutable::std::StdImmutableString;
19///
20/// fn main() -> Result<(), ImmutableStringError> {
21/// // The first instance counts as both strong and weak reference.
22/// let strong1 = StdImmutableString::from_str_slice("foo")?;
23/// assert_eq!(strong1.strong_count(), 1);
24/// assert_eq!(strong1.weak_count(), 1);
25/// assert_eq!(strong1, "foo");
26///
27/// let weak = strong1.downgrade(); // Cheap operation
28/// assert_eq!(strong1.strong_count(), 1);
29/// assert_eq!(strong1.weak_count(), 2);
30/// assert_eq!(weak.strong_count(), 1);
31/// assert_eq!(weak.weak_count(), 2);
32///
33/// let strong2 = weak.upgrade().unwrap(); // Cheap operation
34/// assert_eq!(strong1.strong_count(), 2);
35/// assert_eq!(strong1.weak_count(), 2);
36/// assert_eq!(weak.strong_count(), 2);
37/// assert_eq!(weak.weak_count(), 2);
38/// assert_eq!(strong2, strong1);
39///
40/// drop(strong2);
41/// assert_eq!(strong1.strong_count(), 1);
42/// assert_eq!(strong1.weak_count(), 2);
43/// assert_eq!(weak.strong_count(), 1);
44/// assert_eq!(weak.weak_count(), 2);
45///
46/// drop(strong1);
47/// // Both counters are decreased, since there are no more strong references.
48/// assert_eq!(weak.strong_count(), 0);
49/// assert_eq!(weak.weak_count(), 1);
50///
51/// // No more strong reference, .upgrade() won't work.
52/// assert!(weak.upgrade().is_none());
53///
54/// drop(weak); // The actual deallocation happens here.
55/// Ok(())
56/// }
57/// ```
#[reprc]
#[repr(transparent)]
#[derive(Debug)]
pub struct WeakImmutableString<TAllocator: Allocator> {
    // The shared handle carrying the string buffer and its strong/weak
    // atomic counters. `#[repr(transparent)]` makes this wrapper
    // layout-identical to the `InternalString` it contains.
    internal: InternalString<TAllocator>,
}
64
impl<TAllocator: Allocator> WeakImmutableString<TAllocator> {
    /// Wraps an already-counted `InternalString`.
    ///
    /// The caller must have accounted for this new handle in the weak
    /// counter; this constructor performs no counting of its own.
    #[inline(always)]
    pub(crate) fn from_internal(internal: InternalString<TAllocator>) -> Self {
        Self { internal }
    }

    /// Returns the current number of strong references.
    ///
    /// Relaxed load: the value is only a snapshot and may already be stale
    /// by the time the caller inspects it.
    #[inline(always)]
    pub fn strong_count(&self) -> u32 {
        self.internal.strong().load(Ordering::Relaxed)
    }

    /// Returns the current number of weak references (a snapshot, same
    /// caveat as [`Self::strong_count`]).
    #[inline(always)]
    pub fn weak_count(&self) -> u32 {
        self.internal.weak().load(Ordering::Relaxed)
    }

    /// Upgrades current weak reference to the strong [`ImmutableString`].
    ///
    /// Returns `None` if this cannot be done (because there were no strong
    /// references alive). Otherwise returns `Some()` with a strong reference.
    pub fn upgrade(&self) -> Option<ImmutableString<TAllocator>> {
        let strong = self.internal.strong();
        let mut current = strong.load(Ordering::Relaxed);
        // CAS loop: we may only increment the strong count while it is still
        // non-zero; once it has hit zero the string payload is (or is about
        // to be) torn down and must never be resurrected.
        loop {
            if current == 0 {
                return None;
            }
            // Acquire on success synchronizes with the releasing decrement of
            // the last strong reference. `compare_exchange_weak` may fail
            // spuriously; on failure we retry with the freshly observed value.
            match strong.compare_exchange_weak(current, current + 1, Ordering::Acquire, Ordering::Relaxed) {
                Ok(_) => return Some(ImmutableString::from_internal(self.internal.clone())),
                Err(new) => current = new,
            }
        }
    }

    /// Abandons current weak reference.
    ///
    /// If the internal weak counter remains positive after the decrement it
    /// returns false.
    ///
    /// Otherwise it deallocates the underlying memory and returns true.
    /// In particular only single (the last) [`WeakImmutableString`] returns true
    /// by calling this.
    #[inline(always)]
    #[must_use]
    pub fn abandon(mut self) -> bool {
        let result = self.internal_abandon();
        // Suppress `Drop`, which would otherwise decrement the weak counter
        // a second time.
        core::mem::forget(self);
        result
    }

    // Decrements the weak counter and, when this was the last weak
    // reference, deallocates the backing storage; returns true only in that
    // last case. Shared by both `abandon()` and the `Drop` impl.
    fn internal_abandon(&mut self) -> bool {
        // Bitwise copy of the handle so by-value methods can be called on it
        // from behind `&mut self`.
        // NOTE(review): this duplicates `self.internal`; it is sound only as
        // long as `InternalString` has no `Drop` of its own — confirm.
        let internal = unsafe { core::ptr::read(&raw const self.internal) };
        // Release pairs with the Acquire fence below so that every prior use
        // of the allocation happens-before a potential deallocation.
        let prev = internal.weak().fetch_sub(1, Ordering::Release);
        if prev > 1 {
            // Other weak references remain alive; nothing to free.
            return false;
        }

        // Synchronize with all prior Release decrements before deallocating.
        fence(Ordering::Acquire);
        internal.deallocate();
        true
    }
}
127
128impl<TAllocator: Allocator> Drop for WeakImmutableString<TAllocator> {
129 fn drop(&mut self) {
130 let _ = self.internal_abandon();
131 }
132}
133
134impl<TAllocator: Allocator> Clone for WeakImmutableString<TAllocator> {
135 fn clone(&self) -> Self {
136 let internal_clone = self.internal.clone();
137 internal_clone.weak().fetch_add(1, Ordering::Relaxed);
138 Self {
139 internal: internal_clone,
140 }
141 }
142}