osom_lib_arrays/immutable_array/immutable_weak_array.rs

use core::sync::atomic::Ordering;

use osom_lib_alloc::Allocator;

#[cfg(feature = "std_alloc")]
use osom_lib_alloc::StdAllocator;

use super::ImmutableArray;
use super::internal_array::{HeapData, InternalArray};
/// A weak reference to an [`ImmutableArray`].
///
/// It doesn't provide direct access to the data itself,
/// but is useful for tracking whether the associated [`ImmutableArray`]
/// is still alive, through the [`ImmutableWeakArray::upgrade`] method.
#[repr(transparent)]
pub struct ImmutableWeakArray<
    T: Sized,
    #[cfg(feature = "std_alloc")] TAllocator = StdAllocator,
    #[cfg(not(feature = "std_alloc"))] TAllocator,
> where
    TAllocator: Allocator,
{
    internal: InternalArray<T, TAllocator>,
}

impl<T: Sized, TAllocator: Allocator> ImmutableWeakArray<T, TAllocator> {
    /// Upgrades the weak reference to a strong [`ImmutableArray`] reference.
    ///
    /// Returns `None` if the array has been deallocated. Otherwise, returns a strong reference.
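    ///
    /// # Example
    ///
    /// A minimal sketch. The `downgrade` method assumed here to obtain the
    /// weak reference would live on [`ImmutableArray`], outside this file.
    ///
    /// ```ignore
    /// let weak = array.downgrade(); // hypothetical way to obtain the weak reference
    /// match weak.upgrade() {
    ///     Some(strong) => { /* the data is alive and `strong` now keeps it so */ }
    ///     None => { /* all strong references are gone */ }
    /// }
    /// ```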
    pub fn upgrade(&self) -> Option<ImmutableArray<T, TAllocator>> {
        let mut strong_counter = self.internal.heap_data().strong_counter().load(Ordering::SeqCst);

        loop {
            // Once the strong count has reached zero the data is dead and
            // must not be resurrected, so re-check on every iteration, not
            // just before the first one.
            if strong_counter == 0 {
                return None;
            }

            let result = self.internal.heap_data().strong_counter().compare_exchange_weak(
                strong_counter,
                strong_counter + 1,
                Ordering::SeqCst,
                Ordering::SeqCst,
            );
            match result {
                // The increment succeeded: it is now safe to hand out a
                // strong reference.
                Ok(_) => return Some(ImmutableArray::from(self.internal.clone())),
                // On failure `compare_exchange_weak` returns the current
                // value, so no extra load is needed.
                Err(current) => strong_counter = current,
            }
        }
    }

    /// Returns the number of strong references to the array.
    #[must_use]
    pub fn strong_count(&self) -> usize {
        self.internal.heap_data().strong_counter().load(Ordering::SeqCst) as usize
    }

    /// Returns the number of weak references to the array.
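    ///
    /// # Example
    ///
    /// A minimal sketch of how cloning moves this counter; `weak` stands for
    /// an already obtained [`ImmutableWeakArray`].
    ///
    /// ```ignore
    /// let before = weak.weak_count();
    /// let weak2 = weak.clone();      // cloning adds one weak reference
    /// assert_eq!(weak.weak_count(), before + 1);
    /// weak2.release();               // releasing removes it again
    /// assert_eq!(weak.weak_count(), before);
    /// ```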
    #[must_use]
    pub fn weak_count(&self) -> usize {
        self.internal.heap_data().weak_counter().load(Ordering::SeqCst) as usize
    }

    /// Returns a reference to the allocator of the [`ImmutableWeakArray`].
    #[inline(always)]
    pub const fn allocator(&self) -> &TAllocator {
        self.internal.allocator()
    }

    /// Releases the weak reference.
    ///
    /// Returns `true` if it was the last weak reference and the memory was deallocated.
    /// Otherwise, returns `false`.
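    ///
    /// # Example
    ///
    /// A minimal sketch; `weak` stands for an already obtained
    /// [`ImmutableWeakArray`].
    ///
    /// ```ignore
    /// if weak.release() {
    ///     // `weak` was the last weak reference; the memory is gone.
    /// }
    /// ```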
    #[inline(always)]
    pub fn release(mut self) -> bool {
        let result = self.internal_release();
        // `internal_release` already gave up our weak reference, so skip
        // `Drop` to avoid decrementing the counter twice.
        core::mem::forget(self);
        result
    }

    pub(crate) fn internal_release(&mut self) -> bool {
        let weak_counter = self.internal.heap_data().weak_counter().fetch_sub(1, Ordering::SeqCst);
        if weak_counter == 1 {
            // This was the last weak reference. Drop the elements in place
            // first, if they need dropping at all.
            if core::mem::needs_drop::<T>() {
                let slice = self.internal.as_slice_mut();
                let mut start = slice.as_mut_ptr();
                let end = unsafe { start.add(slice.len()) };
                while start < end {
                    unsafe {
                        core::ptr::drop_in_place(start);
                        start = start.add(1);
                    }
                }
            }

            // `ptr::read` moves `internal` out from behind `&mut self`
            // without running any destructor, so `deallocate` consumes it
            // exactly once.
            let internal = unsafe { core::ptr::read(&self.internal) };
            internal.deallocate();
            true
        } else {
            false
        }
    }
}

impl<T: Sized, TAllocator: Allocator> Clone for ImmutableWeakArray<T, TAllocator> {
    fn clone(&self) -> Self {
        // Each clone is a new weak handle, so bump the weak counter.
        self.internal.heap_data().weak_counter().fetch_add(1, Ordering::SeqCst);
        Self {
            internal: self.internal.clone(),
        }
    }
}

impl<T: Sized, TAllocator: Allocator> Drop for ImmutableWeakArray<T, TAllocator> {
    fn drop(&mut self) {
        self.internal_release();
    }
}

impl<T: Sized, TAllocator: Allocator> From<InternalArray<T, TAllocator>> for ImmutableWeakArray<T, TAllocator> {
    fn from(internal: InternalArray<T, TAllocator>) -> Self {
        Self { internal }
    }
}

impl<T: Sized, TAllocator: Allocator> core::fmt::Debug for ImmutableWeakArray<T, TAllocator> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let ptr = core::ptr::from_ref::<HeapData<T>>(self.internal.heap_data());
        f.debug_struct("ImmutableWeakArray")
            .field("strong_count", &self.strong_count())
            .field("weak_count", &self.weak_count())
            .field("len", &self.internal.len())
            .field("capacity", &self.internal.capacity())
            .field("raw_ptr", &ptr.addr())
            .finish()
    }
}
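
// A minimal sketch of how the reference counting could be exercised.
// `ImmutableArray::from_slice` and `ImmutableArray::downgrade` are
// assumptions about the companion strong type (defined outside this file),
// so the test is ignored by default.
#[cfg(all(test, feature = "std_alloc"))]
mod tests {
    use super::*;

    #[test]
    #[ignore = "sketch only: relies on the strong type's API defined outside this file"]
    fn upgrade_fails_after_last_strong_is_dropped() {
        let strong = ImmutableArray::<i32>::from_slice(&[1, 2, 3]); // assumed constructor
        let weak = strong.downgrade(); // assumed method
        assert!(weak.upgrade().is_some());
        drop(strong);
        assert_eq!(weak.strong_count(), 0);
        assert!(weak.upgrade().is_none());
    }
}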