osom_tools_runtime/arrays/immutable_array/immutable_weak_array.rs

use core::sync::atomic::Ordering;

use crate::allocator::Allocator;

#[cfg(feature = "std_alloc")]
use crate::allocator::StdAllocator;

use super::ImmutableArray;
use super::internal_array::{HeapData, InternalArray};

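/// A weak reference to an [`ImmutableArray`]. It can be upgraded back into a
/// strong reference as long as at least one strong reference still exists.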
#[repr(transparent)]
pub struct ImmutableWeakArray<
    T: Sized,
    #[cfg(feature = "std_alloc")] TAllocator = StdAllocator,
    #[cfg(not(feature = "std_alloc"))] TAllocator,
> where
    TAllocator: Allocator,
{
    internal: InternalArray<T, TAllocator>,
}

impl<T: Sized, TAllocator: Allocator> ImmutableWeakArray<T, TAllocator> {
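    /// Attempts to upgrade this weak reference into a strong [`ImmutableArray`].
    /// Returns `None` if no strong references remain.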
    pub fn upgrade(&self) -> Option<ImmutableArray<T, TAllocator>> {
        let mut strong_counter = self.internal.heap_data().strong_counter().load(Ordering::SeqCst);

        loop {
            // Once every strong reference is gone the array cannot be revived,
            // so re-check the counter on every retry instead of only once.
            if strong_counter == 0 {
                return None;
            }

            let result = self.internal.heap_data().strong_counter().compare_exchange_weak(
                strong_counter,
                strong_counter + 1,
                Ordering::SeqCst,
                Ordering::SeqCst,
            );
            match result {
                // The strong counter was bumped successfully, so it is safe to
                // hand out a new strong reference.
                Ok(_) => return Some(ImmutableArray::from(self.internal.clone())),
                // Another thread changed the counter; retry with the fresh value.
                Err(current) => strong_counter = current,
            }
        }
    }

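    /// Returns the current number of strong references to the underlying buffer.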
    #[must_use]
    pub fn strong_count(&self) -> usize {
        self.internal.heap_data().strong_counter().load(Ordering::SeqCst) as usize
    }

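    /// Returns the current number of weak references to the underlying buffer.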
    #[must_use]
    pub fn weak_count(&self) -> usize {
        self.internal.heap_data().weak_counter().load(Ordering::SeqCst) as usize
    }

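    /// Consumes this weak reference without running its destructor, decrementing
    /// the weak counter once. Returns `true` if this was the last weak reference
    /// and the underlying buffer was deallocated.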
    #[inline(always)]
    pub fn release(mut self) -> bool {
        let result = self.internal_release();
        // Forget `self` so that `Drop` does not decrement the weak counter a second time.
        core::mem::forget(self);
        result
    }

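    /// Decrements the weak counter. If it reaches zero, drops the stored elements
    /// and deallocates the underlying buffer, returning `true`.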
    pub(crate) fn internal_release(&mut self) -> bool {
        let weak_counter = self.internal.heap_data().weak_counter().fetch_sub(1, Ordering::SeqCst);
        if weak_counter == 1 {
            // This was the last weak reference: drop every element (if `T`
            // needs dropping) and then release the allocation.
            if core::mem::needs_drop::<T>() {
                let slice = self.internal.as_slice();
                let mut start = slice.as_ptr();
                let end = unsafe { start.add(slice.len()) };
                while start < end {
                    unsafe {
                        core::mem::drop(start.read());
                        start = start.add(1);
                    }
                }
            }

            // Move `internal` out by value and hand the buffer back to the allocator.
            let internal = unsafe { core::ptr::read(&self.internal) };
            internal.deallocate();
            true
        } else {
            false
        }
    }
}

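/// Cloning a weak reference only increments the weak counter; the underlying
/// buffer is shared, never copied.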
impl<T: Sized, TAllocator: Allocator> Clone for ImmutableWeakArray<T, TAllocator> {
    fn clone(&self) -> Self {
        self.internal.heap_data().weak_counter().fetch_add(1, Ordering::SeqCst);
        Self {
            internal: self.internal.clone(),
        }
    }
}

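/// Dropping a weak reference decrements the weak counter and deallocates the
/// buffer if this was the last reference to it.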
impl<T: Sized, TAllocator: Allocator> Drop for ImmutableWeakArray<T, TAllocator> {
    fn drop(&mut self) {
        self.internal_release();
    }
}

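/// Wraps an existing `InternalArray` without modifying any reference counters.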
impl<T: Sized, TAllocator: Allocator> From<InternalArray<T, TAllocator>> for ImmutableWeakArray<T, TAllocator> {
    fn from(internal: InternalArray<T, TAllocator>) -> Self {
        Self { internal }
    }
}

impl<T: Sized, TAllocator: Allocator> core::fmt::Debug for ImmutableWeakArray<T, TAllocator> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let ptr = core::ptr::from_ref::<HeapData<T>>(self.internal.heap_data());
        f.debug_struct("ImmutableWeakArray")
            .field("strong_count", &self.strong_count())
            .field("weak_count", &self.weak_count())
            .field("len", &self.internal.len())
            .field("capacity", &self.internal.capacity())
            .field("raw_ptr", &ptr.addr())
            .finish()
    }
}