// osom_lib_arrays/immutable_array/immutable_array.rs
#![allow(clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_sign_loss)]

use core::sync::atomic::Ordering;

use osom_lib_alloc::Allocator;
use osom_lib_primitives::Length;

#[cfg(feature = "std_alloc")]
use osom_lib_alloc::StdAllocator;

use crate::errors::ArrayConstructionError;

use super::ImmutableWeakArray;
use super::internal_array::{HeapData, InternalArray, MAX_LENGTH};

/// An immutable, reference-counted array of `T`, generic over the allocator.
///
/// Cloning is cheap: it only increments the internal strong counter, and the
/// backing buffer is shared and never mutated after construction.
#[must_use]
#[repr(transparent)]
pub struct ImmutableArray<
    T: Sized,
    #[cfg(feature = "std_alloc")] TAllocator = StdAllocator,
    #[cfg(not(feature = "std_alloc"))] TAllocator,
> where
    TAllocator: Allocator,
{
    internal: InternalArray<T, TAllocator>,
}

impl<T: Sized, TAllocator: Allocator> From<InternalArray<T, TAllocator>> for ImmutableArray<T, TAllocator> {
    fn from(internal: InternalArray<T, TAllocator>) -> Self {
        Self { internal }
    }
}

impl<T: Sized, TAllocator: Allocator> ImmutableArray<T, TAllocator> {
    /// The maximum number of elements an [`ImmutableArray`] can hold.
    pub const MAX_LENGTH: usize = MAX_LENGTH;

    /// Returns the content of the array as a slice.
    #[inline(always)]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        self.internal.as_slice()
    }

    /// Returns the length of the array.
    #[inline(always)]
    pub const fn len(&self) -> Length {
        self.internal.len()
    }

    /// Returns `true` if the array holds no elements.
    #[inline(always)]
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.len().value() == 0
    }

    /// Returns the capacity of the underlying buffer.
    #[inline(always)]
    pub const fn capacity(&self) -> Length {
        self.internal.capacity()
    }

    /// Returns a reference to the associated allocator.
    #[inline(always)]
    pub const fn allocator(&self) -> &TAllocator {
        self.internal.allocator()
    }

    /// Creates a new [`ImmutableWeakArray`] pointing to the same buffer,
    /// incrementing the internal weak counter.
    #[must_use]
    pub fn downgrade(instance: &Self) -> ImmutableWeakArray<T, TAllocator> {
        instance
            .internal
            .heap_data()
            .weak_counter()
            .fetch_add(1, Ordering::SeqCst);
        let internal = instance.internal.clone();
        ImmutableWeakArray::from(internal)
    }

    /// Consumes the array and decrements the strong counter. If this was the
    /// last strong reference, returns the remaining [`ImmutableWeakArray`];
    /// otherwise returns `None`.
    pub fn release(mut instance: Self) -> Option<ImmutableWeakArray<T, TAllocator>> {
        let result = instance.internal_release();
        core::mem::forget(instance);
        result
    }

    /// Returns the current number of strong references.
    #[must_use]
    pub fn strong_count(instance: &Self) -> usize {
        instance.internal.heap_data().strong_counter().load(Ordering::SeqCst) as usize
    }

    /// Returns the current number of weak references.
    #[must_use]
    pub fn weak_count(instance: &Self) -> usize {
        instance.internal.heap_data().weak_counter().load(Ordering::SeqCst) as usize
    }

    /// Returns `true` if both arrays point to the same underlying buffer.
    /// This compares identity, not content; for content comparison use `==`.
    #[inline(always)]
    #[must_use]
    pub fn ref_equal(left: &Self, right: &Self) -> bool {
        let left = core::ptr::from_ref(left.internal.heap_data());
        let right = core::ptr::from_ref(right.internal.heap_data());
        core::ptr::addr_eq(left, right)
    }

    pub(crate) fn internal_release(&mut self) -> Option<ImmutableWeakArray<T, TAllocator>> {
        let strong_counter = self
            .internal
            .heap_data()
            .strong_counter()
            .fetch_sub(1, Ordering::SeqCst);
        // `fetch_sub` returns the previous value: 1 means this was the last
        // strong reference, so ownership moves into a weak array.
        if strong_counter == 1 {
            let internal = unsafe { core::ptr::read(&self.internal) };
            Some(ImmutableWeakArray::from(internal))
        } else {
            None
        }
    }
}
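
// --- Editor's illustrative sketch; not part of the original file. ---
// A minimal test showing the strong/weak counting semantics implemented
// above. It assumes the `std_alloc` feature (so `StdAllocator` is the default
// allocator) and that `ArrayConstructionError` implements `Debug` (for `unwrap`).
#[cfg(all(test, feature = "std_alloc"))]
mod refcount_sketch {
    use super::*;

    #[test]
    fn counters_follow_clone_downgrade_and_release() {
        let first: ImmutableArray<i32> = ImmutableArray::from_array([1, 2, 3]).unwrap();
        // Construction sets strong = 1 and weak = 1.
        assert_eq!(ImmutableArray::strong_count(&first), 1);
        assert_eq!(ImmutableArray::weak_count(&first), 1);

        // `clone` only bumps the strong counter; both handles share one buffer.
        let second = first.clone();
        assert_eq!(ImmutableArray::strong_count(&first), 2);
        assert!(ImmutableArray::ref_equal(&first, &second));

        // `downgrade` bumps the weak counter.
        let weak = ImmutableArray::downgrade(&first);
        assert_eq!(ImmutableArray::weak_count(&first), 2);
        drop(weak);

        // Releasing a non-last strong reference yields `None`.
        assert!(ImmutableArray::release(second).is_none());
        assert_eq!(ImmutableArray::strong_count(&first), 1);
    }
}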

impl<T: Sized, TAllocator: Allocator> ImmutableArray<T, TAllocator> {
    /// Builds a new [`ImmutableArray`] by taking ownership of the passed array.
    ///
    /// # Errors
    ///
    /// Returns [`ArrayConstructionError::ArrayTooLong`] if `N` exceeds
    /// [`MAX_LENGTH`][Self::MAX_LENGTH], and propagates any allocation failure.
    pub fn from_array<const N: usize>(array: [T; N]) -> Result<Self, ArrayConstructionError> {
        Self::from_array_with_allocator(array, TAllocator::default())
    }

    /// The same as [`from_array`][Self::from_array], but with an explicit allocator.
    pub fn from_array_with_allocator<const N: usize>(
        array: [T; N],
        allocator: TAllocator,
    ) -> Result<Self, ArrayConstructionError> {
        let slice_len = array.len();
        if slice_len > Self::MAX_LENGTH {
            return Err(ArrayConstructionError::ArrayTooLong);
        }

        let slice_len = unsafe { Length::new_unchecked(slice_len as i32) };

        let mut internal: InternalArray<T, TAllocator> = InternalArray::allocate(slice_len, slice_len, allocator)?;

        unsafe {
            let ptr = internal.heap_data_mut().data().as_ptr();
            debug_assert!(ptr.is_aligned(), "Data pointer is not aligned.");
            ptr.copy_from_nonoverlapping(array.as_ptr(), slice_len.into());
            // The elements now live in the new buffer; forget the source array
            // so they are not dropped twice.
            core::mem::forget(array);
        }

        {
            *internal.heap_data_mut().strong_counter_mut().get_mut() = 1;
            *internal.heap_data_mut().weak_counter_mut().get_mut() = 1;
        }

        Ok(Self { internal })
    }
}
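
// Editor's illustrative sketch; not part of the original file. It shows that
// `from_array` takes ownership and moves the elements into the shared buffer,
// so non-`Copy` types work without cloning. Same feature assumptions as above.
#[cfg(all(test, feature = "std_alloc"))]
mod from_array_sketch {
    use super::*;

    #[derive(Debug, PartialEq)]
    struct NonCopy(i32);

    #[test]
    fn moves_elements_into_the_buffer() {
        let arr: ImmutableArray<NonCopy> = ImmutableArray::from_array([NonCopy(1), NonCopy(2)]).unwrap();
        assert_eq!(arr.len().value(), 2);
        assert_eq!(arr.as_slice(), &[NonCopy(1), NonCopy(2)]);
    }
}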

impl<T: Sized + Clone, TAllocator: Allocator> ImmutableArray<T, TAllocator> {
    /// Builds a new [`ImmutableArray`] by cloning the content of the passed slice.
    ///
    /// # Errors
    ///
    /// Returns [`ArrayConstructionError::ArrayTooLong`] if the slice is longer
    /// than [`MAX_LENGTH`][Self::MAX_LENGTH], and propagates any allocation failure.
    pub fn from_slice(slice: &[T]) -> Result<Self, ArrayConstructionError> {
        Self::from_slice_with_allocator(slice, TAllocator::default())
    }

    /// The same as [`from_slice`][Self::from_slice], but with an explicit allocator.
    pub fn from_slice_with_allocator(slice: &[T], allocator: TAllocator) -> Result<Self, ArrayConstructionError> {
        let slice_len = slice.len();
        if slice_len > Self::MAX_LENGTH {
            return Err(ArrayConstructionError::ArrayTooLong);
        }

        let slice_len = unsafe { Length::new_unchecked(slice_len as i32) };

        let mut internal: InternalArray<T, TAllocator> = InternalArray::allocate(slice_len, slice_len, allocator)?;

        unsafe {
            let mut ptr = internal.heap_data_mut().data().as_ptr();
            debug_assert!(ptr.is_aligned(), "Data pointer is not aligned.");

            // Clone the elements one by one into the freshly allocated buffer.
            for item in slice {
                ptr.write(item.clone());
                ptr = ptr.add(1);
            }
        }

        {
            *internal.heap_data_mut().strong_counter_mut().get_mut() = 1;
            *internal.heap_data_mut().weak_counter_mut().get_mut() = 1;
        }

        Ok(Self { internal })
    }
}
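
// Editor's illustrative sketch; not part of the original file. Unlike
// `from_array`, `from_slice` leaves its input untouched and clones every
// element into the new buffer (hence the `T: Clone` bound).
#[cfg(all(test, feature = "std_alloc"))]
mod from_slice_sketch {
    use super::*;

    #[test]
    fn clones_the_input_slice() {
        let data = [10, 20, 30];
        let arr: ImmutableArray<i32> = ImmutableArray::from_slice(&data).unwrap();
        // The input is still usable: the array holds its own copy.
        assert_eq!(data, [10, 20, 30]);
        assert_eq!(arr.as_slice(), &data);
        assert!(!arr.is_empty());
    }
}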

impl<T: Sized, TAllocator: Allocator> Drop for ImmutableArray<T, TAllocator> {
    fn drop(&mut self) {
        // Decrementing the strong counter may return the last weak reference,
        // which is dropped right here and may free the buffer in turn.
        self.internal_release();
    }
}

impl<T: Sized, TAllocator: Allocator> Clone for ImmutableArray<T, TAllocator> {
    /// Cheap `O(1)` clone: bumps the strong counter and shares the buffer.
    fn clone(&self) -> Self {
        self.internal
            .heap_data()
            .strong_counter()
            .fetch_add(1, Ordering::SeqCst);
        Self {
            internal: self.internal.clone(),
        }
    }
}

impl<T: Sized + PartialEq, TAllocator1: Allocator, TAllocator2: Allocator> PartialEq<ImmutableArray<T, TAllocator1>>
    for ImmutableArray<T, TAllocator2>
{
    /// Compares content, not identity; arrays built with different allocators can be equal.
    fn eq(&self, other: &ImmutableArray<T, TAllocator1>) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T: Sized + Eq, TAllocator: Allocator> Eq for ImmutableArray<T, TAllocator> {}
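
// Editor's illustrative sketch; not part of the original file. `==` compares
// content (even across allocator types) while `ref_equal` compares identity,
// so separately built arrays with equal elements are `==` but not `ref_equal`.
#[cfg(all(test, feature = "std_alloc"))]
mod equality_sketch {
    use super::*;

    #[test]
    fn content_equality_vs_identity() {
        let a: ImmutableArray<i32> = ImmutableArray::from_slice(&[1, 2]).unwrap();
        let b: ImmutableArray<i32> = ImmutableArray::from_slice(&[1, 2]).unwrap();
        assert_eq!(a, b); // equal content
        assert!(!ImmutableArray::ref_equal(&a, &b)); // distinct buffers
        assert!(ImmutableArray::ref_equal(&a, &a.clone())); // shared buffer
    }
}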

impl<T: Sized + core::hash::Hash, TAllocator: Allocator> core::hash::Hash for ImmutableArray<T, TAllocator> {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}

impl<T: Sized, TAllocator: Allocator> core::fmt::Debug for ImmutableArray<T, TAllocator> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let ptr = core::ptr::from_ref::<HeapData<T>>(self.internal.heap_data());
        f.debug_struct("ImmutableArray")
            .field("strong_count", &Self::strong_count(self))
            .field("weak_count", &Self::weak_count(self))
            .field("len", &self.len())
            .field("capacity", &self.internal.capacity())
            .field("raw_ptr", &ptr.addr())
            .finish()
    }
}

impl<T: Sized, TAllocator: Allocator> core::ops::Deref for ImmutableArray<T, TAllocator> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T: Sized, TAllocator: Allocator> AsRef<[T]> for ImmutableArray<T, TAllocator> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
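
// Editor's illustrative sketch; not part of the original file. Through
// `Deref<Target = [T]>` and `AsRef<[T]>`, the read-only slice API is available
// directly on the array.
#[cfg(all(test, feature = "std_alloc"))]
mod deref_sketch {
    use super::*;

    #[test]
    fn slice_api_through_deref() {
        let arr: ImmutableArray<i32> = ImmutableArray::from_slice(&[3, 1, 2]).unwrap();
        assert_eq!(arr.iter().copied().max(), Some(3)); // `[T]::iter` via `Deref`
        assert_eq!(arr[0], 3); // indexing via `Deref` to `[T]`
        let slice: &[i32] = arr.as_ref();
        assert_eq!(slice.len(), 3);
    }
}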