osom_tools_runtime/arrays/immutable_array/immutable_array.rs

#![allow(clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_sign_loss)]

use core::sync::atomic::Ordering;

use crate::allocator::{AllocationError, Allocator};

use crate::Length;

#[cfg(feature = "std_alloc")]
use crate::allocator::StdAllocator;

use super::ImmutableWeakArray;
use super::internal_array::{HeapData, InternalArray, MAX_LENGTH};

/// Represents an error that occurs when constructing a new [`ImmutableArray`].
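///
/// # Example
///
/// A sketch of how this error may surface during construction (not compiled
/// as a doctest; it assumes the `std_alloc` feature so the default allocator
/// is available):
///
/// ```ignore
/// match ImmutableArray::<u8>::from_array([1u8, 2, 3]) {
///     Ok(array) => assert_eq!(array.as_slice(), &[1, 2, 3]),
///     Err(ImmutableArrayConstructionError::AllocationError) => { /* allocator failed */ }
///     Err(ImmutableArrayConstructionError::ArrayTooLong) => { /* more than MAX_LENGTH elements */ }
/// }
/// ```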
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[must_use]
#[repr(u8)]
pub enum ImmutableArrayConstructionError {
    /// The allocator failed to allocate memory.
    AllocationError,

    /// The passed array is too long: it exceeds [`MAX_LENGTH`][`ImmutableArray::MAX_LENGTH`].
    ArrayTooLong,
}

impl From<AllocationError> for ImmutableArrayConstructionError {
    fn from(_: AllocationError) -> Self {
        ImmutableArrayConstructionError::AllocationError
    }
}

/// A smart pointer to an immutable array. It tracks both strong and
/// weak references to the array, and is thread safe. Cloning it is very
/// cheap, as it only increments the strong reference counter.
///
/// The array itself is immutable: no changes to it are allowed,
/// apart from interior mutability of course.
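///
/// # Example
///
/// A minimal sketch of the intended usage (not compiled as a doctest; it
/// assumes the `std_alloc` feature, so that `StdAllocator` is used as the
/// default allocator):
///
/// ```ignore
/// let array: ImmutableArray<i32> = ImmutableArray::from_array([1, 2, 3]).unwrap();
/// let clone = array.clone(); // cheap: only increments the strong counter
/// assert_eq!(ImmutableArray::strong_count(&array), 2);
/// assert!(ImmutableArray::ref_equal(&array, &clone));
/// ```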
#[must_use]
#[repr(transparent)]
pub struct ImmutableArray<
    T: Sized,
    #[cfg(feature = "std_alloc")] TAllocator = StdAllocator,
    #[cfg(not(feature = "std_alloc"))] TAllocator,
> where
    TAllocator: Allocator,
{
    internal: InternalArray<T, TAllocator>,
}

impl<T: Sized, TAllocator: Allocator> From<InternalArray<T, TAllocator>> for ImmutableArray<T, TAllocator> {
    fn from(internal: InternalArray<T, TAllocator>) -> Self {
        Self { internal }
    }
}

impl<T: Sized, TAllocator: Allocator> ImmutableArray<T, TAllocator> {
    /// The maximum length of an array that can be constructed.
    /// It is guaranteed that [`MAX_LENGTH`][`Self::MAX_LENGTH`] is less than [`i32::MAX`].
    pub const MAX_LENGTH: usize = MAX_LENGTH;

    /// Returns the content of the [`ImmutableArray`] as a slice.
    #[inline(always)]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        self.internal.as_slice()
    }

    /// Returns the length of the [`ImmutableArray`].
    #[inline(always)]
    pub const fn len(&self) -> Length {
        self.internal.len()
    }

    /// Returns the capacity of the [`ImmutableArray`].
    #[inline(always)]
    pub const fn capacity(&self) -> Length {
        self.internal.capacity()
    }

    /// Creates a new weak reference out of the [`ImmutableArray`].
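    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest; it assumes the
    /// `std_alloc` feature so the default allocator is available):
    ///
    /// ```ignore
    /// let array: ImmutableArray<i32> = ImmutableArray::from_array([1, 2, 3]).unwrap();
    /// let _weak = ImmutableArray::downgrade(&array);
    /// // Downgrading leaves the strong count untouched.
    /// assert_eq!(ImmutableArray::strong_count(&array), 1);
    /// ```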
    #[must_use]
    pub fn downgrade(instance: &Self) -> ImmutableWeakArray<T, TAllocator> {
        instance
            .internal
            .heap_data()
            .weak_counter()
            .fetch_add(1, Ordering::SeqCst);
        let internal = instance.internal.clone();
        ImmutableWeakArray::from(internal)
    }

    /// Releases the strong reference.
    ///
    /// Returns a weak reference if this was the last strong reference.
    /// Otherwise, returns `None`.
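    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest; it assumes the
    /// `std_alloc` feature so the default allocator is available):
    ///
    /// ```ignore
    /// let array: ImmutableArray<i32> = ImmutableArray::from_array([1, 2, 3]).unwrap();
    /// let second = array.clone();
    /// // Two strong references exist, so releasing one of them yields `None`.
    /// assert!(ImmutableArray::release(second).is_none());
    /// // Releasing the last strong reference yields the remaining weak handle.
    /// assert!(ImmutableArray::release(array).is_some());
    /// ```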
    pub fn release(mut instance: Self) -> Option<ImmutableWeakArray<T, TAllocator>> {
        let result = instance.internal_release();
        core::mem::forget(instance);
        result
    }

    /// Returns the number of strong references to the array.
    #[must_use]
    pub fn strong_count(instance: &Self) -> usize {
        instance.internal.heap_data().strong_counter().load(Ordering::SeqCst) as usize
    }

    /// Returns the number of weak references to the array.
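    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest; it assumes the
    /// `std_alloc` feature so the default allocator is available):
    ///
    /// ```ignore
    /// let array: ImmutableArray<i32> = ImmutableArray::from_array([1]).unwrap();
    /// let before = ImmutableArray::weak_count(&array);
    /// let _weak = ImmutableArray::downgrade(&array);
    /// // Each downgrade increments the weak counter by one.
    /// assert_eq!(ImmutableArray::weak_count(&array), before + 1);
    /// ```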
    #[must_use]
    pub fn weak_count(instance: &Self) -> usize {
        instance.internal.heap_data().weak_counter().load(Ordering::SeqCst) as usize
    }

    /// Returns `true` if the two [`ImmutableArray`] instances refer to the same memory.
    /// Otherwise, returns `false`. This is different from `==` comparison, which
    /// checks whether the content of the two arrays is the same, regardless of whether
    /// they point to the same memory or not.
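    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest; it assumes the
    /// `std_alloc` feature so the default allocator is available):
    ///
    /// ```ignore
    /// let first: ImmutableArray<i32> = ImmutableArray::from_array([1, 2, 3]).unwrap();
    /// let second: ImmutableArray<i32> = ImmutableArray::from_array([1, 2, 3]).unwrap();
    /// let alias = first.clone();
    /// assert!(first == second);                              // equal content
    /// assert!(!ImmutableArray::ref_equal(&first, &second));  // distinct allocations
    /// assert!(ImmutableArray::ref_equal(&first, &alias));    // same allocation
    /// ```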
    #[inline(always)]
    #[must_use]
    pub fn ref_equal(left: &Self, right: &Self) -> bool {
        let left = core::ptr::from_ref(left.internal.heap_data());
        let right = core::ptr::from_ref(right.internal.heap_data());
        core::ptr::addr_eq(left, right)
    }

    pub(crate) fn internal_release(&mut self) -> Option<ImmutableWeakArray<T, TAllocator>> {
        let strong_counter = self
            .internal
            .heap_data()
            .strong_counter()
            .fetch_sub(1, Ordering::SeqCst);
        if strong_counter == 1 {
            let internal = unsafe { core::ptr::read(&self.internal) };
            Some(ImmutableWeakArray::from(internal))
        } else {
            None
        }
    }
}

impl<T: Sized, TAllocator: Allocator> ImmutableArray<T, TAllocator> {
    /// Constructs a new [`ImmutableArray`] from an array with the default allocator.
    /// It moves the array's elements into the new [`ImmutableArray`].
    ///
    /// # Errors
    ///
    /// For details see [`ImmutableArrayConstructionError`].
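    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest; it assumes the
    /// `std_alloc` feature so the default allocator is available):
    ///
    /// ```ignore
    /// let array: ImmutableArray<i32> = ImmutableArray::from_array([10, 20, 30]).unwrap();
    /// assert_eq!(array.as_slice(), &[10, 20, 30]);
    /// ```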
    pub fn from_array<const N: usize>(array: [T; N]) -> Result<Self, ImmutableArrayConstructionError> {
        Self::from_array_with_allocator(array, TAllocator::default())
    }

    /// Constructs a new [`ImmutableArray`] from an array and an allocator.
    /// It moves the array's elements into the new [`ImmutableArray`].
    ///
    /// # Errors
    ///
    /// For details see [`ImmutableArrayConstructionError`].
    pub fn from_array_with_allocator<const N: usize>(
        array: [T; N],
        allocator: TAllocator,
    ) -> Result<Self, ImmutableArrayConstructionError> {
        let slice_len = array.len();
        if slice_len > Self::MAX_LENGTH {
            return Err(ImmutableArrayConstructionError::ArrayTooLong);
        }

        let slice_len = unsafe { Length::new_unchecked(slice_len as i32) };

        let mut internal: InternalArray<T, TAllocator> = InternalArray::allocate(slice_len, slice_len, allocator)?;

        unsafe {
            let ptr = internal.heap_data_mut().data().as_ptr();
            debug_assert!(ptr.is_aligned(), "Data pointer is not aligned.");
            ptr.copy_from_nonoverlapping(array.as_ptr(), slice_len.into());
            core::mem::forget(array);
        }

        {
            *internal.heap_data_mut().strong_counter_mut().get_mut() = 1;
            *internal.heap_data_mut().weak_counter_mut().get_mut() = 1;
        }

        Ok(Self { internal })
    }
}

impl<T: Sized + Clone, TAllocator: Allocator> ImmutableArray<T, TAllocator> {
    /// Constructs a new [`ImmutableArray`] from a slice with the default allocator.
    /// It clones the slice's elements into the new [`ImmutableArray`].
    ///
    /// # Errors
    ///
    /// For details see [`ImmutableArrayConstructionError`].
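    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest; it assumes the
    /// `std_alloc` feature so the default allocator is available):
    ///
    /// ```ignore
    /// let values = [String::from("a"), String::from("b")];
    /// let array: ImmutableArray<String> = ImmutableArray::from_slice(&values).unwrap();
    /// assert_eq!(array.as_slice(), values.as_slice());
    /// ```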
    pub fn from_slice(slice: &[T]) -> Result<Self, ImmutableArrayConstructionError> {
        Self::from_slice_with_allocator(slice, TAllocator::default())
    }

    /// Constructs a new [`ImmutableArray`] from a slice and an allocator.
    /// It clones the slice's elements into the new [`ImmutableArray`].
    ///
    /// # Errors
    ///
    /// For details see [`ImmutableArrayConstructionError`].
    pub fn from_slice_with_allocator(
        slice: &[T],
        allocator: TAllocator,
    ) -> Result<Self, ImmutableArrayConstructionError> {
        let slice_len = slice.len();
        if slice_len > Self::MAX_LENGTH {
            return Err(ImmutableArrayConstructionError::ArrayTooLong);
        }

        let slice_len = unsafe { Length::new_unchecked(slice_len as i32) };

        let mut internal: InternalArray<T, TAllocator> = InternalArray::allocate(slice_len, slice_len, allocator)?;

        unsafe {
            let mut ptr = internal.heap_data_mut().data().as_ptr();
            debug_assert!(ptr.is_aligned(), "Data pointer is not aligned.");

            for item in slice {
                ptr.write(item.clone());
                ptr = ptr.add(1);
            }
        }

        {
            *internal.heap_data_mut().strong_counter_mut().get_mut() = 1;
            *internal.heap_data_mut().weak_counter_mut().get_mut() = 1;
        }

        Ok(Self { internal })
    }
}

impl<T: Sized, TAllocator: Allocator> Drop for ImmutableArray<T, TAllocator> {
    fn drop(&mut self) {
        self.internal_release();
    }
}

impl<T: Sized, TAllocator: Allocator> Clone for ImmutableArray<T, TAllocator> {
    fn clone(&self) -> Self {
        self.internal
            .heap_data()
            .strong_counter()
            .fetch_add(1, Ordering::SeqCst);
        Self {
            internal: self.internal.clone(),
        }
    }
}

impl<T: Sized + PartialEq, TAllocator1: Allocator, TAllocator2: Allocator> PartialEq<ImmutableArray<T, TAllocator1>>
    for ImmutableArray<T, TAllocator2>
{
    fn eq(&self, other: &ImmutableArray<T, TAllocator1>) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T: Sized + Eq, TAllocator: Allocator> Eq for ImmutableArray<T, TAllocator> {}

impl<T: Sized + core::hash::Hash, TAllocator: Allocator> core::hash::Hash for ImmutableArray<T, TAllocator> {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}

impl<T: Sized, TAllocator: Allocator> core::fmt::Debug for ImmutableArray<T, TAllocator> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let ptr = core::ptr::from_ref::<HeapData<T>>(self.internal.heap_data());
        f.debug_struct("ImmutableArray")
            .field("strong_count", &Self::strong_count(self))
            .field("weak_count", &Self::weak_count(self))
            .field("len", &self.len())
            .field("capacity", &self.internal.capacity())
            .field("raw_ptr", &ptr.addr())
            .finish()
    }
}

impl<T: Sized, TAllocator: Allocator> core::ops::Deref for ImmutableArray<T, TAllocator> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T: Sized, TAllocator: Allocator> AsRef<[T]> for ImmutableArray<T, TAllocator> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}