flex_array/flex_array/array.rs

use core::alloc::Layout;
use core::marker::PhantomData;
use core::mem::forget;
use core::ops::Index;
use core::ops::IndexMut;
use core::ptr;
use core::ptr::NonNull;
use core::slice;

use super::inner::Inner;
use crate::alloc::AltAllocator;
#[cfg(feature = "std_alloc")]
use crate::alloc::Global;
use crate::types::ErrorReason;
use crate::types::FlexArrErr;
use crate::types::FlexArrResult;
use crate::types::LengthType;
macro_rules! define_array_struct {
    ($($global:ty)?) => {
        /// `FlexArr` is a dynamic array that addresses some of the limitations of Rust’s standard `Vec`.
        ///
        /// `FlexArr` uses fallible allocations, meaning that instead of panicking on allocation failure,
        /// it returns an error. This allows one to handle the error in a more graceful or robust manner.
        /// `Vec` does have some fallible allocation methods, but most are currently unstable.
        ///
        /// In addition, one can customize the type used for the length, capacity, and indexing operations.
        /// For example, on a 64-bit system the standard `Vec` typically uses 24 bytes; specifying a type
        /// smaller than `usize` (e.g. `u32`) for the length generic reduces `FlexArr` to just 16 bytes.
        ///
        /// Lastly, since the allocator API is not stable yet, this crate provides an alternate trait,
        /// `AltAllocator`, which works like `Allocator` and can be used with `FlexArr` to specify the
        /// allocator to use.
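        ///
        /// # Example
        ///
        /// A minimal usage sketch (this assumes the `std_alloc` feature is enabled, that `FlexArr`
        /// is re-exported at the crate root, and that the error type implements `Debug`; allocation
        /// errors are simply unwrapped for brevity):
        ///
        /// ```
        /// use flex_array::FlexArr;
        ///
        /// // The length/capacity/index type defaults to `u32`.
        /// let mut arr: FlexArr<i32> = FlexArr::new();
        /// arr.push(1).unwrap();
        /// arr.push(2).unwrap();
        ///
        /// assert_eq!(arr.len(), 2);
        /// assert_eq!(arr.pop(), Some(2));
        /// ```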
        #[derive(Debug)]
        pub struct FlexArr<T, A: AltAllocator $(= $global)?, L: LengthType = u32>
        where
            usize: TryFrom<L>,
        {
            inner: Inner<A, L>,
            _ph:   PhantomData<T>,
        }
    };
}

#[cfg(feature = "std_alloc")]
define_array_struct!(Global);

#[cfg(not(feature = "std_alloc"))]
define_array_struct!();

// Creation and Reservation methods.
impl<T, A: AltAllocator, L: LengthType> FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    /// Constructs a new, empty `FlexArr` using the given allocator.
    ///
    /// This function initializes the array without performing any memory allocation. The resulting
    /// `FlexArr` is empty, and memory will only be allocated when elements are added.
    pub const fn new_in(alloc: A) -> Self {
        return Self {
            inner: Inner::new_in::<T>(alloc),
            _ph:   PhantomData,
        };
    }

    /// Creates a new `FlexArr` with the specified capacity using the provided allocator.
    ///
    /// This function attempts to allocate enough memory for the desired capacity during initialization.
    /// If the allocation fails, a `FlexArrErr` is returned.
    pub fn with_capacity_in(alloc: A, capacity: L) -> FlexArrResult<Self> {
        let mut inner = Inner::new_in::<T>(alloc);
        inner.expand_capacity_to(capacity, Self::LAYOUT)?;
        return Ok(Self {
            inner: inner,
            _ph:   PhantomData,
        });
    }

    /// Ensures that `FlexArr` has enough capacity to store at least `additional` more elements.
    /// It may reserve more than `additional` elements. You can use this if you anticipate
    /// how many elements need to be inserted to avoid frequent reallocations.
    ///
    /// If the capacity is already sufficient, this method does nothing.
    ///
    /// # Errors
    ///
    /// Returns a `FlexArrErr` if memory reallocation fails or if there is an error converting
    /// the required capacity.
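    ///
    /// # Example
    ///
    /// A sketch of reserving ahead of a batch of pushes (assumes the `std_alloc` feature and a
    /// crate-root re-export of `FlexArr`; errors are unwrapped for brevity):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<u64> = FlexArr::new();
    /// arr.reserve(100).unwrap();
    /// assert!(arr.capacity() >= 100);
    ///
    /// // Pushing up to 100 elements now needs no further reallocation.
    /// for i in 0..100 {
    ///     arr.push(i).unwrap();
    /// }
    /// assert_eq!(arr.len(), 100);
    /// ```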
    #[inline]
    pub fn reserve(&mut self, additional: L) -> FlexArrResult<()> {
        let needed = self.capacity_needed(additional)?;
        let cap = self.capacity();
        if cap >= needed {
            return Ok(());
        }

        return self.inner.expand_capacity_at_least(needed, Self::LAYOUT);
    }

    /// Ensures that `FlexArr` can store at least `additional` more elements,
    /// with the capacity specified as a `usize`.
    ///
    /// This method works the same as `self.reserve()`, but it accepts a `usize`
    /// for convenience. It attempts to convert the value to the `LengthType`
    /// and reserves the necessary capacity.
    ///
    /// # Errors
    ///
    /// Returns a `FlexArrErr` if the conversion to the length type overflows or if
    /// memory reallocation fails.
    #[inline]
    pub fn reserve_usize(&mut self, additional: usize) -> FlexArrResult<()> {
        let Ok(add) = L::try_from(additional) else {
            return Err(FlexArrErr::new(ErrorReason::CapacityOverflow));
        };
        return self.reserve(add);
    }

    /// Ensures that `FlexArr` has exactly enough capacity for `additional` more elements.
    ///
    /// While the allocator may allocate slightly more memory than requested, this method
    /// aims to match the exact required capacity. Use this when you know the exact number
    /// of elements to be inserted to minimize wasted memory.
    ///
    /// If the current capacity is already sufficient, this method does nothing.
    ///
    /// # Errors
    ///
    /// Returns a `FlexArrErr` if memory reallocation fails or if there is an error converting
    /// the required capacity.
    pub fn reserve_exact(&mut self, additional: L) -> FlexArrResult<()> {
        let needed = self.capacity_needed(additional)?;
        let cap = self.capacity();
        if cap >= needed {
            return Ok(());
        }

        return self.inner.expand_capacity_to(needed, Self::LAYOUT);
    }

    /// Clears all elements from the `FlexArr`, dropping each element without releasing allocated memory.
    ///
    /// This operation resets the array’s length to zero while preserving its capacity.
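    ///
    /// # Example
    ///
    /// A sketch showing that `clear()` drops the elements but keeps the allocation
    /// (assumes the `std_alloc` feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[1, 2, 3]).unwrap();
    ///
    /// let cap_before = arr.capacity();
    /// arr.clear();
    ///
    /// assert!(arr.is_empty());
    /// assert_eq!(arr.capacity(), cap_before);
    /// ```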
    pub fn clear(&mut self) {
        unsafe { ptr::drop_in_place(self.as_mut_slice()) };
        self.inner.length = L::ZERO_VALUE;
    }

    /// Reduces the length of the `FlexArr` to the specified value, dropping all elements beyond that point.
    ///
    /// If the provided `length` is greater than or equal to the current length, the method does nothing.
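    ///
    /// # Example
    ///
    /// A sketch of truncating away the tail of the array (assumes the `std_alloc` feature and a
    /// crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[1, 2, 3, 4]).unwrap();
    ///
    /// arr.truncate(2);
    /// assert_eq!(arr.as_slice(), &[1, 2]);
    ///
    /// // Truncating to a larger length does nothing.
    /// arr.truncate(10);
    /// assert_eq!(arr.len(), 2);
    /// ```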
    pub fn truncate(&mut self, length: L) {
        let len = self.len();
        if length >= len {
            return;
        }
        let left_over = (len - length).as_usize();
        let usz = length.as_usize();

        let loc = unsafe { self.as_mut_ptr().add(usz) };
        let slc = unsafe { slice::from_raw_parts_mut(loc, left_over) };
        unsafe { ptr::drop_in_place(slc) };

        self.inner.length = length;
    }

    /// Returns a reference to the current allocator.
    #[inline]
    pub const fn allocator(array: &Self) -> &A {
        return Inner::allocator(&array.inner);
    }
}

// Methods for working with individual items.
impl<T, A: AltAllocator, L: LengthType> FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    /// Removes and returns the last element from the `FlexArr`.
    ///
    /// If the array is empty, this method returns `None`.
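    ///
    /// # Example
    ///
    /// A sketch (assumes the `std_alloc` feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.push(7).unwrap();
    ///
    /// assert_eq!(arr.pop(), Some(7));
    /// assert_eq!(arr.pop(), None);
    /// ```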
    pub fn pop(&mut self) -> Option<T> {
        let len = self.inner.length;
        if len <= L::ZERO_VALUE {
            return None;
        }
        let ret = unsafe { ptr::read(self.as_ptr().add(len.as_usize() - 1)) };
        self.inner.length = len - L::ONE_VALUE;
        return Some(ret);
    }

    /// Appends an element to the end of the `FlexArr`.
    ///
    /// If there isn’t enough capacity, this method attempts to expand the underlying storage.
    /// Should the allocation fail, a `FlexArrErr` is returned.
    ///
    /// # Errors
    ///
    /// Returns a `FlexArrErr` if memory expansion fails or if there is a conversion error when
    /// determining the new index.
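    ///
    /// # Example
    ///
    /// A sketch of pushing and handling a possible allocation failure (assumes the `std_alloc`
    /// feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<&str> = FlexArr::new();
    /// if arr.push("hello").is_err() {
    ///     // Allocation failed; handle it instead of panicking.
    /// }
    /// assert_eq!(arr.get(0), Some(&"hello"));
    /// ```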
    pub fn push(&mut self, item: T) -> FlexArrResult<()> {
        let needed = self.capacity_needed(L::ONE_VALUE)?;

        if needed > self.capacity() {
            self.inner.expand_capacity_at_least(needed, Self::LAYOUT)?;
        }

        let old_len = self.inner.length;
        // Using `as_usize()` here is always fine: the capacity is at least as
        // large as the length, and the capacity already had to be converted
        // to a usize in order to allocate the memory, so there is no need to
        // use try_from().
        //
        // In the event the type is a ZST and the length type can be larger
        // than usize, this is also fine, since ANYTHING added to the dangling
        // pointer for a ZST is going to be the same dangling pointer.
        let usz_len = old_len.as_usize();

        let loc = unsafe { self.as_mut_ptr().add(usz_len) };
        unsafe { ptr::write(loc, item) };

        // This will always be less than or equal to `needed`, so
        // plain addition is fine.
        self.inner.length = old_len + L::ONE_VALUE;

        return Ok(());
    }

    /// Removes and returns the element at the specified `index` from the `FlexArr`.
    ///
    /// If the `index` is out of bounds, this method returns `None`.
    ///
    /// Note that this operation shifts all elements after `index` one position to the left,
    /// resulting in **O(n)** time complexity.
    ///
    /// # Returns
    ///
    /// - `Some(T)` if the element at `index` was successfully removed.
    /// - `None` if `index` is out of bounds.
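    ///
    /// # Example
    ///
    /// A sketch showing the order-preserving removal (assumes the `std_alloc` feature and a
    /// crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[1, 2, 3]).unwrap();
    ///
    /// assert_eq!(arr.remove(1), Some(2));
    /// assert_eq!(arr.as_slice(), &[1, 3]);
    /// assert_eq!(arr.remove(5), None);
    /// ```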
    pub fn remove(&mut self, index: L) -> Option<T> {
        let len = self.len();
        if index >= len {
            return None;
        }

        let usz_len = len.as_usize();
        let usz_ind = index.as_usize();
        let items = usz_len - usz_ind - 1;

        let loc = unsafe { self.as_mut_ptr().add(usz_ind) };
        let src = unsafe { loc.add(1) } as *const T;
        let item = unsafe { ptr::read(loc) };

        unsafe { ptr::copy(src, loc, items) };

        self.inner.length = self.len() - L::ONE_VALUE;
        return Some(item);
    }

    /// Removes an element from the `FlexArr` by swapping it with the last element, then popping it off.
    ///
    /// Unlike `Vec::swap_remove()`, this method returns `None` if `index` is out of bounds instead of panicking.
    /// This operation does not preserve the order of elements but runs in **O(1)** time.
    ///
    /// # Returns
    ///
    /// - `Some(T)` if the element at `index` was successfully removed.
    /// - `None` if `index` is out of bounds.
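    ///
    /// # Example
    ///
    /// A sketch showing that the last element is moved into the vacated slot (assumes the
    /// `std_alloc` feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[1, 2, 3, 4]).unwrap();
    ///
    /// assert_eq!(arr.swap_remove(0), Some(1));
    /// assert_eq!(arr.as_slice(), &[4, 2, 3]);
    /// ```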
    pub fn swap_remove(&mut self, index: L) -> Option<T> {
        if index >= self.len() {
            return None;
        }

        // If the check above succeeded, then there is always at least one element.
        let ptr = self.as_mut_ptr();
        let loc = unsafe { ptr.add(index.as_usize()) };
        let end = unsafe { ptr.add(self.len().as_usize() - 1) } as *const T;
        let item = unsafe { ptr::read(loc) };
        unsafe { ptr::copy(end, loc, 1) };

        self.inner.length = self.len() - L::ONE_VALUE;
        return Some(item);
    }

    /// Returns a reference to the element at the specified `index`,
    /// or `None` if the index is out of bounds.
    ///
    /// Note that this method only supports single-element access, not
    /// ranges. Extending to range-based access would require a custom
    /// trait since Rust's `SliceIndex` trait is sealed.
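    ///
    /// # Example
    ///
    /// A sketch (assumes the `std_alloc` feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[10, 20]).unwrap();
    ///
    /// assert_eq!(arr.get(1), Some(&20));
    /// assert_eq!(arr.get(2), None);
    /// ```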
    pub fn get(&self, index: L) -> Option<&T> {
        let len = self.len();
        if index >= len {
            return None;
        }
        return Some(unsafe { self.get_unchecked(index) });
    }

    /// Returns a reference to the element at the specified `index`
    /// without performing any bounds checking.
    ///
    /// This method behaves like `get()`, but skips the bounds check.
    /// It is marked as `unsafe` because providing an out-of-bounds
    /// index will result in undefined behavior.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `index` is within bounds.
    #[inline]
    pub unsafe fn get_unchecked(&self, index: L) -> &T {
        let usz_ind = index.as_usize();
        let loc = unsafe { self.as_ptr().add(usz_ind) };
        let refr = unsafe { &*loc };
        return refr;
    }

    /// Returns a mutable reference to the element at the specified `index`,
    /// or `None` if the index is out of bounds.
    ///
    /// Note that this method only supports single-element access, not
    /// ranges. Extending to range-based access would require a custom
    /// trait since Rust's `SliceIndex` trait is sealed.
    pub fn get_mut(&mut self, index: L) -> Option<&mut T> {
        let len = self.len();
        if index >= len {
            return None;
        }
        return Some(unsafe { self.get_mut_unchecked(index) });
    }

    /// Returns a mutable reference to the element at the specified `index`
    /// without performing any bounds checking.
    ///
    /// This method behaves like `get_mut()`, but skips the bounds check.
    /// It is marked as `unsafe` because providing an out-of-bounds
    /// index will result in undefined behavior.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `index` is within bounds.
    #[inline]
    pub unsafe fn get_mut_unchecked(&mut self, index: L) -> &mut T {
        let usz_ind = index.as_usize();
        let loc = unsafe { self.as_mut_ptr().add(usz_ind) };
        let refr = unsafe { &mut *loc };
        return refr;
    }

    /// Inserts an element at the specified `index`. If the index is out of bounds, an error
    /// is returned.
    ///
    /// If there isn’t enough capacity, this method attempts to expand the underlying storage.
    /// Should the allocation fail, a `FlexArrErr` is returned.
    ///
    /// # Errors
    ///
    /// Returns a `FlexArrErr` if memory expansion fails or if there is a conversion error when
    /// determining the new index.
    ///
    /// Additionally, it can return a `FlexArrErr` with a reason of `IndexOutOfBounds` if the index is out of bounds.
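    ///
    /// # Example
    ///
    /// A sketch (assumes the `std_alloc` feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[1, 3]).unwrap();
    ///
    /// arr.insert(1, 2).unwrap();
    /// assert_eq!(arr.as_slice(), &[1, 2, 3]);
    ///
    /// // Inserting past the end is an error rather than a panic.
    /// assert!(arr.insert(10, 4).is_err());
    /// ```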
    pub fn insert(&mut self, index: L, item: T) -> FlexArrResult<()> {
        let len = self.inner.length.as_usize();
        let Ok(index) = usize::try_from(index) else {
            return Err(FlexArrErr::new(ErrorReason::UsizeOverflow));
        };

        if index > len {
            return Err(FlexArrErr::new(ErrorReason::IndexOutOfBounds));
        }

        let needed = self.capacity_needed(L::ONE_VALUE)?;
        if needed > self.capacity() {
            self.inner.expand_capacity_at_least(needed, Self::LAYOUT)?;
        }

        // Shift all the elements over one to insert the item.
        let pos = unsafe { self.as_mut_ptr().add(index) };
        if index < len {
            unsafe { ptr::copy(pos, pos.add(1), len - index) };
        }
        unsafe { ptr::write(pos, item) };

        self.inner.length = self.inner.length + L::ONE_VALUE;
        return Ok(());
    }
}

// Methods for working with or getting slices.
impl<T, A: AltAllocator, L: LengthType> FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    /// Appends a slice of `T` elements to the end of the `FlexArr`.
    ///
    /// This method is available for types that implement `Copy`. It reserves any necessary
    /// additional capacity and then copies the elements from the provided slice into the array.
    ///
    /*/// If the type `T` does not implement `Copy`, consider using `extend_from_slice_clone`.*/
    ///
    /// # Errors
    ///
    /// Returns a `FlexArrErr` if memory expansion fails or if there is an error converting
    /// the capacity or length.
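    ///
    /// # Example
    ///
    /// A sketch (assumes the `std_alloc` feature and a crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<u8> = FlexArr::new();
    /// arr.extend_from_slice(b"abc").unwrap();
    /// arr.extend_from_slice(b"def").unwrap();
    ///
    /// assert_eq!(arr.as_slice(), b"abcdef");
    /// ```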
    pub fn extend_from_slice(&mut self, slice: &[T]) -> FlexArrResult<()>
    where
        T: Copy,
    {
        let slc_len = slice.len();
        self.reserve_usize(slc_len)?;

        let usz_len = self.inner.length.as_usize();
        let ptr = unsafe { self.as_mut_ptr().add(usz_len) };
        unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), ptr, slc_len) };

        self.inner.length = L::usize_as_self(slc_len + usz_len);
        return Ok(());
    }
    /*
        This is commented out for now: while a type that implements Clone may
        not always allocate memory, if it does there is no way to get the
        status of an allocation failure. Perhaps a different trait that users
        can implement would work.

        pub fn extend_from_slice_clone(&mut self, slice: &[T]) -> FlexArrResult<()>
        where
            T: Clone,
        {
            let slc_len = slice.len();
            self.expand_by_slice_len(slc_len)?;

            let usz_len = self.inner.length.as_usize();
            let mut arr_ptr = unsafe { self.as_mut_ptr().add(usz_len) };
            let mut slc_ptr = slice.as_ptr();
            let slc_end = unsafe { slice.as_ptr().add(slc_len) };

            while slc_ptr < slc_end {
                // Hmm, if clone allocates memory it may panic...
                let cloned = unsafe { (*slc_ptr).clone() };
                unsafe { ptr::write(arr_ptr, cloned) };
                arr_ptr = unsafe { arr_ptr.add(1) };
                slc_ptr = unsafe { slc_ptr.add(1) };
            }

            return Ok(());
        }
    */

    /// Returns a reference to the underlying storage as a slice.
    /// Unfortunately, since a `slice` is a built-in type, the indexing operations
    /// on it will use a `usize`.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.inner.length.as_usize()) }
    }

    /// Returns a mutable reference to the underlying storage as a slice.
    /// Unfortunately, since a `slice` is a built-in type, the indexing operations
    /// on it will use a `usize`.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.inner.length.as_usize()) }
    }
}

// Pretty much attribute methods and constants.
impl<T, A: AltAllocator, L: LengthType> FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    const LAYOUT: Layout = Layout::new::<T>();
    const SIZE: usize = size_of::<T>();

    /// Determines if the `FlexArr` is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        return self.len() == L::ZERO_VALUE;
    }

    /// Returns the number of elements in the `FlexArr`.
    #[inline]
    pub const fn len(&self) -> L {
        return self.inner.length;
    }

    /// Returns the number of elements `FlexArr` can store without needing to reallocate.
    ///
    /// For zero sized types, this function will return the maximum value for the `LengthType`.
    pub const fn capacity(&self) -> L {
        return self.inner.capacity(Self::SIZE);
    }

    /// Returns a raw pointer to the underlying storage. If the type is zero-sized,
    /// the pointer value will be a dangling pointer, like one would get from
    /// `NonNull::dangling()`.
    ///
    /// # Safety
    /// The caller should ensure the underlying storage outlives this pointer.
    /// Adding/removing items to the `FlexArr` can cause the pointer to become invalid.
    #[inline]
    pub const fn as_ptr(&self) -> *const T {
        return self.inner.get_ptr();
    }

    /// Returns a raw mutable pointer to the underlying storage. If the type is zero-sized,
    /// the pointer value will be a dangling pointer, like one would get from
    /// `NonNull::dangling()`.
    ///
    /// # Safety
    /// The caller should ensure the underlying storage outlives this pointer.
    /// Adding/removing items to the `FlexArr` can cause the pointer to become invalid.
    #[inline]
    pub const fn as_mut_ptr(&mut self) -> *mut T {
        return self.inner.get_mut_ptr();
    }

    /// Returns a `NonNull` pointer to the underlying storage. If the type is zero-sized,
    /// the pointer value will be a dangling pointer, like one would get from
    /// `NonNull::dangling()`.
    ///
    /// # Safety
    /// The caller should ensure the underlying storage outlives this pointer.
    /// Adding/removing items to the `FlexArr` can cause the pointer to become invalid.
    #[inline]
    pub const fn as_non_null(&mut self) -> NonNull<T> {
        return self.inner.get_non_null();
    }

    /// Consumes the `FlexArr` and returns a `NonNull` pointer to the underlying memory.
    ///
    /// Unlike `into_parts()`, this method only returns the pointer; it does not return
    /// the length, capacity, or allocator. This is mainly useful if you are already tracking
    /// those separately.
    ///
    /// After calling this method, you are responsible for managing the memory. If you need
    /// to properly deallocate it and avoid leaks, you should reconstruct a `FlexArr` using
    /// `from_parts()`.
    #[inline]
    pub const fn into_non_null(mut self) -> NonNull<T> {
        let ptr = self.inner.get_non_null();
        forget(self);
        return ptr;
    }

    /// Constructs a `FlexArr` from its raw components: a pointer, length, capacity, and allocator.
    ///
    /// # Safety
    ///
    /// This function has quite a few safety requirements that must be upheld:
    ///
    /// - `ptr`
    ///   - Must point to a memory block allocated by `alloc`.
    ///   - The total size in bytes must not exceed `isize::MAX`.
    /// - `T`
    ///   - The layout of `T` must match the layout used when allocating `ptr`.
    /// - `length`
    ///   - Must be ≤ `capacity`.
    ///   - Must not exceed the number of properly initialized elements in `ptr`.
    /// - `capacity`
    ///   - Must match the number of elements the original allocation can hold (i.e., the layout used).
    ///
    /// Violating any of these requirements will result in undefined behavior.
    #[inline]
    pub const unsafe fn from_parts(ptr: NonNull<T>, length: L, capacity: L, alloc: A) -> Self {
        return Self {
            inner: Inner {
                ptr:      ptr.cast(),
                length:   length,
                capacity: capacity,
                alloc:    alloc,
            },
            _ph:   PhantomData,
        };
    }

    /// Consumes the `FlexArr` and returns its raw components as a tuple:
    ///
    /// - `NonNull<T>`: A pointer to the underlying memory.
    /// - `L`: The length of the `FlexArr`.
    /// - `L`: The capacity of the `FlexArr`.
    /// - `A`: The allocator used to allocate the memory.
    ///
    /// After calling this method, you are responsible for managing the memory. If you need
    /// to properly deallocate it and avoid leaks, you should reconstruct a `FlexArr` using
    /// `from_parts()`.
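    ///
    /// # Example
    ///
    /// A sketch of a decompose/reconstruct round trip (assumes the `std_alloc` feature and a
    /// crate-root re-export of `FlexArr`):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let mut arr: FlexArr<i32> = FlexArr::new();
    /// arr.extend_from_slice(&[1, 2, 3]).unwrap();
    ///
    /// let (ptr, len, cap, alloc) = arr.into_parts();
    ///
    /// // SAFETY: every component came from `into_parts()` and is unchanged.
    /// let arr = unsafe { FlexArr::from_parts(ptr, len, cap, alloc) };
    /// assert_eq!(arr.as_slice(), &[1, 2, 3]);
    /// ```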
    #[inline]
    pub const fn into_parts(mut self) -> (NonNull<T>, L, L, A) {
        let ptr: NonNull<T> = self.inner.get_non_null();
        let len = self.inner.length;
        let cap = self.inner.capacity(Self::SIZE);

        let self_ptr = &mut self as *mut Self;
        let alloc_ptr = unsafe { &mut (*self_ptr).inner.alloc as *mut A };
        let alloc = unsafe { alloc_ptr.read() };

        forget(self);
        return (ptr, len, cap, alloc);
    }
}

// Non-public helper methods.
impl<T, A: AltAllocator, L: LengthType> FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    #[inline(always)]
    fn capacity_needed(&self, requested: L) -> FlexArrResult<L> {
        let Some(needed) = self.inner.length.checked_add(requested) else {
            return Err(FlexArrErr::new(ErrorReason::CapacityOverflow));
        };
        return Ok(needed);
    }
}

#[cfg(feature = "std_alloc")]
impl<T, L: LengthType> FlexArr<T, Global, L>
where
    usize: TryFrom<L>,
{
    /// Creates a new, empty `FlexArr` using the standard allocator.
    ///
    /// This functions similarly to `FlexArr::new_in()`, but automatically
    /// uses the global allocator. No memory is allocated until elements are added.
    ///
    /// This is only available if the `std_alloc` feature is enabled.
    pub const fn new() -> Self {
        return Self::new_in(Global);
    }

    /// Creates a new `FlexArr` with the specified capacity using the standard allocator.
    ///
    /// This functions similarly to `FlexArr::with_capacity_in()`, but automatically
    /// uses the global allocator.
    ///
    /// This is only available if the `std_alloc` feature is enabled.
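    ///
    /// # Example
    ///
    /// A sketch (assumes `FlexArr` is re-exported at the crate root):
    ///
    /// ```
    /// use flex_array::FlexArr;
    ///
    /// let arr: FlexArr<i32> = FlexArr::with_capacity(16).unwrap();
    ///
    /// assert!(arr.capacity() >= 16);
    /// assert!(arr.is_empty());
    /// ```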
    pub fn with_capacity(capacity: L) -> FlexArrResult<Self> {
        return Self::with_capacity_in(Global, capacity);
    }
}

// Trait implementations.

/// # Note on Indexing
/// Just like `[]` on Rust's slices, arrays, and `Vec`, an `index >= length`
/// will panic. This can also panic if the index value is too large to
/// fit into a `usize`.
impl<T, A: AltAllocator, L: LengthType> Index<L> for FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    type Output = T;
    fn index(&self, index: L) -> &Self::Output {
        // If the `LengthType` is larger than a `usize`, using `index as usize`
        // could silently truncate the value. That could cause the index
        // operation on the slice to succeed when it should fail. So make sure
        // that the index can fit into a `usize` before even attempting to
        // index the slice.
        let Ok(i) = usize::try_from(index) else {
            panic!("Index cannot be converted to usize");
        };
        return &self.as_slice()[i];
    }
}

/// # Note on Indexing
/// Just like `[]` on Rust's slices, arrays, and `Vec`, an `index >= length`
/// will panic. This can also panic if the index value is too large to
/// fit into a `usize`.
impl<T, A: AltAllocator, L: LengthType> IndexMut<L> for FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    fn index_mut(&mut self, index: L) -> &mut Self::Output {
        let Ok(i) = usize::try_from(index) else {
            panic!("Index cannot be converted to usize");
        };
        return &mut self.as_mut_slice()[i];
    }
}

impl<T, A: AltAllocator, L: LengthType> Drop for FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    fn drop(&mut self) {
        unsafe {
            ptr::drop_in_place(self.as_mut_slice());
            self.inner.deallocate(Self::LAYOUT);
        }
    }
}

impl<T, A: AltAllocator, L: LengthType> core::ops::Deref for FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    type Target = [T];

    #[inline]
    fn deref(&self) -> &[T] {
        return self.as_slice();
    }
}

impl<T, A: AltAllocator, L: LengthType> core::ops::DerefMut for FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        return self.as_mut_slice();
    }
}

impl<'a, T, A: AltAllocator, L: LengthType> IntoIterator for &'a FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        return self.as_slice().iter();
    }
}

impl<'a, T, A: AltAllocator, L: LengthType> IntoIterator for &'a mut FlexArr<T, A, L>
where
    usize: TryFrom<L>,
{
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        return self.as_mut_slice().iter_mut();
    }
}