// imbl_sized_chunks/inline_array/mod.rs

// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.

//! A fixed capacity array sized to match some other type `T`.
//!
//! See [`InlineArray`](struct.InlineArray.html)
use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering;
use core::fmt::{Debug, Error, Formatter};
use core::hash::{Hash, Hasher};
use core::iter::FromIterator;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::ptr::NonNull;
use core::slice::{from_raw_parts, from_raw_parts_mut, Iter as SliceIter, IterMut as SliceIterMut};

mod iter;
pub use self::iter::{Drain, Iter};

/// A fixed capacity array sized to match some other type `T`.
///
/// This works like a vector, but allocated on the stack (and thus marginally
/// faster than `Vec`), with the allocated space exactly matching the size of
/// the given type `T`. The vector consists of a `usize` tracking its current
/// length and zero or more elements of type `A`. The capacity is thus
/// `( size_of::<T>() - size_of::<usize>() ) / size_of::<A>()`. This could lead
/// to situations where the capacity is zero, if `size_of::<A>()` is greater
/// than `size_of::<T>() - size_of::<usize>()`, which is not an error and
/// handled properly by the data structure.
///
/// If `size_of::<T>()` is less than `size_of::<usize>()`, meaning the vector
/// has no space to store its length, `InlineArray::new()` will panic.
///
/// This is meant to facilitate optimisations where a list data structure
/// allocates a fairly large struct for itself, allowing you to replace it with
/// an `InlineArray` until it grows beyond its capacity. This not only gives you
/// a performance boost at very small sizes, it also saves you from having to
/// allocate anything on the heap until absolutely necessary.
///
/// For instance, `im::Vector<A>` in its final form currently looks like this
/// (approximately):
///
/// ```rust, ignore
/// struct RRB<A> {
///     length: usize,
///     tree_height: usize,
///     outer_head: Rc<Chunk<A>>,
///     inner_head: Rc<Chunk<A>>,
///     tree: Rc<TreeNode<A>>,
///     inner_tail: Rc<Chunk<A>>,
///     outer_tail: Rc<Chunk<A>>,
/// }
/// ```
///
/// That's two `usize`s and five `Rc`s, which comes in at 56 bytes on x86_64
/// architectures. With `InlineArray`, that leaves us with 56 -
/// `size_of::<usize>()` = 48 bytes we can use before having to expand into the
/// full data structure. If `A` is `u8`, that's 48 elements, and even if `A` is a
/// pointer we can still keep 6 of them inline before we run out of capacity.
///
/// We can declare an enum like this:
///
/// ```rust, ignore
/// enum VectorWrapper<A> {
///     Inline(InlineArray<A, RRB<A>>),
///     Full(RRB<A>),
/// }
/// ```
///
/// Both of these will have the same size, and we can swap the `Inline` case out
/// with the `Full` case once the `InlineArray` runs out of capacity.
#[repr(C)]
pub struct InlineArray<A, T> {
    // Alignment tricks
    //
    // We need both the `_header_align` and `data` to be properly aligned in memory. We do a few tricks
    // to handle that.
    //
    // * An alignment is always power of 2. Therefore, with a pair of alignments, one is always
    //   a multiple of the other (one way or the other).
    // * A struct is aligned to at least the max alignment of each of its fields.
    // * A `repr(C)` struct follows the order of fields and pushes each as close to the previous one
    //   as allowed by alignment.
    //
    // By placing two "fake" fields that have 0 size, but an alignment first, we make sure that all
    // 3 start at the beginning of the struct and that all of them are aligned to their maximum
    // alignment.
    //
    // Unfortunately, we can't use `[A; 0]` to align to actual alignment of the type `A`, because
    // it prevents use of `InlineArray` in recursive types.
    // We rely on alignment of `u64`/`usize` or `T` to be sufficient, and panic otherwise. We use
    // `u64` to handle all common types on 32-bit systems too.
    //
    // Furthermore, because we don't know if `u64` or `A` has bigger alignment, we decide on case by
    // case basis if the header or the elements go first. By placing the one with higher alignment
    // requirements first, we align that one and the other one will be aligned "automatically" when
    // placed just after it.
    //
    // To the best of our knowledge, this is all guaranteed by the compiler. But just to make sure,
    // we have bunch of asserts in the constructor to check; as these are invariants enforced by
    // the compiler, it should be trivial for it to remove the checks so they are for free (if we
    // are correct) or will save us (if we are not).
    //
    // Zero-sized field whose only job is to force the struct's alignment to at
    // least that of `u64` and `usize`.
    _header_align: [(u64, usize); 0],
    // Marks ownership of `A` values for drop-check purposes without storing any.
    _phantom: PhantomData<A>,
    // Raw storage: holds the `usize` length header plus up to `CAPACITY`
    // elements of `A`, manually laid out inside the bytes of a `T`.
    data: MaybeUninit<T>,
}

/// Compute how many elements of size `element_size` fit in a host of
/// `host_size` bytes after reserving `header_size` bytes for the length.
///
/// Returns `usize::MAX` for zero-sized elements, and 0 when the element
/// alignment cannot be guaranteed or there is no room past the header.
const fn capacity(
    host_size: usize,
    header_size: usize,
    element_size: usize,
    element_align: usize,
    container_align: usize,
) -> usize {
    if element_size == 0 {
        // Zero-sized elements occupy no space, so any number of them fit.
        return usize::MAX;
    }
    if element_align > container_align || host_size <= header_size {
        // Larger alignment can't be guaranteed, or there is no space left
        // after the header, so it'd be unsafe to store any elements.
        return 0;
    }
    (host_size - header_size) / element_size
}

128impl<A, T> InlineArray<A, T> {
129    const HOST_SIZE: usize = mem::size_of::<T>();
130    const ELEMENT_SIZE: usize = mem::size_of::<A>();
131    const HEADER_SIZE: usize = mem::size_of::<usize>();
132    // Do we place the header before the elements or the other way around?
133    const HEADER_FIRST: bool = mem::align_of::<usize>() >= mem::align_of::<A>();
134    // Note: one of the following is always 0
135    // How many usizes to skip before the first element?
136    const ELEMENT_SKIP: usize = Self::HEADER_FIRST as usize;
137    // How many elements to skip before the header
138    const HEADER_SKIP: usize = Self::CAPACITY * (1 - Self::ELEMENT_SKIP);
139
140    /// The maximum number of elements the `InlineArray` can hold.
141    pub const CAPACITY: usize = capacity(
142        Self::HOST_SIZE,
143        Self::HEADER_SIZE,
144        Self::ELEMENT_SIZE,
145        mem::align_of::<A>(),
146        mem::align_of::<Self>(),
147    );
148
149    #[inline]
150    #[must_use]
151    unsafe fn len_const(&self) -> *const usize {
152        let ptr = unsafe {
153            self.data
154                .as_ptr()
155                .cast::<A>()
156                .add(Self::HEADER_SKIP)
157                .cast::<usize>()
158        };
159        debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
160        ptr
161    }
162
163    #[inline]
164    #[must_use]
165    pub(crate) unsafe fn len_mut(&mut self) -> *mut usize {
166        let ptr = unsafe {
167            self.data
168                .as_mut_ptr()
169                .cast::<A>()
170                .add(Self::HEADER_SKIP)
171                .cast::<usize>()
172        };
173        debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
174        ptr
175    }
176
177    #[inline]
178    #[must_use]
179    pub(crate) unsafe fn data(&self) -> *const A {
180        if Self::CAPACITY == 0 {
181            return NonNull::<A>::dangling().as_ptr();
182        }
183        let ptr = unsafe {
184            self.data
185                .as_ptr()
186                .cast::<usize>()
187                .add(Self::ELEMENT_SKIP)
188                .cast::<A>()
189        };
190        debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
191        ptr
192    }
193
194    #[inline]
195    #[must_use]
196    unsafe fn data_mut(&mut self) -> *mut A {
197        if Self::CAPACITY == 0 {
198            return NonNull::<A>::dangling().as_ptr();
199        }
200        let ptr = unsafe {
201            self.data
202                .as_mut_ptr()
203                .cast::<usize>()
204                .add(Self::ELEMENT_SKIP)
205                .cast::<A>()
206        };
207        debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
208        ptr
209    }
210
211    #[inline]
212    #[must_use]
213    unsafe fn ptr_at(&self, index: usize) -> *const A {
214        debug_assert!(index < Self::CAPACITY);
215        unsafe { self.data().add(index) }
216    }
217
218    #[inline]
219    #[must_use]
220    unsafe fn ptr_at_mut(&mut self, index: usize) -> *mut A {
221        debug_assert!(index < Self::CAPACITY);
222        unsafe { self.data_mut().add(index) }
223    }
224
225    #[inline]
226    unsafe fn read_at(&self, index: usize) -> A {
227        unsafe { ptr::read(self.ptr_at(index)) }
228    }
229
230    #[inline]
231    unsafe fn write_at(&mut self, index: usize, value: A) {
232        unsafe { ptr::write(self.ptr_at_mut(index), value) };
233    }
234
235    /// Get the length of the array.
236    #[inline]
237    #[must_use]
238    pub fn len(&self) -> usize {
239        unsafe { *self.len_const() }
240    }
241
242    /// Test if the array is empty.
243    #[inline]
244    #[must_use]
245    pub fn is_empty(&self) -> bool {
246        self.len() == 0
247    }
248
249    /// Test if the array is at capacity.
250    #[inline]
251    #[must_use]
252    pub fn is_full(&self) -> bool {
253        self.len() >= Self::CAPACITY
254    }
255
256    /// Construct a new empty array.
257    ///
258    /// # Panics
259    ///
260    /// If the element type requires large alignment, which is larger than
261    /// both alignment of `usize` and alignment of the type that provides the capacity.
262    #[inline]
263    #[must_use]
264    pub fn new() -> Self {
265        assert!(Self::HOST_SIZE > Self::HEADER_SIZE);
266        assert!(
267            (Self::CAPACITY == 0) || (mem::align_of::<Self>() % mem::align_of::<A>() == 0),
268            "InlineArray can't satisfy alignment of {}",
269            core::any::type_name::<A>()
270        );
271
272        let mut self_ = Self {
273            _header_align: [],
274            _phantom: PhantomData,
275            data: MaybeUninit::uninit(),
276        };
277        // Sanity check our assumptions about what is guaranteed by the compiler. If we are right,
278        // these should completely optimize out of the resulting binary.
279        assert_eq!(
280            &self_ as *const _ as usize,
281            self_.data.as_ptr() as usize,
282            "Padding at the start of struct",
283        );
284        assert_eq!(
285            self_.data.as_ptr() as usize % mem::align_of::<usize>(),
286            0,
287            "Unaligned header"
288        );
289        assert!(
290            mem::size_of::<Self>() == mem::size_of::<T>()
291                || mem::align_of::<T>() < mem::align_of::<Self>()
292        );
293        assert_eq!(0, unsafe { self_.data() } as usize % mem::align_of::<A>());
294        assert_eq!(
295            0,
296            unsafe { self_.data_mut() } as usize % mem::align_of::<A>()
297        );
298        assert!(Self::ELEMENT_SKIP == 0 || Self::HEADER_SKIP == 0);
299        unsafe { ptr::write(self_.len_mut(), 0usize) };
300        self_
301    }
302
303    /// Push an item to the back of the array.
304    ///
305    /// Panics if the capacity of the array is exceeded.
306    ///
307    /// Time: O(1)
308    pub fn push(&mut self, value: A) {
309        if self.is_full() {
310            panic!("InlineArray::push: chunk size overflow");
311        }
312        unsafe {
313            self.write_at(self.len(), value);
314            *self.len_mut() += 1;
315        }
316    }
317
318    /// Pop an item from the back of the array.
319    ///
320    /// Returns `None` if the array is empty.
321    ///
322    /// Time: O(1)
323    pub fn pop(&mut self) -> Option<A> {
324        if self.is_empty() {
325            None
326        } else {
327            unsafe {
328                *self.len_mut() -= 1;
329            }
330            Some(unsafe { self.read_at(self.len()) })
331        }
332    }
333
334    /// Insert a new value at index `index`, shifting all the following values
335    /// to the right.
336    ///
337    /// Panics if the index is out of bounds or the array is at capacity.
338    ///
339    /// Time: O(n) for the number of items shifted
340    pub fn insert(&mut self, index: usize, value: A) {
341        if self.is_full() {
342            panic!("InlineArray::push: chunk size overflow");
343        }
344        if index > self.len() {
345            panic!("InlineArray::insert: index out of bounds");
346        }
347        unsafe {
348            let src = self.ptr_at_mut(index);
349            ptr::copy(src, src.add(1), self.len() - index);
350            ptr::write(src, value);
351            *self.len_mut() += 1;
352        }
353    }
354
355    /// Remove the value at index `index`, shifting all the following values to
356    /// the left.
357    ///
358    /// Returns the removed value, or `None` if the array is empty or the index
359    /// is out of bounds.
360    ///
361    /// Time: O(n) for the number of items shifted
362    pub fn remove(&mut self, index: usize) -> Option<A> {
363        if index >= self.len() {
364            None
365        } else {
366            unsafe {
367                let src = self.ptr_at_mut(index);
368                let value = ptr::read(src);
369                *self.len_mut() -= 1;
370                ptr::copy(src.add(1), src, self.len() - index);
371                Some(value)
372            }
373        }
374    }
375
376    /// Split an array into two, the original array containing
377    /// everything up to `index` and the returned array containing
378    /// everything from `index` onwards.
379    ///
380    /// Panics if `index` is out of bounds.
381    ///
382    /// Time: O(n) for the number of items in the new chunk
383    pub fn split_off(&mut self, index: usize) -> Self {
384        if index > self.len() {
385            panic!("InlineArray::split_off: index out of bounds");
386        }
387        let mut out = Self::new();
388        if index < self.len() {
389            unsafe {
390                ptr::copy(self.ptr_at(index), out.data_mut(), self.len() - index);
391                *out.len_mut() = self.len() - index;
392                *self.len_mut() = index;
393            }
394        }
395        out
396    }
397
398    /// Shortens the array, keeping the first `len` elements and dropping the
399    /// rest.
400    ///
401    /// If `len` is greater or equal to the array's current length, this has no
402    /// effect.
403    pub fn truncate(&mut self, len: usize) {
404        if len >= self.len() {
405            return;
406        }
407
408        unsafe {
409            ptr::drop_in_place::<[A]>(&mut (**self)[len..]);
410            *self.len_mut() = len;
411        }
412    }
413
414    #[inline]
415    unsafe fn drop_contents(&mut self) {
416        unsafe { ptr::drop_in_place::<[A]>(&mut **self) } // uses DerefMut
417    }
418
419    /// Discard the contents of the array.
420    ///
421    /// Time: O(n)
422    pub fn clear(&mut self) {
423        unsafe {
424            self.drop_contents();
425            *self.len_mut() = 0;
426        }
427    }
428
429    /// Construct an iterator that drains values from the front of the array.
430    pub fn drain(&mut self) -> Drain<'_, A, T> {
431        Drain { array: self }
432    }
433}
434
// Dropping the array drops every initialized element; the storage itself is
// inline, so no deallocation is needed.
impl<A, T> Drop for InlineArray<A, T> {
    fn drop(&mut self) {
        // SAFETY: `drop_contents` drops exactly the `len` initialized
        // elements, and the array is never used again after `drop`.
        unsafe { self.drop_contents() }
    }
}

441impl<A, T> Default for InlineArray<A, T> {
442    fn default() -> Self {
443        Self::new()
444    }
445}
446
// WANT:
// impl<A, T> Copy for InlineArray<A, T> where A: Copy {}

450impl<A, T> Clone for InlineArray<A, T>
451where
452    A: Clone,
453{
454    fn clone(&self) -> Self {
455        let mut copy = Self::new();
456        for i in 0..self.len() {
457            unsafe {
458                copy.write_at(i, self.get_unchecked(i).clone());
459            }
460        }
461        unsafe {
462            *copy.len_mut() = self.len();
463        }
464        copy
465    }
466}
467
468impl<A, T> Deref for InlineArray<A, T> {
469    type Target = [A];
470    fn deref(&self) -> &Self::Target {
471        unsafe { from_raw_parts(self.data(), self.len()) }
472    }
473}
474
475impl<A, T> DerefMut for InlineArray<A, T> {
476    fn deref_mut(&mut self) -> &mut Self::Target {
477        unsafe { from_raw_parts_mut(self.data_mut(), self.len()) }
478    }
479}
480
481impl<A, T> Borrow<[A]> for InlineArray<A, T> {
482    fn borrow(&self) -> &[A] {
483        self.deref()
484    }
485}
486
487impl<A, T> BorrowMut<[A]> for InlineArray<A, T> {
488    fn borrow_mut(&mut self) -> &mut [A] {
489        self.deref_mut()
490    }
491}
492
493impl<A, T> AsRef<[A]> for InlineArray<A, T> {
494    fn as_ref(&self) -> &[A] {
495        self.deref()
496    }
497}
498
499impl<A, T> AsMut<[A]> for InlineArray<A, T> {
500    fn as_mut(&mut self) -> &mut [A] {
501        self.deref_mut()
502    }
503}
504impl<A, T, Slice> PartialEq<Slice> for InlineArray<A, T>
505where
506    Slice: Borrow<[A]>,
507    A: PartialEq,
508{
509    fn eq(&self, other: &Slice) -> bool {
510        self.deref() == other.borrow()
511    }
512}
513
// Equality is a full equivalence relation whenever the element type's is.
impl<A, T> Eq for InlineArray<A, T> where A: Eq {}

516impl<A, T> PartialOrd for InlineArray<A, T>
517where
518    A: PartialOrd,
519{
520    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
521        self.iter().partial_cmp(other.iter())
522    }
523}
524
525impl<A, T> Ord for InlineArray<A, T>
526where
527    A: Ord,
528{
529    fn cmp(&self, other: &Self) -> Ordering {
530        self.iter().cmp(other.iter())
531    }
532}
533
534impl<A, T> Debug for InlineArray<A, T>
535where
536    A: Debug,
537{
538    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
539        f.write_str("Chunk")?;
540        f.debug_list().entries(self.iter()).finish()
541    }
542}
543
544impl<A, T> Hash for InlineArray<A, T>
545where
546    A: Hash,
547{
548    fn hash<H>(&self, hasher: &mut H)
549    where
550        H: Hasher,
551    {
552        for item in self {
553            item.hash(hasher)
554        }
555    }
556}
557
impl<A, T> IntoIterator for InlineArray<A, T> {
    type Item = A;
    type IntoIter = Iter<A, T>;
    /// Consume the array, yielding each element by value.
    fn into_iter(self) -> Self::IntoIter {
        Iter { array: self }
    }
}

566impl<A, T> FromIterator<A> for InlineArray<A, T> {
567    fn from_iter<I>(it: I) -> Self
568    where
569        I: IntoIterator<Item = A>,
570    {
571        let mut chunk = Self::new();
572        for item in it {
573            chunk.push(item);
574        }
575        chunk
576    }
577}
578
579impl<'a, A, T> IntoIterator for &'a InlineArray<A, T> {
580    type Item = &'a A;
581    type IntoIter = SliceIter<'a, A>;
582    fn into_iter(self) -> Self::IntoIter {
583        self.iter()
584    }
585}
586
587impl<'a, A, T> IntoIterator for &'a mut InlineArray<A, T> {
588    type Item = &'a mut A;
589    type IntoIter = SliceIterMut<'a, A>;
590    fn into_iter(self) -> Self::IntoIter {
591        self.iter_mut()
592    }
593}
594
595impl<A, T> Extend<A> for InlineArray<A, T> {
596    /// Append the contents of the iterator to the back of the array.
597    ///
598    /// Panics if the array exceeds its capacity.
599    ///
600    /// Time: O(n) for the length of the iterator
601    fn extend<I>(&mut self, it: I)
602    where
603        I: IntoIterator<Item = A>,
604    {
605        for item in it {
606            self.push(item);
607        }
608    }
609}
610
611impl<'a, A, T> Extend<&'a A> for InlineArray<A, T>
612where
613    A: 'a + Copy,
614{
615    /// Append the contents of the iterator to the back of the array.
616    ///
617    /// Panics if the array exceeds its capacity.
618    ///
619    /// Time: O(n) for the length of the iterator
620    fn extend<I>(&mut self, it: I)
621    where
622        I: IntoIterator<Item = &'a A>,
623    {
624        for item in it {
625            self.push(*item);
626        }
627    }
628}
629
#[allow(dead_code)]
#[cfg(test)]
mod test {
    use super::*;
    use crate::tests::DropTest;
    use std::sync::atomic::{AtomicUsize, Ordering};

    // `DropTest` bumps the counter on creation and decrements it on drop, so
    // the counter tracks the number of live elements.
    #[test]
    fn dropping() {
        let counter = AtomicUsize::new(0);
        {
            let mut chunk: InlineArray<DropTest<'_>, [usize; 32]> = InlineArray::new();
            for _i in 0..16 {
                chunk.push(DropTest::new(&counter));
            }
            assert_eq!(16, counter.load(Ordering::Relaxed));
            for _i in 0..8 {
                chunk.pop();
            }
            assert_eq!(8, counter.load(Ordering::Relaxed));
        }
        // Dropping the array must drop the remaining elements.
        assert_eq!(0, counter.load(Ordering::Relaxed));
    }

    // `truncate` must drop exactly the removed tail elements.
    #[test]
    fn truncate() {
        let counter = AtomicUsize::new(0);
        {
            let mut chunk: InlineArray<DropTest<'_>, [usize; 32]> = InlineArray::new();
            for _i in 0..16 {
                chunk.push(DropTest::new(&counter));
            }
            assert_eq!(16, counter.load(Ordering::Relaxed));
            chunk.truncate(8);
            assert_eq!(8, counter.load(Ordering::Relaxed));
        }
        assert_eq!(0, counter.load(Ordering::Relaxed));
    }

    // ZST elements give `CAPACITY == usize::MAX`, so far more than the host
    // type's byte size can be "stored".
    #[test]
    fn zero_sized_values() {
        let mut chunk: InlineArray<(), [usize; 32]> = InlineArray::new();
        for _i in 0..65536 {
            chunk.push(());
        }
        assert_eq!(65536, chunk.len());
        assert_eq!(Some(()), chunk.pop());
    }

    // The host type may have smaller alignment than the elements as long as
    // the container alignment still satisfies them.
    #[test]
    fn low_align_base() {
        let mut chunk: InlineArray<String, [u8; 512]> = InlineArray::new();
        chunk.push("Hello".to_owned());
        assert_eq!(chunk[0], "Hello");

        let mut chunk: InlineArray<String, [u16; 512]> = InlineArray::new();
        chunk.push("Hello".to_owned());
        assert_eq!(chunk[0], "Hello");
    }

    // `f64` needs 8-byte alignment, covered by the `u64` in `_header_align`.
    #[test]
    fn float_align() {
        let mut chunk: InlineArray<f64, [u8; 16]> = InlineArray::new();
        chunk.push(1234.);
        assert_eq!(chunk[0], 1234.);

        let mut chunk: InlineArray<f64, [u8; 17]> = InlineArray::new();
        chunk.push(1234.);
        assert_eq!(chunk[0], 1234.);
    }

    // Regression check: `[A; 0]` alignment fields would make this recursive
    // type definition fail to compile.
    #[test]
    fn recursive_types_compile() {
        #[allow(dead_code)]
        enum Recursive {
            A(InlineArray<Recursive, u64>),
            B,
        }
    }

    // When the element alignment exceeds what the container can provide,
    // the capacity must degrade to zero rather than cause UB.
    #[test]
    fn insufficient_alignment1() {
        #[repr(align(256))]
        struct BigAlign(u8);
        #[repr(align(32))]
        struct MediumAlign(u8);

        assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [u64; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [f64; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [MediumAlign; 256]>::CAPACITY);
    }

    // A zero-capacity array must still support the full API safely.
    #[test]
    fn insufficient_alignment2() {
        #[repr(align(256))]
        struct BigAlign(usize);

        let mut bad: InlineArray<BigAlign, [usize; 256]> = InlineArray::new();
        assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
        assert_eq!(0, bad.len());
        assert_eq!(0, bad[..].len());
        assert!(bad.is_full());
        assert_eq!(0, bad.drain().count());
        assert!(bad.pop().is_none());
        assert!(bad.remove(0).is_none());
        assert!(bad.split_off(0).is_full());
        bad.clear();
    }

    // Highly-aligned elements work when the host type provides the alignment,
    // and pushed elements land on properly aligned addresses.
    #[test]
    fn sufficient_alignment1() {
        #[repr(align(256))]
        struct BigAlign(u8);

        assert_eq!(13, InlineArray::<BigAlign, [BigAlign; 14]>::CAPACITY);
        assert_eq!(1, InlineArray::<BigAlign, [BigAlign; 2]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [BigAlign; 1]>::CAPACITY);

        let mut chunk: InlineArray<BigAlign, [BigAlign; 2]> = InlineArray::new();
        chunk.push(BigAlign(42));
        assert_eq!(
            chunk.first().unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
            0
        );
    }

    // The host type's alignment may exceed the element's; capacity is computed
    // from the (larger) container alignment.
    #[test]
    fn sufficient_alignment2() {
        #[repr(align(128))]
        struct BigAlign([u8; 64]);
        #[repr(align(256))]
        struct BiggerAlign(u8);
        assert_eq!(128, mem::align_of::<BigAlign>());
        assert_eq!(256, mem::align_of::<BiggerAlign>());

        assert_eq!(199, InlineArray::<BigAlign, [BiggerAlign; 100]>::CAPACITY);
        assert_eq!(3, InlineArray::<BigAlign, [BiggerAlign; 2]>::CAPACITY);
        assert_eq!(1, InlineArray::<BigAlign, [BiggerAlign; 1]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [BiggerAlign; 0]>::CAPACITY);

        let mut chunk: InlineArray<BigAlign, [BiggerAlign; 1]> = InlineArray::new();
        chunk.push(BigAlign([0; 64]));
        assert_eq!(
            chunk.first().unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
            0
        );
    }
}