inline_array/
lib.rs

1#![allow(unsafe_code)]
2
3//! [`InlineArray`] is a stack-inlinable array of bytes that is intended for situations where many bytes
4//! are being shared in database-like scenarios, where optimizing for space usage is extremely
5//! important.
6//!
7//! `InlineArray` uses 8 bytes on the stack. It will inline arrays of up to 7 bytes. If the bytes
8//! are longer than that, it will store them in an optimized reference-count-backed structure of
9//! two different variants. For arrays up to length 255, the data is stored with an `AtomicU8`
10//! reference counter and `u8` length field, for only two bytes of overhead. For values larger
11//! than that, they are stored with an `AtomicU16` reference counter and a 48-bit length field.
12//! If the maximum counter is reached for either variant, the bytes are copied into a new
13//! `InlineArray` with a fresh reference count of 1. This is made with the assumption that most
14//! reference counts will be far lower than 2^16 and only rarely surpassing 255 in the small case.
15//!
16//! The inline and both types of shared instances of `InlineArray` guarantee that the stored array is
17//! always aligned to 8-byte boundaries, regardless of if it is inline on the stack or
18//! shared on the heap. This is advantageous for using in combination with certain
19//! zero-copy serialization techniques that require alignment guarantees.
20//!
21//! Byte arrays that require more than 48 bits to store their length (256 terabytes) are not supported.
22//!
23//! [`InlineArray::make_mut`] (inspired by [`std::sync::Arc::make_mut`]) can be used for getting a mutable
//! reference to the bytes in this structure. If the shared reference counter is higher than 1, this acts
25//! like a [`std::borrow::Cow`] and will make self into a private copy that is safe for modification.
26//!
27//! # Features
28//!
29//! * `serde` implements `serde::Serialize` and `serde::Deserialize` for `InlineArray` (disabled by
30//! default)
31//!
32//! # Examples
33//!
34//! ```
35//! use inline_array::InlineArray;
36//!
37//! let ia = InlineArray::from(b"yo!");
38//!
39//! // then use it more or less like you would an Arc<[u8]>
40//! ```
41
42use std::{
43    alloc::{alloc, dealloc, Layout},
44    convert::TryFrom,
45    fmt,
46    hash::{Hash, Hasher},
47    iter::FromIterator,
48    mem::size_of,
49    num::NonZeroU64,
50    ops::Deref,
51    sync::atomic::{AtomicU16, AtomicU8, Ordering},
52};
53
#[cfg(feature = "concurrent_map_minimum")]
impl concurrent_map::Minimum for InlineArray {
    // The empty array sorts before every other byte array.
    const MIN: InlineArray = EMPTY;
}
58
59#[cfg(feature = "serde")]
60mod serde;
61
// Total size of an `InlineArray` on the stack, in bytes.
const SZ: usize = 8;
// Arrays of up to this many bytes are stored inline; the final stack byte
// holds the tag and the inline length.
const INLINE_CUTOFF: usize = SZ - 1;
// Arrays of up to this many bytes use the small remote representation
// (`AtomicU8` refcount + `u8` length trailer).
const SMALL_REMOTE_CUTOFF: usize = u8::MAX as usize;
// Number of bytes used to encode the length of a big remote array
// (48-bit little-endian lengths).
const BIG_REMOTE_LEN_BYTES: usize = 6;

// Tag values stored in the low 2 bits of the final stack byte, used by
// `kind()` to discriminate the representation.
const INLINE_TRAILER_TAG: u8 = 0b01;
const SMALL_REMOTE_TRAILER_TAG: u8 = 0b10;
const BIG_REMOTE_TRAILER_TAG: u8 = 0b11;
// Masks selecting the tag bits vs. the pointer bits of the final byte.
const TRAILER_TAG_MASK: u8 = 0b0000_0011;
const TRAILER_PTR_MASK: u8 = 0b1111_1100;

/// A const-friendly empty `InlineArray`
pub const EMPTY: InlineArray = InlineArray([0, 0, 0, 0, 0, 0, 0, INLINE_TRAILER_TAG]);
75
/// The three storage representations, as encoded in the low two bits of
/// the final stack byte.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind {
    /// Up to 7 bytes stored directly in the stack word.
    Inline,
    /// Heap allocation with a `SmallRemoteTrailer` after the data (<= 255 bytes).
    SmallRemote,
    /// Heap allocation with a `BigRemoteHeader` before the data (> 255 bytes).
    BigRemote,
}
83
/// Compile-time layout assertions: each binding only type-checks when the
/// queried size/alignment equals the annotated array length, so a layout
/// regression fails the build rather than corrupting memory at runtime.
const fn _static_tests() {
    // static assert that BigRemoteHeader is 8 bytes in size
    let _: [u8; 8] = [0; std::mem::size_of::<BigRemoteHeader>()];

    // static assert that BigRemoteHeader is 8 byte-aligned
    let _: [u8; 8] = [0; std::mem::align_of::<BigRemoteHeader>()];

    // static assert that SmallRemoteTrailer is 2 bytes in size
    let _: [u8; 2] = [0; std::mem::size_of::<SmallRemoteTrailer>()];

    // static assert that SmallRemoteTrailer is 1 byte-aligned
    let _: [u8; 1] = [0; std::mem::align_of::<SmallRemoteTrailer>()];

    // static assert that InlineArray is 8 bytes
    let _: [u8; 8] = [0; std::mem::size_of::<InlineArray>()];

    // static assert that InlineArray is 8 byte-aligned
    let _: [u8; 8] = [0; std::mem::align_of::<InlineArray>()];
}
103
/// A buffer that may either be inline or remote and protected
/// by an Arc. The inner buffer is guaranteed to be aligned to
/// 8 byte boundaries.
// Layout of the 8 stack bytes: either 7 data bytes plus a trailer byte
// (inline), or a tagged pointer to a heap allocation (remote). In every
// case the low 2 bits of the final byte hold the `Kind` tag.
#[repr(align(8))]
pub struct InlineArray([u8; SZ]);
109
110impl Clone for InlineArray {
111    fn clone(&self) -> InlineArray {
112        // We use 16 bytes for the reference count at
113        // the cost of this CAS and copying the inline
114        // array when we reach our max reference count size.
115        //
116        // When measured against the standard Arc reference
117        // count increment, this had a negligible performance
118        // hit that only became measurable at high contention,
119        // which is probably not likely for DB workloads where
120        // it is expected that most concurrent operations will
121        // distributed somewhat across larger structures.
122
123        if self.kind() == Kind::SmallRemote {
124            let rc = &self.deref_small_trailer().rc;
125
126            loop {
127                let current = rc.load(Ordering::Relaxed);
128                if current == u8::MAX {
129                    return InlineArray::from(self.deref());
130                }
131
132                let cas_res = rc.compare_exchange_weak(
133                    current,
134                    current + 1,
135                    Ordering::Relaxed,
136                    Ordering::Relaxed,
137                );
138                if cas_res.is_ok() {
139                    break;
140                }
141            }
142        } else if self.kind() == Kind::BigRemote {
143            let rc = &self.deref_big_header().rc;
144
145            loop {
146                let current = rc.load(Ordering::Relaxed);
147                if current == u16::MAX {
148                    return InlineArray::from(self.deref());
149                }
150
151                let cas_res = rc.compare_exchange_weak(
152                    current,
153                    current + 1,
154                    Ordering::Relaxed,
155                    Ordering::Relaxed,
156                );
157                if cas_res.is_ok() {
158                    break;
159                }
160            }
161        }
162        InlineArray(self.0)
163    }
164}
165
impl Drop for InlineArray {
    fn drop(&mut self) {
        let kind = self.kind();

        if kind == Kind::SmallRemote {
            let small_trailer = self.deref_small_trailer();
            // `Release` publishes all of this handle's accesses to whichever
            // thread ends up performing the deallocation.
            let rc = small_trailer.rc.fetch_sub(1, Ordering::Release) - 1;

            if rc == 0 {
                // Pairs with the `Release` decrements above so every other
                // handle's accesses happen-before the dealloc below.
                std::sync::atomic::fence(Ordering::Acquire);

                // Allocation shape: [data bytes][SmallRemoteTrailer], 8-aligned,
                // matching the layout built in `InlineArray::new`.
                let layout = Layout::from_size_align(
                    small_trailer.len() + size_of::<SmallRemoteTrailer>(),
                    8,
                )
                .unwrap();

                // SAFETY: we observed the count hit zero, so this is the last
                // handle. `remote_ptr` points at the trailer, and the data
                // (allocation start) begins `len` bytes before it.
                unsafe {
                    let ptr = self.remote_ptr().sub(small_trailer.len());
                    dealloc(ptr as *mut u8, layout);
                }
            }
        } else if kind == Kind::BigRemote {
            let big_header = self.deref_big_header();
            let rc = big_header.rc.fetch_sub(1, Ordering::Release) - 1;

            if rc == 0 {
                std::sync::atomic::fence(Ordering::Acquire);

                // Allocation shape: [BigRemoteHeader][data bytes], 8-aligned.
                let layout =
                    Layout::from_size_align(big_header.len() + size_of::<BigRemoteHeader>(), 8)
                        .unwrap();

                // SAFETY: last handle; for big remote the header pointer is
                // the allocation start, so it is freed directly.
                unsafe {
                    dealloc(self.remote_ptr() as *mut u8, layout);
                }
            }
        }
    }
}
206
/// Metadata stored *after* the data bytes of a small remote allocation:
/// a 1-byte reference count followed by a 1-byte length, for two bytes of
/// overhead total.
struct SmallRemoteTrailer {
    // Number of live `InlineArray` handles sharing the allocation.
    rc: AtomicU8,
    // Length of the data bytes that precede this trailer.
    len: u8,
}
211
impl SmallRemoteTrailer {
    /// Length of the shared byte array, widened to `usize` for indexing.
    const fn len(&self) -> usize {
        self.len as usize
    }
}
217
/// Metadata stored *before* the data bytes of a big remote allocation:
/// a 2-byte reference count plus a 48-bit little-endian length.
/// `repr(align(8))` keeps the header (and therefore the data that follows
/// it) aligned to 8-byte boundaries.
#[repr(align(8))]
struct BigRemoteHeader {
    // Number of live `InlineArray` handles sharing the allocation.
    rc: AtomicU16,
    // Little-endian 48-bit length of the data following this header.
    len: [u8; BIG_REMOTE_LEN_BYTES],
}
223
impl BigRemoteHeader {
    /// Decodes the 48-bit little-endian length field into a `usize`.
    const fn len(&self) -> usize {
        // On 32-bit targets (or when emulating one via `fake_32_bit`) only
        // the low 4 length bytes can fit in `usize`, so the top two are
        // dropped; `new` guarantees they are zero for representable lengths.
        #[cfg(any(target_pointer_width = "32", feature = "fake_32_bit"))]
        let buf: [u8; 4] = [self.len[0], self.len[1], self.len[2], self.len[3]];

        // On real 64-bit targets, zero-extend the 6 length bytes to 8.
        #[cfg(all(target_pointer_width = "64", not(feature = "fake_32_bit")))]
        let buf: [u8; 8] = [
            self.len[0],
            self.len[1],
            self.len[2],
            self.len[3],
            self.len[4],
            self.len[5],
            0,
            0,
        ];

        #[cfg(feature = "fake_32_bit")]
        let ret = u32::from_le_bytes(buf) as usize;

        #[cfg(not(feature = "fake_32_bit"))]
        let ret = usize::from_le_bytes(buf);

        ret
    }
}
250
impl Deref for InlineArray {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        match self.kind() {
            // Inline: the data occupies the first `inline_len` stack bytes.
            Kind::Inline => &self.0[..self.inline_len()],
            // SAFETY: for small remote, the data sits immediately *before*
            // the trailer that `remote_ptr` points at, so step back `len`.
            Kind::SmallRemote => unsafe {
                let len = self.deref_small_trailer().len();
                let data_ptr = self.remote_ptr().sub(len);
                std::slice::from_raw_parts(data_ptr, len)
            },
            // SAFETY: for big remote, the data sits immediately *after* the
            // 8-byte header at the allocation start.
            Kind::BigRemote => unsafe {
                let data_ptr = self.remote_ptr().add(size_of::<BigRemoteHeader>());
                let len = self.deref_big_header().len();
                std::slice::from_raw_parts(data_ptr, len)
            },
        }
    }
}
271
272impl AsRef<[u8]> for InlineArray {
273    #[inline]
274    fn as_ref(&self) -> &[u8] {
275        self
276    }
277}
278
279impl Default for InlineArray {
280    fn default() -> Self {
281        Self::from(&[])
282    }
283}
284
285impl Hash for InlineArray {
286    fn hash<H: Hasher>(&self, state: &mut H) {
287        self.deref().hash(state);
288    }
289}
290
291impl InlineArray {
292    fn new(slice: &[u8]) -> Self {
293        let mut data = [0_u8; SZ];
294        if slice.len() <= INLINE_CUTOFF {
295            data[SZ - 1] = u8::try_from(slice.len()).unwrap() << 2;
296            data[..slice.len()].copy_from_slice(slice);
297            data[SZ - 1] |= INLINE_TRAILER_TAG;
298        } else if slice.len() <= SMALL_REMOTE_CUTOFF {
299            let layout =
300                Layout::from_size_align(slice.len() + size_of::<SmallRemoteTrailer>(), 8).unwrap();
301
302            let trailer = SmallRemoteTrailer {
303                rc: 1.into(),
304                len: u8::try_from(slice.len()).unwrap(),
305            };
306
307            unsafe {
308                let data_ptr = alloc(layout);
309                assert!(!data_ptr.is_null());
310                let trailer_ptr = data_ptr.add(slice.len());
311
312                std::ptr::write(trailer_ptr as *mut SmallRemoteTrailer, trailer);
313                std::ptr::copy_nonoverlapping(slice.as_ptr(), data_ptr, slice.len());
314                std::ptr::write_unaligned(data.as_mut_ptr() as _, trailer_ptr);
315            }
316
317            // assert that the bottom 3 bits are empty, as we expect
318            // the buffer to always have an alignment of 8 (2 ^ 3).
319            #[cfg(not(miri))]
320            assert_eq!(data[SZ - 1] & 0b111, 0);
321
322            data[SZ - 1] |= SMALL_REMOTE_TRAILER_TAG;
323        } else {
324            let layout =
325                Layout::from_size_align(slice.len() + size_of::<BigRemoteHeader>(), 8).unwrap();
326
327            let slice_len_buf: [u8; 8] = (slice.len() as u64).to_le_bytes();
328
329            let len: [u8; BIG_REMOTE_LEN_BYTES] = [
330                slice_len_buf[0],
331                slice_len_buf[1],
332                slice_len_buf[2],
333                slice_len_buf[3],
334                slice_len_buf[4],
335                slice_len_buf[5],
336            ];
337            assert_eq!(slice_len_buf[6], 0);
338            assert_eq!(slice_len_buf[7], 0);
339
340            let header = BigRemoteHeader { rc: 1.into(), len };
341
342            unsafe {
343                let header_ptr = alloc(layout);
344                assert!(!header_ptr.is_null());
345                let data_ptr = header_ptr.add(size_of::<BigRemoteHeader>());
346
347                std::ptr::write(header_ptr as *mut BigRemoteHeader, header);
348                std::ptr::copy_nonoverlapping(slice.as_ptr(), data_ptr, slice.len());
349                std::ptr::write_unaligned(data.as_mut_ptr() as _, header_ptr);
350            }
351
352            // assert that the bottom 3 bits are empty, as we expect
353            // the buffer to always have an alignment of 8 (2 ^ 3).
354            #[cfg(not(miri))]
355            assert_eq!(data[SZ - 1] & 0b111, 0);
356
357            data[SZ - 1] |= BIG_REMOTE_TRAILER_TAG;
358        }
359        Self(data)
360    }
361
362    fn remote_ptr(&self) -> *const u8 {
363        assert_ne!(self.kind(), Kind::Inline);
364        let mut copied = self.0;
365        copied[SZ - 1] &= TRAILER_PTR_MASK;
366
367        unsafe { std::ptr::read((&copied).as_ptr() as *const *const u8) }
368    }
369
370    fn deref_small_trailer(&self) -> &SmallRemoteTrailer {
371        assert_eq!(self.kind(), Kind::SmallRemote);
372        unsafe { &*(self.remote_ptr() as *mut SmallRemoteTrailer) }
373    }
374
375    fn deref_big_header(&self) -> &BigRemoteHeader {
376        assert_eq!(self.kind(), Kind::BigRemote);
377        unsafe { &*(self.remote_ptr() as *mut BigRemoteHeader) }
378    }
379
380    #[cfg(miri)]
381    fn inline_len(&self) -> usize {
382        (self.trailer() >> 2) as usize
383    }
384
385    #[cfg(miri)]
386    fn kind(&self) -> Kind {
387        self.trailer() & TRAILER_TAG_MASK == INLINE_TRAILER_TAG
388    }
389
390    #[cfg(miri)]
391    fn inline_trailer(&self) -> u8 {
392        self.deref()[SZ - 1]
393    }
394
395    #[cfg(not(miri))]
396    const fn inline_len(&self) -> usize {
397        (self.inline_trailer() >> 2) as usize
398    }
399
400    #[cfg(not(miri))]
401    const fn kind(&self) -> Kind {
402        match self.inline_trailer() & TRAILER_TAG_MASK {
403            INLINE_TRAILER_TAG => Kind::Inline,
404            SMALL_REMOTE_TRAILER_TAG => Kind::SmallRemote,
405            BIG_REMOTE_TRAILER_TAG => Kind::BigRemote,
406            _other => unsafe { std::hint::unreachable_unchecked() },
407        }
408    }
409
410    #[cfg(not(miri))]
411    const fn inline_trailer(&self) -> u8 {
412        self.0[SZ - 1]
413    }
414
415    /// This function returns a mutable reference to the inner
416    /// byte array. If there are more than 1 atomic references
417    /// to the inner array, the array is copied into a new
418    /// `InlineVec` and a reference to that is returned. This
419    /// functions similarly in spirit to [`std::sync::Arc::make_mut`].
420    pub fn make_mut(&mut self) -> &mut [u8] {
421        match self.kind() {
422            Kind::Inline => {
423                let inline_len = self.inline_len();
424                &mut self.0[..inline_len]
425            }
426            Kind::SmallRemote => {
427                if self.deref_small_trailer().rc.load(Ordering::Acquire) != 1 {
428                    *self = InlineArray::from(self.deref())
429                }
430                unsafe {
431                    let len = self.deref_small_trailer().len();
432                    let data_ptr = self.remote_ptr().sub(len);
433                    std::slice::from_raw_parts_mut(data_ptr as *mut u8, len)
434                }
435            }
436            Kind::BigRemote => {
437                if self.deref_big_header().rc.load(Ordering::Acquire) != 1 {
438                    *self = InlineArray::from(self.deref())
439                }
440                unsafe {
441                    let data_ptr = self.remote_ptr().add(size_of::<BigRemoteHeader>());
442                    let len = self.deref_big_header().len();
443                    std::slice::from_raw_parts_mut(data_ptr as *mut u8, len)
444                }
445            }
446        }
447    }
448
449    /// Similar in spirit to [`std::boxed::Box::into_raw`] except always keeps the 8-byte representation,
450    /// so we return a `NonZeroU64` here instead of a pointer. Must be paired with exactly one
451    /// corresponding [`InlineArray::from_raw`] to avoid a leak.
452    ///
453    /// Be certain to pay attention to the unsafe contract for `from_raw`.
454    ///
455    /// # Examples
456    /// ```
457    /// use std::num::NonZeroU64;
458    ///
459    /// use inline_array::InlineArray;
460    ///
461    /// let bytes = b"yo!";
462    ///
463    /// let ia = InlineArray::from(bytes);
464    ///
465    /// let ptr: NonZeroU64 = ia.into_raw();
466    ///
467    /// let ia_2 = unsafe { InlineArray::from_raw(ptr) };
468    ///
469    /// assert_eq!(&ia_2, bytes);
470    /// ```
471    pub fn into_raw(self) -> NonZeroU64 {
472        let ret = NonZeroU64::new(u64::from_le_bytes(self.0)).unwrap();
473
474        std::mem::forget(self);
475
476        ret
477    }
478
479    /// Similar in spirit to [`std::boxed::Box::from_raw`].
480    ///
481    /// # Unsafe contract
482    ///
483    /// * Must only be used with a `NonZeroU64` that was produced from [`InlineArray::into_raw`]
484    /// * When an [`InlineArray`] drops, it decrements a reference count (if its size is over the inline threshold)
485    ///   and when that reference count reaches 0, the backing memory that this points to is
486    ///   deallocated.
487    /// * As the reference count is small, when the max reference count is reached, a new
488    ///   allocation is created with a new reference count. This means that you can't expect
489    ///   to have as many [`InlineArray`] objects created from the same arbitrary `NonZeroU64` as calls to
490    ///   clone on the initial object would intuitively (but mistakenly) allow.
491    /// * To be safe in light of the above two points, treat calls to [`InlineArray::from_raw`] as
492    ///   consuming, owned semantics for corresponding previous calls to a `into_raw`. If you try
493    ///   to be tricky with multiple calls to `from_raw` for a particular `NonZeroU64`, you must be
494    ///   certain that drops are not causing your backing allocation to be deallocated and leading
495    ///   to a use after free, which may be harder to reason about than you expect at first glance.
496    ///   Headaches around use after frees are likely to follow if you don't treat a `NonZeroU64` created by a
497    ///   particular call to `into_raw` as a unique pointer that should be paired with at most one
498    ///   call to `from_raw`, similar to [`std::sync::Arc::from_raw`] but actually harder to reason
499    ///   about due to the smaller reference counts causing new allocations when they reach their
500    ///   max count.
501    ///
502    /// # Examples
503    /// ```
504    /// use std::num::NonZeroU64;
505    ///
506    /// use inline_array::InlineArray;
507    ///
508    /// let bytes = b"yo!";
509    ///
510    /// let ia = InlineArray::from(bytes);
511    ///
512    /// let ptr: NonZeroU64 = ia.into_raw();
513    ///
514    /// let ia_2 = unsafe { InlineArray::from_raw(ptr) };
515    ///
516    /// assert_eq!(&ia_2, bytes);
517    /// ```
518    pub unsafe fn from_raw(raw: NonZeroU64) -> InlineArray {
519        InlineArray(raw.get().to_le_bytes())
520    }
521}
522
523impl FromIterator<u8> for InlineArray {
524    fn from_iter<T>(iter: T) -> Self
525    where
526        T: IntoIterator<Item = u8>,
527    {
528        let bs: Vec<u8> = iter.into_iter().collect();
529        bs.into()
530    }
531}
532
533impl From<&[u8]> for InlineArray {
534    fn from(slice: &[u8]) -> Self {
535        InlineArray::new(slice)
536    }
537}
538
539impl From<&str> for InlineArray {
540    fn from(s: &str) -> Self {
541        Self::from(s.as_bytes())
542    }
543}
544
545impl From<String> for InlineArray {
546    fn from(s: String) -> Self {
547        Self::from(s.as_bytes())
548    }
549}
550
551impl From<&String> for InlineArray {
552    fn from(s: &String) -> Self {
553        Self::from(s.as_bytes())
554    }
555}
556
557impl From<&InlineArray> for InlineArray {
558    fn from(v: &Self) -> Self {
559        v.clone()
560    }
561}
562
563impl From<Vec<u8>> for InlineArray {
564    fn from(v: Vec<u8>) -> Self {
565        InlineArray::new(&v)
566    }
567}
568
569impl From<Box<[u8]>> for InlineArray {
570    fn from(v: Box<[u8]>) -> Self {
571        InlineArray::new(&v)
572    }
573}
574
575impl std::borrow::Borrow<[u8]> for InlineArray {
576    fn borrow(&self) -> &[u8] {
577        self.as_ref()
578    }
579}
580
581impl std::borrow::Borrow<[u8]> for &InlineArray {
582    fn borrow(&self) -> &[u8] {
583        self.as_ref()
584    }
585}
586
587impl<const N: usize> From<&[u8; N]> for InlineArray {
588    fn from(v: &[u8; N]) -> Self {
589        Self::from(&v[..])
590    }
591}
592
593impl Ord for InlineArray {
594    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
595        self.as_ref().cmp(other.as_ref())
596    }
597}
598
599impl PartialOrd for InlineArray {
600    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
601        Some(self.cmp(other))
602    }
603}
604
605impl<T: AsRef<[u8]>> PartialEq<T> for InlineArray {
606    fn eq(&self, other: &T) -> bool {
607        self.as_ref() == other.as_ref()
608    }
609}
610
611impl PartialEq<[u8]> for InlineArray {
612    fn eq(&self, other: &[u8]) -> bool {
613        self.as_ref() == other
614    }
615}
616
// Full equality is sound: `PartialEq` compares the dereferenced byte
// slices, and slice equality is reflexive for all representations.
impl Eq for InlineArray {}
618
619impl fmt::Debug for InlineArray {
620    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
621        self.as_ref().fmt(f)
622    }
623}
624
#[cfg(test)]
mod tests {
    use super::InlineArray;

    #[test]
    fn inline_array_smoke() {
        // 3 bytes: exercises the inline representation.
        let ia = InlineArray::from(vec![1, 2, 3]);
        assert_eq!(ia, vec![1, 2, 3]);
    }

    #[test]
    fn small_remote_array_smoke() {
        // 8..=255 bytes: exercises the small remote representation.
        let ia = InlineArray::from(&[4; 200][..]);
        assert_eq!(ia, vec![4; 200]);
    }

    #[test]
    fn big_remote_array_smoke() {
        // > 255 bytes: exercises the big remote representation.
        let ia = InlineArray::from(&[4; 256][..]);
        assert_eq!(ia, vec![4; 256]);
    }

    #[test]
    fn boxed_slice_conversion() {
        let boite1: Box<[u8]> = Box::new([1, 2, 3]);
        let iv1: InlineArray = boite1.into();
        assert_eq!(iv1, vec![1, 2, 3]);
        let boite2: Box<[u8]> = Box::new([4; 128]);
        let iv2: InlineArray = boite2.into();
        assert_eq!(iv2, vec![4; 128]);
    }

    #[test]
    fn inline_array_as_mut_identity() {
        let initial = &[1];
        let mut iv = InlineArray::from(initial);
        assert_eq!(initial, &*iv);
        assert_eq!(initial, iv.make_mut());
    }

    /// Checks that clone, `Deref`, and `make_mut` all observe identical
    /// bytes and that the backing buffer is 8-byte aligned.
    fn prop_identity(inline_array: &InlineArray) -> bool {
        let mut iv2 = inline_array.clone();

        if iv2 != inline_array {
            println!("expected clone to equal original");
            return false;
        }

        // Fixed copy-pasted diagnostics below: this check is for Deref and
        // the following one for make_mut, not "AsMut" for both.
        if *inline_array != *iv2 {
            println!("expected Deref to equal original");
            return false;
        }

        if &*inline_array != iv2.make_mut() {
            println!("expected make_mut to equal original");
            return false;
        }

        let buf: &[u8] = inline_array.as_ref();
        assert_eq!(buf.as_ptr() as usize % 8, 0);

        true
    }

    #[cfg(feature = "serde")]
    fn prop_serde_roundtrip(inline_array: &InlineArray) -> bool {
        let ser = bincode::serialize(inline_array).unwrap();
        let de: InlineArray = bincode::deserialize(&ser).unwrap();
        de == inline_array
    }

    impl quickcheck::Arbitrary for InlineArray {
        fn arbitrary(g: &mut quickcheck::Gen) -> Self {
            InlineArray::from(Vec::arbitrary(g))
        }
    }

    quickcheck::quickcheck! {
        #[cfg_attr(miri, ignore)]
        fn inline_array(item: InlineArray) -> bool {
            dbg!(item.len());
            assert!(prop_identity(&item));

            #[cfg(feature = "serde")]
            assert!(prop_serde_roundtrip(&item));

            true
        }
    }

    #[test]
    fn inline_array_bug_00() {
        assert!(prop_identity(&InlineArray::new(&[
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        ])));
    }
}