use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering;
use core::fmt::{Debug, Error, Formatter};
use core::hash::{Hash, Hasher};
use core::iter::FromIterator;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::ptr::NonNull;
use core::slice::{from_raw_parts, from_raw_parts_mut, Iter as SliceIter, IterMut as SliceIterMut};

mod iter;
pub use self::iter::{Drain, Iter};

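/// A fixed capacity array of `A`, using the memory footprint of a host type
/// `T` for storage: the length header and the elements are packed into the
/// space a `T` would occupy.
///
/// A minimal usage sketch (assuming the crate-root re-export
/// `sized_chunks::InlineArray`):
///
/// ```
/// # use sized_chunks::InlineArray;
/// let mut array: InlineArray<u64, [u64; 8]> = InlineArray::new();
/// array.push(1);
/// array.push(2);
/// assert_eq!(&[1, 2], array.as_ref());
/// ```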
#[repr(C)]
pub struct InlineArray<A, T> {
    // Zero sized, but raises the struct's alignment to that of `(u64, usize)`
    // so the length header can always be stored aligned.
    _header_align: [(u64, usize); 0],
    _phantom: PhantomData<A>,
    data: MaybeUninit<T>,
}

const fn capacity(
    host_size: usize,
    header_size: usize,
    element_size: usize,
    element_align: usize,
    container_align: usize,
) -> usize {
    if element_size == 0 {
        // Zero sized elements take up no storage, so capacity is unbounded.
        usize::MAX
    } else if element_align <= container_align && host_size > header_size {
        // Everything after the length header is available for elements.
        (host_size - header_size) / element_size
    } else {
        // Either the container can't satisfy the element alignment, or there
        // is no space left after the header.
        0
    }
}
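
// A worked example of the calculation above: for `InlineArray<u32, [u64; 8]>`
// on a 64-bit target, `host_size` is 64, `header_size` is 8 and
// `element_size` is 4, and the alignment check passes, so the capacity is
// (64 - 8) / 4 = 14 elements.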

impl<A, T> InlineArray<A, T> {
    const HOST_SIZE: usize = mem::size_of::<T>();
    const ELEMENT_SIZE: usize = mem::size_of::<A>();
    const HEADER_SIZE: usize = mem::size_of::<usize>();
    // Does the length header go in front of the elements? It does whenever
    // its alignment requirement is at least as strict as the elements'.
    const HEADER_FIRST: bool = mem::align_of::<usize>() >= mem::align_of::<A>();
    // Note: at least one of the two skip counts below is always zero.
    // How many `usize`s of header to skip past to reach the first element.
    const ELEMENT_SKIP: usize = Self::HEADER_FIRST as usize;
    // How many elements to skip past to reach the length header.
    const HEADER_SKIP: usize = Self::CAPACITY * (1 - Self::ELEMENT_SKIP);

    /// The maximum number of elements the `InlineArray` can hold.
    pub const CAPACITY: usize = capacity(
        Self::HOST_SIZE,
        Self::HEADER_SIZE,
        Self::ELEMENT_SIZE,
        mem::align_of::<A>(),
        mem::align_of::<Self>(),
    );

    #[inline]
    #[must_use]
    unsafe fn len_const(&self) -> *const usize {
        let ptr = unsafe {
            self.data
                .as_ptr()
                .cast::<A>()
                .add(Self::HEADER_SKIP)
                .cast::<usize>()
        };
        debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    pub(crate) unsafe fn len_mut(&mut self) -> *mut usize {
        let ptr = unsafe {
            self.data
                .as_mut_ptr()
                .cast::<A>()
                .add(Self::HEADER_SKIP)
                .cast::<usize>()
        };
        debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    pub(crate) unsafe fn data(&self) -> *const A {
        if Self::CAPACITY == 0 {
            // With no capacity we never store elements, so a well aligned
            // dangling pointer will do.
            return NonNull::<A>::dangling().as_ptr();
        }
        let ptr = unsafe {
            self.data
                .as_ptr()
                .cast::<usize>()
                .add(Self::ELEMENT_SKIP)
                .cast::<A>()
        };
        debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    unsafe fn data_mut(&mut self) -> *mut A {
        if Self::CAPACITY == 0 {
            return NonNull::<A>::dangling().as_ptr();
        }
        let ptr = unsafe {
            self.data
                .as_mut_ptr()
                .cast::<usize>()
                .add(Self::ELEMENT_SKIP)
                .cast::<A>()
        };
        debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    unsafe fn ptr_at(&self, index: usize) -> *const A {
        debug_assert!(index < Self::CAPACITY);
        unsafe { self.data().add(index) }
    }

    #[inline]
    #[must_use]
    unsafe fn ptr_at_mut(&mut self, index: usize) -> *mut A {
        debug_assert!(index < Self::CAPACITY);
        unsafe { self.data_mut().add(index) }
    }

    #[inline]
    unsafe fn read_at(&self, index: usize) -> A {
        unsafe { ptr::read(self.ptr_at(index)) }
    }

    #[inline]
    unsafe fn write_at(&mut self, index: usize, value: A) {
        unsafe { ptr::write(self.ptr_at_mut(index), value) };
    }

    /// Get the length of the array.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        unsafe { *self.len_const() }
    }

    /// Test if the array is empty.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Test if the array is at capacity.
    #[inline]
    #[must_use]
    pub fn is_full(&self) -> bool {
        self.len() >= Self::CAPACITY
    }

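    /// Construct a new empty array.
    ///
    /// Panics if the length header cannot fit inside `T`, or if `T` cannot
    /// satisfy the alignment of `A`.
    ///
    /// A minimal usage sketch (assuming the crate-root re-export
    /// `sized_chunks::InlineArray`):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let array: InlineArray<u8, [u8; 32]> = InlineArray::new();
    /// assert!(array.is_empty());
    /// ```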
    #[inline]
    #[must_use]
    pub fn new() -> Self {
        assert!(Self::HOST_SIZE > Self::HEADER_SIZE);
        assert!(
            (Self::CAPACITY == 0) || (mem::align_of::<Self>() % mem::align_of::<A>() == 0),
            "InlineArray can't satisfy alignment of {}",
            core::any::type_name::<A>()
        );

        let mut self_ = Self {
            _header_align: [],
            _phantom: PhantomData,
            data: MaybeUninit::uninit(),
        };
        // Sanity check our assumptions about the layout before handing the
        // value out.
        assert_eq!(
            &self_ as *const _ as usize,
            self_.data.as_ptr() as usize,
            "Padding at the start of struct",
        );
        assert_eq!(
            self_.data.as_ptr() as usize % mem::align_of::<usize>(),
            0,
            "Unaligned header"
        );
        assert!(
            mem::size_of::<Self>() == mem::size_of::<T>()
                || mem::align_of::<T>() < mem::align_of::<Self>()
        );
        assert_eq!(0, unsafe { self_.data() } as usize % mem::align_of::<A>());
        assert_eq!(
            0,
            unsafe { self_.data_mut() } as usize % mem::align_of::<A>()
        );
        assert!(Self::ELEMENT_SKIP == 0 || Self::HEADER_SKIP == 0);
        unsafe { ptr::write(self_.len_mut(), 0usize) };
        self_
    }

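    /// Push an item to the back of the array.
    ///
    /// Panics if the array is already at capacity.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.push(3);
    /// assert_eq!(Some(&3), array.last());
    /// ```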
    pub fn push(&mut self, value: A) {
        if self.is_full() {
            panic!("InlineArray::push: chunk size overflow");
        }
        unsafe {
            self.write_at(self.len(), value);
            *self.len_mut() += 1;
        }
    }

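    /// Pop an item off the back of the array, or return `None` if the array
    /// is empty.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.push(3);
    /// assert_eq!(Some(3), array.pop());
    /// assert_eq!(None, array.pop());
    /// ```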
    pub fn pop(&mut self) -> Option<A> {
        if self.is_empty() {
            None
        } else {
            unsafe {
                *self.len_mut() -= 1;
            }
            Some(unsafe { self.read_at(self.len()) })
        }
    }

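    /// Insert a new value at index `index`, shifting all the following values
    /// one step to the right.
    ///
    /// Panics if the index is out of bounds or the array is at capacity.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.push(1);
    /// array.push(3);
    /// array.insert(1, 2);
    /// assert_eq!(&[1, 2, 3], array.as_ref());
    /// ```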
    pub fn insert(&mut self, index: usize, value: A) {
        if self.is_full() {
            panic!("InlineArray::insert: chunk size overflow");
        }
        if index > self.len() {
            panic!("InlineArray::insert: index out of bounds");
        }
        unsafe {
            let src = self.ptr_at_mut(index);
            ptr::copy(src, src.add(1), self.len() - index);
            ptr::write(src, value);
            *self.len_mut() += 1;
        }
    }

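    /// Remove the value at index `index`, shifting all the following values
    /// one step to the left.
    ///
    /// Returns the removed value, or `None` if the index is out of bounds.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.extend(0..4);
    /// assert_eq!(Some(2), array.remove(2));
    /// assert_eq!(&[0, 1, 3], array.as_ref());
    /// ```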
    pub fn remove(&mut self, index: usize) -> Option<A> {
        if index >= self.len() {
            None
        } else {
            unsafe {
                let src = self.ptr_at_mut(index);
                let value = ptr::read(src);
                *self.len_mut() -= 1;
                ptr::copy(src.add(1), src, self.len() - index);
                Some(value)
            }
        }
    }

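    /// Split the array into two at `index`, with the original array retaining
    /// the values before `index` and the returned array holding the rest.
    ///
    /// Panics if the index is out of bounds.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.extend(0..4);
    /// let right = array.split_off(2);
    /// assert_eq!(&[0, 1], array.as_ref());
    /// assert_eq!(&[2, 3], right.as_ref());
    /// ```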
    pub fn split_off(&mut self, index: usize) -> Self {
        if index > self.len() {
            panic!("InlineArray::split_off: index out of bounds");
        }
        let mut out = Self::new();
        if index < self.len() {
            unsafe {
                ptr::copy(self.ptr_at(index), out.data_mut(), self.len() - index);
                *out.len_mut() = self.len() - index;
                *self.len_mut() = index;
            }
        }
        out
    }

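    /// Discard all values from `len` onward, dropping them in place. Does
    /// nothing if `len` is greater than or equal to the current length.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.extend(0..4);
    /// array.truncate(2);
    /// assert_eq!(&[0, 1], array.as_ref());
    /// ```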
    pub fn truncate(&mut self, len: usize) {
        if len >= self.len() {
            return;
        }

        unsafe {
            ptr::drop_in_place::<[A]>(&mut (**self)[len..]);
            *self.len_mut() = len;
        }
    }

    #[inline]
    unsafe fn drop_contents(&mut self) {
        unsafe { ptr::drop_in_place::<[A]>(&mut **self) }
    }

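    /// Remove all contents from the array, dropping every value in place.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.extend(0..4);
    /// array.clear();
    /// assert!(array.is_empty());
    /// ```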
    pub fn clear(&mut self) {
        unsafe {
            self.drop_contents();
            *self.len_mut() = 0;
        }
    }

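    /// Return a draining iterator that removes values from the front of the
    /// array as it is consumed.
    ///
    /// A minimal usage sketch (crate-root re-export assumed):
    ///
    /// ```
    /// # use sized_chunks::InlineArray;
    /// let mut array: InlineArray<usize, [usize; 8]> = InlineArray::new();
    /// array.extend(0..3);
    /// let drained: Vec<usize> = array.drain().collect();
    /// assert_eq!(vec![0, 1, 2], drained);
    /// assert!(array.is_empty());
    /// ```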
    pub fn drain(&mut self) -> Drain<'_, A, T> {
        Drain { array: self }
    }
}

impl<A, T> Drop for InlineArray<A, T> {
    fn drop(&mut self) {
        unsafe { self.drop_contents() }
    }
}

impl<A, T> Default for InlineArray<A, T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<A, T> Clone for InlineArray<A, T>
where
    A: Clone,
{
    fn clone(&self) -> Self {
        let mut copy = Self::new();
        for i in 0..self.len() {
            unsafe {
                copy.write_at(i, self.get_unchecked(i).clone());
            }
        }
        unsafe {
            *copy.len_mut() = self.len();
        }
        copy
    }
}

impl<A, T> Deref for InlineArray<A, T> {
    type Target = [A];
    fn deref(&self) -> &Self::Target {
        unsafe { from_raw_parts(self.data(), self.len()) }
    }
}

impl<A, T> DerefMut for InlineArray<A, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { from_raw_parts_mut(self.data_mut(), self.len()) }
    }
}

impl<A, T> Borrow<[A]> for InlineArray<A, T> {
    fn borrow(&self) -> &[A] {
        self.deref()
    }
}

impl<A, T> BorrowMut<[A]> for InlineArray<A, T> {
    fn borrow_mut(&mut self) -> &mut [A] {
        self.deref_mut()
    }
}

impl<A, T> AsRef<[A]> for InlineArray<A, T> {
    fn as_ref(&self) -> &[A] {
        self.deref()
    }
}

impl<A, T> AsMut<[A]> for InlineArray<A, T> {
    fn as_mut(&mut self) -> &mut [A] {
        self.deref_mut()
    }
}

impl<A, T, Slice> PartialEq<Slice> for InlineArray<A, T>
where
    Slice: Borrow<[A]>,
    A: PartialEq,
{
    fn eq(&self, other: &Slice) -> bool {
        self.deref() == other.borrow()
    }
}

impl<A, T> Eq for InlineArray<A, T> where A: Eq {}

impl<A, T> PartialOrd for InlineArray<A, T>
where
    A: PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}

impl<A, T> Ord for InlineArray<A, T>
where
    A: Ord,
{
    fn cmp(&self, other: &Self) -> Ordering {
        self.iter().cmp(other.iter())
    }
}

impl<A, T> Debug for InlineArray<A, T>
where
    A: Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        f.write_str("Chunk")?;
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<A, T> Hash for InlineArray<A, T>
where
    A: Hash,
{
    fn hash<H>(&self, hasher: &mut H)
    where
        H: Hasher,
    {
        for item in self {
            item.hash(hasher)
        }
    }
}

impl<A, T> IntoIterator for InlineArray<A, T> {
    type Item = A;
    type IntoIter = Iter<A, T>;
    fn into_iter(self) -> Self::IntoIter {
        Iter { array: self }
    }
}

impl<A, T> FromIterator<A> for InlineArray<A, T> {
    fn from_iter<I>(it: I) -> Self
    where
        I: IntoIterator<Item = A>,
    {
        let mut chunk = Self::new();
        for item in it {
            chunk.push(item);
        }
        chunk
    }
}

impl<'a, A, T> IntoIterator for &'a InlineArray<A, T> {
    type Item = &'a A;
    type IntoIter = SliceIter<'a, A>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, A, T> IntoIterator for &'a mut InlineArray<A, T> {
    type Item = &'a mut A;
    type IntoIter = SliceIterMut<'a, A>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<A, T> Extend<A> for InlineArray<A, T> {
    fn extend<I>(&mut self, it: I)
    where
        I: IntoIterator<Item = A>,
    {
        for item in it {
            self.push(item);
        }
    }
}

impl<'a, A, T> Extend<&'a A> for InlineArray<A, T>
where
    A: 'a + Copy,
{
    fn extend<I>(&mut self, it: I)
    where
        I: IntoIterator<Item = &'a A>,
    {
        for item in it {
            self.push(*item);
        }
    }
}

#[allow(dead_code)]
#[cfg(test)]
mod test {
    use super::*;
    use crate::tests::DropTest;
    use std::sync::atomic::{AtomicUsize, Ordering};

    #[test]
    fn dropping() {
        let counter = AtomicUsize::new(0);
        {
            let mut chunk: InlineArray<DropTest<'_>, [usize; 32]> = InlineArray::new();
            for _i in 0..16 {
                chunk.push(DropTest::new(&counter));
            }
            assert_eq!(16, counter.load(Ordering::Relaxed));
            for _i in 0..8 {
                chunk.pop();
            }
            assert_eq!(8, counter.load(Ordering::Relaxed));
        }
        assert_eq!(0, counter.load(Ordering::Relaxed));
    }

    #[test]
    fn truncate() {
        let counter = AtomicUsize::new(0);
        {
            let mut chunk: InlineArray<DropTest<'_>, [usize; 32]> = InlineArray::new();
            for _i in 0..16 {
                chunk.push(DropTest::new(&counter));
            }
            assert_eq!(16, counter.load(Ordering::Relaxed));
            chunk.truncate(8);
            assert_eq!(8, counter.load(Ordering::Relaxed));
        }
        assert_eq!(0, counter.load(Ordering::Relaxed));
    }

    #[test]
    fn zero_sized_values() {
        let mut chunk: InlineArray<(), [usize; 32]> = InlineArray::new();
        for _i in 0..65536 {
            chunk.push(());
        }
        assert_eq!(65536, chunk.len());
        assert_eq!(Some(()), chunk.pop());
    }

    #[test]
    fn low_align_base() {
        let mut chunk: InlineArray<String, [u8; 512]> = InlineArray::new();
        chunk.push("Hello".to_owned());
        assert_eq!(chunk[0], "Hello");

        let mut chunk: InlineArray<String, [u16; 512]> = InlineArray::new();
        chunk.push("Hello".to_owned());
        assert_eq!(chunk[0], "Hello");
    }

    #[test]
    fn float_align() {
        let mut chunk: InlineArray<f64, [u8; 16]> = InlineArray::new();
        chunk.push(1234.);
        assert_eq!(chunk[0], 1234.);

        let mut chunk: InlineArray<f64, [u8; 17]> = InlineArray::new();
        chunk.push(1234.);
        assert_eq!(chunk[0], 1234.);
    }

    #[test]
    fn recursive_types_compile() {
        #[allow(dead_code)]
        enum Recursive {
            A(InlineArray<Recursive, u64>),
            B,
        }
    }

    #[test]
    fn insufficient_alignment1() {
        #[repr(align(256))]
        struct BigAlign(u8);
        #[repr(align(32))]
        struct MediumAlign(u8);

        assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [u64; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [f64; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [MediumAlign; 256]>::CAPACITY);
    }

    #[test]
    fn insufficient_alignment2() {
        #[repr(align(256))]
        struct BigAlign(usize);

        let mut bad: InlineArray<BigAlign, [usize; 256]> = InlineArray::new();
        assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
        assert_eq!(0, bad.len());
        assert_eq!(0, bad[..].len());
        assert!(bad.is_full());
        assert_eq!(0, bad.drain().count());
        assert!(bad.pop().is_none());
        assert!(bad.remove(0).is_none());
        assert!(bad.split_off(0).is_full());
        bad.clear();
    }

    #[test]
    fn sufficient_alignment1() {
        #[repr(align(256))]
        struct BigAlign(u8);

        assert_eq!(13, InlineArray::<BigAlign, [BigAlign; 14]>::CAPACITY);
        assert_eq!(1, InlineArray::<BigAlign, [BigAlign; 2]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [BigAlign; 1]>::CAPACITY);

        let mut chunk: InlineArray<BigAlign, [BigAlign; 2]> = InlineArray::new();
        chunk.push(BigAlign(42));
        assert_eq!(
            chunk.first().unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
            0
        );
    }

    #[test]
    fn sufficient_alignment2() {
        #[repr(align(128))]
        struct BigAlign([u8; 64]);
        #[repr(align(256))]
        struct BiggerAlign(u8);
        assert_eq!(128, mem::align_of::<BigAlign>());
        assert_eq!(256, mem::align_of::<BiggerAlign>());

        assert_eq!(199, InlineArray::<BigAlign, [BiggerAlign; 100]>::CAPACITY);
        assert_eq!(3, InlineArray::<BigAlign, [BiggerAlign; 2]>::CAPACITY);
        assert_eq!(1, InlineArray::<BigAlign, [BiggerAlign; 1]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [BiggerAlign; 0]>::CAPACITY);

        let mut chunk: InlineArray<BigAlign, [BiggerAlign; 1]> = InlineArray::new();
        chunk.push(BigAlign([0; 64]));
        assert_eq!(
            chunk.first().unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
            0
        );
    }
}
778}