use core::mem::MaybeUninit;

use crate::traits::{Initialize, InitializeExt as _, InitializeVectored, TrustedDeref};
use crate::wrappers::{AsUninit, AssertInit, AssertInitVectors, SingleVector};

/// An initializer tracking a container type that dereferences into a slice of
/// possibly-uninitialized items, together with how many of those items have been initialized. The
/// inner data can always be moved out as uninitialized, but when the buffer _has_ been fully
/// initialized, it can be turned into its initialized equivalent.
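///
/// A minimal usage sketch, mirroring the patterns used in this module's tests:
///
/// ```ignore
/// let mut storage = [MaybeUninit::<u8>::uninit(); 16];
/// let mut initializer = BufferInitializer::uninit(&mut storage[..]);
///
/// assert_eq!(initializer.remaining(), 16);
/// initializer.fill_uninit_part(0_u8);
///
/// let init = initializer.try_into_init().expect("the buffer was fully initialized");
/// assert!(init.iter().all(|&byte| byte == 0_u8));
/// ```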
// TODO: Is #[derive(Debug)] sound here?
#[derive(Debug)]
pub struct BufferInitializer<T> {
    // This wraps a _single_ buffer that is not guaranteed to be fully initialized when wrapped,
    // but where the number of initialized items is tracked, so that naïve APIs that expect
    // initialized buffers can still be used without overhead.
    //
    // The inner data, which must implement `Initialize` to do anything useful, points to a slice
    // of possibly uninitialized items, and must always return the same slice from the trait
    // methods (which is part of the unsafe trait implementation contract).
    pub(crate) inner: T,
    // Then we also have the initialization cursor, which marks the start of the uninitialized
    // region. This is unrelated to the number of items filled into the buffer, but it must always
    // be greater than or equal to that count.
    //
    // If this buffer is constructed from an already initialized slice, then this will be set to
    // the total capacity of the buffer. This cursor can never be decreased, and it must be less
    // than or equal to the total capacity.
    //
    // This allows dividing the buffer into an initialized region, and an uninitialized region.
    pub(crate) items_initialized: usize,
    // NOTE: If any of these contracts are broken inside the struct, expect UB. The
    // `debug_assert_validity` method checks this everywhere when debug assertions are enabled.
}

impl<T> BufferInitializer<T> {
    /// Wrap a possibly-uninitialized buffer into the initializer, with the current initialization
    /// cursor set to zero.
    #[inline]
    pub const fn uninit(inner: T) -> Self {
        Self {
            inner,
            items_initialized: 0,
        }
    }

    #[inline]
    pub fn into_inner(self) -> T {
        self.inner
    }

    #[inline]
    pub const fn items_initialized(&self) -> usize {
        self.items_initialized
    }
}
impl<T, Item> BufferInitializer<AsUninit<T>>
where
    T: core::ops::Deref<Target = [Item]> + core::ops::DerefMut + TrustedDeref,
{
    /// Construct an initializer for a container that is already initialized. This ensures that no
    /// bytes are needlessly filled with zeroes, since the contents already count as initialized.
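    ///
    /// A rough sketch of the intended use, following the `new_fills_completely` test below:
    ///
    /// ```ignore
    /// let mut already_initialized = [1_u8, 2, 3, 4];
    /// let initializer = BufferInitializer::new(&mut already_initialized[..]);
    ///
    /// assert_eq!(initializer.remaining(), 0);
    /// assert!(initializer.is_completely_init());
    /// ```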
    pub fn new(init: T) -> Self {
        let mut this = Self::uninit(AsUninit(init));
        // SAFETY: AsUninit wrapper ensures the type was already initialized.
        unsafe {
            this.advance_to_end();
        }
        this
    }
}
impl<T> BufferInitializer<T>
where
    T: Initialize,
{
    pub(crate) fn debug_assert_validity(&self) {
        debug_assert!(self.items_initialized <= self.capacity());
    }

    /// Advance the initialization counter by `count` items.
    ///
    /// # Safety
    ///
    /// For this to be safe, the caller must be sure that the next `count` items after the previous
    /// initialization offset are in fact initialized.
    ///
    /// This method does not do any bounds checking. Hence, `count` must never be larger than
    /// the value returned by [`remaining`](Self::remaining).
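    ///
    /// A rough sketch of the intended pattern, where `produce_item` is a hypothetical source of
    /// initialized items:
    ///
    /// ```ignore
    /// let uninit = initializer.uninit_part_mut();
    /// let count = uninit.len();
    /// for slot in uninit {
    ///     slot.write(produce_item());
    /// }
    /// // SAFETY: exactly `count` items past the previous initialization offset were just written.
    /// unsafe { initializer.advance(count); }
    /// ```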
    #[inline]
    pub unsafe fn advance(&mut self, count: usize) {
        self.items_initialized += count;
    }
    /// Advance the initialization counter to the end.
    ///
    /// # Safety
    ///
    /// While this eliminates the need for the caller to bounds check manually, unlike with
    /// [`advance`](Self::advance), the caller must uphold the initialization invariant.
    #[inline]
    pub unsafe fn advance_to_end(&mut self) {
        self.items_initialized = self.all_uninit().len();
    }

    /// Assume that the inner value is fully initialized, finalizing the original type into its
    /// initialized counterpart.
    ///
    /// # Safety
    ///
    /// The caller must uphold the initialization invariant.
    #[inline]
    pub unsafe fn assume_init(self) -> AssertInit<T> {
        self.inner.assume_init()
    }

    #[inline]
    pub fn into_raw_parts(self) -> (T, usize) {
        let Self {
            inner,
            items_initialized,
        } = self;
        (inner, items_initialized)
    }

    /// Retrieve a slice of possibly uninitialized items, covering the entire buffer.
    #[inline]
    pub fn all_uninit(&self) -> &[MaybeUninit<T::Item>] {
        self.inner.as_maybe_uninit_slice()
    }
    /// Retrieve a mutable slice of possibly uninitialized items, covering the entire buffer.
    ///
    /// # Safety
    ///
    /// This is unsafe because the caller must not de-initialize the slice, as the API also
    /// promises that the initialized region always stays initialized.
    #[inline]
    pub unsafe fn all_uninit_mut(&mut self) -> &mut [MaybeUninit<T::Item>] {
        self.inner.as_maybe_uninit_slice_mut()
    }
    /// Get the total size of the buffer that is being initialized.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.all_uninit().len()
    }
    /// Get the number of items that must be initialized before the buffer is fully initialized,
    /// and can be turned into an initialized type (e.g. `Box<[U]>`, where `U` is the item type).
    #[inline]
    pub fn remaining(&self) -> usize {
        debug_assert!(self.capacity() >= self.items_initialized);
        self.capacity().wrapping_sub(self.items_initialized)
    }
    /// Check whether the buffer is completely initialized. Note that this is unrelated to it being
    /// filled.
    #[inline]
    pub fn is_completely_init(&self) -> bool {
        self.items_initialized() == self.capacity()
    }
    /// Check whether none of the items in the buffer have been initialized.
    #[inline]
    pub fn is_completely_uninit(&self) -> bool {
        self.items_initialized() == 0
    }
    /// Retrieve a shared reference to the uninitialized part of the buffer. This is mainly
    /// included for completeness: apart from some corner cases where one does not have exclusive
    /// access to the buffer but still wants to initialize it, it is rarely useful.
    #[inline]
    pub fn uninit_part(&self) -> &[MaybeUninit<T::Item>] {
        let all = self.all_uninit();

        // Validate that `items_initialized` is in bounds, when _debug assertions_ are enabled.
        self.debug_assert_validity();

        // NOTE: We use unsafe to eliminate unnecessary bounds checking. This may be negligible for
        // many scenarios, but we want to keep this interface zero-cost.
        unsafe {
            // SAFETY: We uphold the safety invariants:
            //
            // 1) the pointer belongs to the same allocated object, since we are simply taking a
            //    subslice of an existing slice;
            // 2) the offset multiplied by the item size cannot overflow an isize. This is
            //    impossible, since while the size of the item type may be larger than 1, the
            //    wrapper can never be constructed if the total size in bytes would exceed
            //    isize::MAX;
            // 3) the resulting pointer cannot wrap around the address space. This is also
            //    impossible, since for `all` to be a valid slice, it must not wrap around in
            //    address space between its start and end.
            let ptr = all.as_ptr().add(self.items_initialized);
            let len = all.len().wrapping_sub(self.items_initialized);

            // SAFETY: This is safe, because:
            //
            // 1) the data is valid for the entire memory range, since we are taking a subset of an
            //    already well-defined slice. Everything is therefore contained within a single
            //    allocation object, and the pointer is already non-null, since the pointer
            //    addition must not overflow;
            // 2) while no item whatsoever in [items_initialized, len) is guaranteed to be
            //    initialized, MaybeUninit is special and is always considered initialized (even
            //    though the value it wraps has no such guarantee);
            // 3) the value is not mutated for the lifetime of the slice, since the slice is owned
            //    by this struct, and there cannot exist a mutable borrow within this borrow for
            //    the anonymous lifetime `'_`;
            // 4) the number of initialized items is never larger than isize::MAX. This is checked
            //    every time `items_initialized` changes.
            core::slice::from_raw_parts(ptr, len)
        }
    }
    /// Retrieve a shared slice to the initialized part of the buffer. Note that this is different
    /// from the _filled_ part, as a buffer can be fully initialized but not filled.
    #[inline]
    pub fn init_part(&self) -> &[T::Item] {
        // Validate that `items_initialized` is in bounds, when _debug assertions_ are enabled.
        self.debug_assert_validity();

        // NOTE: Use of unsafe is only to eliminate bounds checks and maintain zero-cost.
        unsafe {
            let ptr = self.all_uninit().as_ptr();
            let len = self.items_initialized;

            // SAFETY: This is safe due to the same invariants as with `uninit_part`, plus the
            // initialization invariant. We uphold the latter by guaranteeing that the entire slice
            // we construct here is initialized, which is a contract of using this wrapper. We also
            // uphold the validity invariant, which is somewhat different in this case, since we
            // know that `items_initialized` must be smaller than or equal to the length of the
            // slice.
            core::slice::from_raw_parts(ptr as *const T::Item, len)
        }
    }

    /// Get a mutable slice to the uninitialized part of the buffer. Note that this is different
    /// from the unfilled part of it.
    #[inline]
    pub fn uninit_part_mut(&mut self) -> &mut [MaybeUninit<T::Item>] {
        // NOTE: We extract pointers to avoid multiple mutable aliases when invoking
        // core::slice::from_raw_parts_mut.
        let (orig_ptr, orig_len) = unsafe {
            let orig = self.all_uninit_mut();
            (orig.as_mut_ptr(), orig.len())
        };
        unsafe {
            // Validate that `items_initialized` is correct when debug assertions are enabled.
            self.debug_assert_validity();

            // SAFETY: This pointer arithmetic operation is safe for the same reasons as with
            // `uninit_part`.
            let ptr = orig_ptr.add(self.items_initialized);
            let len = orig_len.wrapping_sub(self.items_initialized);

            // SAFETY: This is safe for the exact same reasons as with `uninit_part`, but that
            // there must not be any reference _at all_ to the inner slice. This is upheld by
            // knowing that we have already borrowed the owner of the slice mutably.
            core::slice::from_raw_parts_mut(ptr, len)
        }
    }

    /// Retrieve a mutable slice to the initialized part of the buffer. Note that this is not the
    /// same as the filled part.
    #[inline]
    pub fn init_part_mut(&mut self) -> &mut [T::Item] {
        let orig_ptr = unsafe { self.all_uninit_mut().as_mut_ptr() };

        unsafe {
            let ptr = orig_ptr;
            let len = self.items_initialized;

            // SAFETY: This is safe for the exact same reasons as with `init_part`, except that we
            // also ensure that there is no access whatsoever to the inner data, since we are
            // borrowing `self` mutably.
            core::slice::from_raw_parts_mut(ptr as *mut T::Item, len)
        }
    }
    /// Try to transform the initializing type into its initialized counterpart, provided that it
    /// has been fully initialized.
    #[inline]
    pub fn try_into_init(self) -> Result<AssertInit<T>, Self> {
        if self.is_completely_init() {
            Ok(unsafe { self.assume_init() })
        } else {
            Err(self)
        }
    }
    /// Finish the initialization by writing `item` to the uninitialized region, and then get the
    /// final initialized type.
    pub fn finish_init_by_filling(mut self, item: T::Item) -> AssertInit<T>
    where
        T::Item: Copy,
    {
        self.fill_uninit_part(item);
        unsafe { self.assume_init() }
    }
    /// Fill the uninitialized part with copies of `item` (memset).
    ///
    /// After this method has been called, it is safe to [`assume_init`]. [`try_into_init`] will
    /// then also succeed.
    ///
    /// [`assume_init`]: #method.assume_init
    /// [`try_into_init`]: #method.try_into_init
    #[inline]
    pub fn fill_uninit_part(&mut self, item: T::Item)
    where
        T::Item: Copy,
    {
        crate::fill_uninit_slice(self.uninit_part_mut(), item);
        unsafe {
            self.advance_to_end();
        }
    }
    #[inline]
    pub fn partially_fill_uninit_part(&mut self, count: usize, item: T::Item)
    where
        T::Item: Copy,
    {
        crate::fill_uninit_slice(&mut self.uninit_part_mut()[..count], item);
        // SAFETY: The slice indexing will already bounds check.
        unsafe { self.advance(count) }
    }
    /// Get both the initialized and uninitialized parts simultaneously. This method is nothing but
    /// a shorthand for the individual methods, and is included for completeness.
    ///
    /// By contrast, the mutable counterpart [`init_uninit_parts_mut`] cannot be replaced by
    /// calling [`init_part_mut`] and [`uninit_part_mut`] separately, since both would borrow
    /// `self` mutably at the same time.
    ///
    /// [`init_part_mut`]: #method.init_part_mut
    /// [`uninit_part_mut`]: #method.uninit_part_mut
    /// [`init_uninit_parts_mut`]: #method.init_uninit_parts_mut
    #[inline]
    pub fn init_uninit_parts(&self) -> (&[T::Item], &[MaybeUninit<T::Item>]) {
        (self.init_part(), self.uninit_part())
    }
    /// Borrow both the initialized as well as the uninitialized parts, mutably.
    #[inline]
    pub fn init_uninit_parts_mut(&mut self) -> (&mut [T::Item], &mut [MaybeUninit<T::Item>]) {
        let (all_ptr, all_len) = unsafe {
            let all = self.all_uninit_mut();

            (all.as_mut_ptr(), all.len())
        };

        unsafe {
            self.debug_assert_validity();

            let init_base_ptr = all_ptr as *mut T::Item;
            let init_len = self.items_initialized;

            let uninit_base_ptr = all_ptr.add(self.items_initialized);
            let uninit_len = all_len.wrapping_sub(self.items_initialized);

            let init = core::slice::from_raw_parts_mut(init_base_ptr, init_len);
            let uninit = core::slice::from_raw_parts_mut(uninit_base_ptr, uninit_len);

            (init, uninit)
        }
    }
}
impl<T> BufferInitializer<T>
where
    // TODO: Other zeroable types than u8. Perhaps num-traits, or just a macro for all the
    // primitive integers?
    T: Initialize<Item = u8>,
{
    /// Finish the initialization by zeroing the uninitialized region, and then get the final
    /// initialized type.
    pub fn finish_init_by_zeroing(self) -> AssertInit<T> {
        self.finish_init_by_filling(0_u8)
    }
    #[inline]
    pub fn partially_zero_uninit_part(&mut self, count: usize) {
        crate::fill_uninit_slice(&mut self.uninit_part_mut()[..count], 0_u8);
        // SAFETY: The slice indexing will already bounds check.
        unsafe { self.advance(count) }
    }
    /// Zero the uninitialized part.
    ///
    /// After this method has been called, it is safe to [`assume_init`]. [`try_into_init`] will
    /// then also succeed.
    ///
    /// [`assume_init`]: #method.assume_init
    /// [`try_into_init`]: #method.try_into_init
    #[inline]
    pub fn zero_uninit_part(&mut self) {
        // NOTE: `fill_uninit_part` already advances the initialization cursor to the end.
        self.fill_uninit_part(0_u8);
    }
}

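/// An initializer tracking a set of possibly-uninitialized vectors, together with which vector is
/// currently being initialized and how far into that vector initialization has progressed.
///
/// A rough usage sketch, following the vectored test at the bottom of this file:
///
/// ```ignore
/// let mut first = [MaybeUninit::<u8>::uninit(); 8];
/// let mut second = [MaybeUninit::<u8>::uninit(); 8];
///
/// let mut vectors = [&mut first[..], &mut second[..]];
/// let mut initializer = BuffersInitializer::uninit(&mut vectors[..]);
///
/// initializer.zero_current_vector_uninit_part();
/// assert_eq!(initializer.vectors_initialized(), 1);
/// ```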
pub struct BuffersInitializer<T> {
    // The inner buffer. At the moment there is no type-level restriction that it has to implement
    // InitializeVectored for BuffersInitializer to be able to wrap it (to allow for const fn), but
    // that may change in a future release.
    pub(crate) inner: T,

    // A cursor marking the number of _vectors_ that have been fully initialized. Once
    // `items_initialized_for_vector` reaches the length of the vector indexed by this field, this
    // index is incremented and `items_initialized_for_vector` is reset to zero. Vectors with
    // length zero are skipped entirely.
    pub(crate) vectors_initialized: usize,
    pub(crate) items_initialized_for_vector: usize,
}

impl<T> BuffersInitializer<T> {
    #[inline]
    pub const fn uninit(inner: T) -> Self {
        Self {
            inner,
            vectors_initialized: 0,
            items_initialized_for_vector: 0,
        }
    }
    #[inline]
    pub fn into_raw_parts(self) -> (T, usize, usize) {
        let Self {
            inner,
            vectors_initialized,
            items_initialized_for_vector,
        } = self;

        (inner, vectors_initialized, items_initialized_for_vector)
    }
    #[inline]
    pub fn into_inner(self) -> T {
        let (inner, _, _) = self.into_raw_parts();

        inner
    }
}
impl<T> BuffersInitializer<SingleVector<T>> {
    pub fn from_single_buffer_initializer(single: BufferInitializer<T>) -> Self {
        let BufferInitializer {
            items_initialized,
            inner,
        } = single;

        Self {
            items_initialized_for_vector: items_initialized,
            vectors_initialized: 0,
            inner: SingleVector(inner),
        }
    }
}

impl<T, Item> BuffersInitializer<T>
where
    T: InitializeVectored,
    T::UninitVector: Initialize<Item = Item>,
{
    #[inline]
    fn all_vectors_uninit(&self) -> &[T::UninitVector] {
        self.inner.as_maybe_uninit_vectors()
    }

    /// Retrieve the current buffer immutably, provided that there is one.
    #[inline]
    pub fn current_vector_all(&self) -> Option<&[MaybeUninit<Item>]> {
        self.debug_assert_validity();

        let vectors_initialized = self.vectors_initialized;

        if vectors_initialized != self.total_vector_count() {
            Some(unsafe {
                self.all_vectors_uninit()
                    .get_unchecked(vectors_initialized)
                    .as_maybe_uninit_slice()
            })
        } else {
            None
        }
    }
    /// Retrieve the current buffer mutably, provided that there is one.
    ///
    /// # Safety
    ///
    /// This is unsafe because the caller must not de-initialize the buffer.
    #[inline]
    pub unsafe fn current_vector_all_mut(&mut self) -> Option<&mut [MaybeUninit<Item>]> {
        self.debug_assert_validity();

        let vectors_initialized = self.vectors_initialized;

        if vectors_initialized != self.total_vector_count() {
            let all_vectors_uninit_mut = self.all_uninit_vectors_mut();
            let current_vector_uninit_mut =
                all_vectors_uninit_mut.get_unchecked_mut(vectors_initialized);
            Some(current_vector_uninit_mut.as_maybe_uninit_slice_mut())
        } else {
            None
        }
    }

    #[inline]
    pub fn current_vector_init_part(&self) -> Option<&[Item]> {
        let (init_part, _) = self.current_vector_init_uninit_parts()?;

        Some(init_part)
    }

    #[inline]
    pub fn current_vector_uninit_part(&self) -> Option<&[MaybeUninit<Item>]> {
        let (_, uninit_part) = self.current_vector_init_uninit_parts()?;

        Some(uninit_part)
    }
    #[inline]
    pub fn current_vector_init_uninit_parts(&self) -> Option<(&[Item], &[MaybeUninit<Item>])> {
        let vector = self.current_vector_all()?;

        Some(unsafe {
            let init_vector_base_ptr = vector.as_ptr() as *const Item;
            let init_vector_len = self.items_initialized_for_vector;

            let init_vector = core::slice::from_raw_parts(init_vector_base_ptr, init_vector_len);

            let uninit_vector_base_ptr = vector.as_ptr().add(self.items_initialized_for_vector);
            let uninit_vector_len = vector.len().wrapping_sub(self.items_initialized_for_vector);

            let uninit_vector =
                core::slice::from_raw_parts(uninit_vector_base_ptr, uninit_vector_len);

            (init_vector, uninit_vector)
        })
    }

    #[inline]
    pub fn current_vector_init_part_mut(&mut self) -> Option<&mut [Item]> {
        let (init_part_mut, _) = self.current_vector_init_uninit_parts_mut()?;

        Some(init_part_mut)
    }

    #[inline]
    pub fn current_vector_uninit_part_mut(&mut self) -> Option<&mut [MaybeUninit<Item>]> {
        let (_, uninit_part_mut) = self.current_vector_init_uninit_parts_mut()?;

        Some(uninit_part_mut)
    }
    #[inline]
    pub fn current_vector_init_uninit_parts_mut(
        &mut self,
    ) -> Option<(&mut [Item], &mut [MaybeUninit<Item>])> {
        let (orig_base_ptr, orig_len) = unsafe {
            let vector = self.current_vector_all_mut()?;

            (vector.as_mut_ptr(), vector.len())
        };
        Some(unsafe {
            let init_vector_base_ptr = orig_base_ptr as *mut Item;
            let init_vector_len = self.items_initialized_for_vector;

            let init_vector =
                core::slice::from_raw_parts_mut(init_vector_base_ptr, init_vector_len);

            let uninit_vector_base_ptr = orig_base_ptr.add(self.items_initialized_for_vector);
            let uninit_vector_len = orig_len.wrapping_sub(self.items_initialized_for_vector);

            let uninit_vector =
                core::slice::from_raw_parts_mut(uninit_vector_base_ptr, uninit_vector_len);

            (init_vector, uninit_vector)
        })
    }

    fn debug_assert_validity(&self) {
        debug_assert!(self
            .inner
            .as_maybe_uninit_vectors()
            .get(self.vectors_initialized)
            .map_or(true, |current_vector| current_vector
                .as_maybe_uninit_slice()
                .len()
                >= self.items_initialized_for_vector));
        debug_assert!(self.items_initialized_for_vector <= isize::MAX as usize);
        debug_assert!(self.inner.as_maybe_uninit_vectors().len() >= self.vectors_initialized);
    }

    /// Get the total number of vectors that the wrapper was constructed with.
    #[inline]
    pub fn total_vector_count(&self) -> usize {
        self.inner.as_maybe_uninit_vectors().len()
    }

    /// Get the number of vectors that have been completely initialized so far.
    #[inline]
    pub fn vectors_initialized(&self) -> usize {
        self.vectors_initialized
    }

    /// Get the number of vectors remaining, including the vector that is currently being
    /// initialized (if any).
    #[inline]
    pub fn vectors_remaining(&self) -> usize {
        self.total_vector_count()
            .wrapping_sub(self.vectors_initialized())
    }

    /// Count the items that must still be initialized before the whole buffer is initialized.
    ///
    /// Note that this can be expensive if there are many vectors; it is O(n), where `n` is the
    /// number of vectors that have not yet been fully initialized. If all vectors are already
    /// initialized, then this completes in constant time.
    pub fn count_items_to_initialize(&self) -> usize {
        // The items that remain uninitialized within the vector currently being initialized, if
        // there is one.
        let items_left_in_current_vector = self.current_vector_all().map_or(0, |current| {
            current.len().wrapping_sub(self.items_initialized_for_vector)
        });

        // All items in the vectors that have not been reached yet.
        let items_to_initialize_for_remaining = self
            .all_uninit_vectors()
            .iter()
            .skip(self.vectors_initialized + 1_usize)
            .map(|buffer| buffer.as_maybe_uninit_slice().len())
            .sum::<usize>();

        items_left_in_current_vector + items_to_initialize_for_remaining
    }
    pub fn count_total_items_in_all_vectors(&self) -> usize {
        self.all_uninit_vectors()
            .iter()
            .map(|buffer| buffer.as_maybe_uninit_slice().len())
            .sum()
    }

    /// Get the number of items that have been initialized for the vector at the given index.
    ///
    /// For vectors that have not yet been reached, this returns zero, while vectors that are
    /// already fully initialized return their full length. The vector currently being initialized
    /// returns the number of items within it that have been initialized so far.
    ///
    /// # Safety
    ///
    /// The caller must ensure that vector_index is within the bounds of the slice of vectors given
    /// by the inner wrapped value.
    #[inline]
    pub unsafe fn items_initialized_for_vector_unchecked(&self, vector_index: usize) -> usize {
        let ordering = vector_index.cmp(&self.vectors_initialized);

        match ordering {
            core::cmp::Ordering::Equal => self.items_initialized_for_vector,
            core::cmp::Ordering::Greater => 0,
            core::cmp::Ordering::Less => self
                .all_uninit_vectors()
                .get_unchecked(vector_index)
                .as_maybe_uninit_slice()
                .len(),
        }
    }
    #[inline]
    pub fn items_initialized_for_vector(&self, vector_index: usize) -> usize {
        assert!(vector_index < self.total_vector_count());

        unsafe { self.items_initialized_for_vector_unchecked(vector_index) }
    }
    #[inline]
    pub fn items_initialized_for_current_vector(&self) -> usize {
        if self.vectors_initialized() != self.total_vector_count() {
            self.items_initialized_for_vector
        } else {
            0
        }
    }

    /// Get the uninitialized version of all vectors wrapped by this initializer.
    #[inline]
    pub fn all_uninit_vectors(&self) -> &[T::UninitVector] {
        self.inner.as_maybe_uninit_vectors()
    }
    /// Get the uninitialized version of all vectors wrapped by this initializer, mutably.
    ///
    /// # Safety
    ///
    /// The caller must not de-initialize any values.
    #[inline]
    pub unsafe fn all_uninit_vectors_mut(&mut self) -> &mut [T::UninitVector] {
        self.inner.as_maybe_uninit_vectors_mut()
    }
    /// Advance the initialization cursor by `count` items, returning the number of items that were
    /// actually advanced. The `count` input may be higher than the total number of items in the
    /// buffer, without panics or UB.
    ///
    /// # Safety
    ///
    /// For this to be safe, `count` items, starting at the current vector and continuing into
    /// subsequent vectors, must have been initialized.
    ///
    /// Additionally, `count` must never overflow `isize::MAX`, but it may be larger than the total
    /// number of items in the vectors (to avoid having to count them up front, for performance
    /// reasons).
    #[inline]
    pub unsafe fn advance(&mut self, mut count: usize) -> usize {
        let mut items_advanced = 0;

        while let Some(current_uninit_part) = self.current_vector_uninit_part() {
            let current_uninit_part_len = current_uninit_part.len();

            if count >= current_uninit_part_len {
                // The rest of the current vector is covered by `count`; mark it as fully
                // initialized and continue with the next one.
                self.vectors_initialized = self
                    .vectors_initialized
                    .checked_add(1)
                    .expect("reached usize::MAX when incrementing the buffer index");
                self.items_initialized_for_vector = 0;

                count -= current_uninit_part_len;
                items_advanced += current_uninit_part_len;
            } else {
                // The remaining count fits within the current vector.
                self.items_initialized_for_vector += count;
                items_advanced += count;
                break;
            }
        }

        items_advanced
    }
    /// Advance the initialization progress to the end of the current vector, thus wrapping around
    /// and continuing with the next one.
    ///
    /// # Safety
    ///
    /// This is unsafe not only because of the initialization invariant, but also because it does
    /// not check whether the _initialized vectors_ counter is at the end (meaning that all vectors
    /// are initialized). The caller must hence ensure that the value of [`vectors_remaining`] is
    /// larger than zero.
    ///
    /// [`vectors_remaining`]: #method.vectors_remaining
    pub unsafe fn advance_current_vector_to_end(&mut self) {
        self.debug_assert_validity();

        self.vectors_initialized += 1;
        self.items_initialized_for_vector = 0;
    }
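    /// Advance the initialization cursor within the current vector by `count` items, moving on to
    /// the next vector if the current one thereby becomes fully initialized. Panics if `count`
    /// would go past the end of the current vector.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `count` items, starting at the current initialization offset
    /// within the current vector, have actually been initialized.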
    pub unsafe fn advance_current_vector(&mut self, count: usize) {
        self.debug_assert_validity();

        if let Some(current_vector) = self.current_vector_all() {
            let current_vector_len = current_vector.len();
            let end = self.items_initialized_for_vector + count;

            assert!(end <= current_vector_len);

            if end == current_vector_len {
                self.vectors_initialized += 1;
                self.items_initialized_for_vector = 0;
            } else {
                self.items_initialized_for_vector = end;
            }
        } else if count > 0 {
            panic!("cannot advance beyond the end of the current vector")
        }
    }
    pub fn partially_fill_current_vector_uninit_part(&mut self, count: usize, item: Item)
    where
        Item: Copy,
    {
        if let Some(current_vector_uninit_part_mut) = self.current_vector_uninit_part_mut() {
            crate::fill_uninit_slice(&mut current_vector_uninit_part_mut[..count], item);
            unsafe { self.advance_current_vector(count) }
        } else if count > 0 {
            panic!("cannot partially fill a vector when none are left");
        }
    }
    pub fn fill_current_vector_uninit_part(&mut self, item: Item)
    where
        Item: Copy,
    {
        if let Some(current_vector_uninit_part_mut) = self.current_vector_uninit_part_mut() {
            crate::fill_uninit_slice(current_vector_uninit_part_mut, item);
            unsafe { self.advance_current_vector_to_end() }
        }
    }
    pub fn try_into_init(self) -> Result<AssertInitVectors<T>, Self> {
        if self.vectors_remaining() == 0 {
            Ok(unsafe { AssertInitVectors::new_unchecked(self.into_inner()) })
        } else {
            Err(self)
        }
    }
}
impl<T> BuffersInitializer<T>
where
    T: InitializeVectored,
    // TODO: Again, additional zeroable types.
    T::UninitVector: Initialize<Item = u8>,
{
    pub fn partially_zero_current_vector_uninit_part(&mut self, count: usize) {
        self.partially_fill_current_vector_uninit_part(count, 0_u8)
    }
    pub fn zero_current_vector_uninit_part(&mut self) {
        self.fill_current_vector_uninit_part(0_u8)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    mod single {
        use super::*;

        #[test]
        fn new_fills_completely() {
            let slice = *b"Calling BufferInitializer::new() will ensure that the initialization marker is put at the end of the slice, making it fully zero-cost when already using initialized memory.";
            let mut copy = slice;

            let mut initializer = BufferInitializer::new(&mut copy[..]);

            assert_eq!(initializer.remaining(), 0);
            assert_eq!(initializer.capacity(), slice.len());
            assert!(initializer.is_completely_init());
            assert!(!initializer.is_completely_uninit());
            assert!(initializer.uninit_part().is_empty());
            assert!(initializer.uninit_part_mut().is_empty());
            assert_eq!(initializer.init_part(), slice);
            assert_eq!(initializer.init_part_mut(), slice);
            assert!(initializer.try_into_init().is_ok());
        }

        #[test]
        fn basic_initialization() {
            let mut slice = [MaybeUninit::uninit(); 32];
            let buffer = BufferInitializer::uninit(&mut slice[..]);
            let initialized = buffer.finish_init_by_filling(42_u8);
            assert!(initialized.iter().all(|&byte| byte == 42_u8));
        }
        #[test]
        fn buffer_parts() {
            let mut slice = [MaybeUninit::<u8>::uninit(); 32];
            let mut buffer = BufferInitializer::uninit(&mut slice[..]);

            assert_eq!(buffer.uninit_part().len(), 32);
            assert_eq!(buffer.uninit_part_mut().len(), 32);
            assert_eq!(buffer.init_part(), &[]);
            assert_eq!(buffer.init_part_mut(), &mut []);
            assert!(!buffer.is_completely_init());

            // TODO: Fill partially, and then check further.
        }
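
        // A sketch of the partial-fill behaviour hinted at by the TODO above, using only methods
        // defined in this file.
        #[test]
        fn partial_fill_tracks_cursor() {
            let mut slice = [MaybeUninit::<u8>::uninit(); 32];
            let mut buffer = BufferInitializer::uninit(&mut slice[..]);

            buffer.partially_fill_uninit_part(8, 0xFF_u8);
            assert_eq!(buffer.items_initialized(), 8);
            assert_eq!(buffer.init_part(), &[0xFF_u8; 8][..]);
            assert_eq!(buffer.uninit_part().len(), 24);
            assert!(!buffer.is_completely_init());

            buffer.fill_uninit_part(0x00_u8);
            assert!(buffer.is_completely_init());
        }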
    }
    mod vectored {
        use super::*;

        #[test]
        fn fill_uninit_part() {
            let mut first = [MaybeUninit::uninit(); 32];
            let mut second = [MaybeUninit::uninit(); 128];
            let mut third = [MaybeUninit::uninit(); 64];

            let mut vectors = [&mut first[..], &mut second[..], &mut third[..]];
            let mut initializer = BuffersInitializer::uninit(&mut vectors[..]);

            initializer.zero_current_vector_uninit_part();
            assert_eq!(initializer.vectors_initialized(), 1);
            assert_eq!(initializer.items_initialized_for_current_vector(), 0);

            initializer.partially_zero_current_vector_uninit_part(96);
            assert_eq!(initializer.vectors_initialized(), 1);
            assert_eq!(initializer.items_initialized_for_current_vector(), 96);

            initializer.partially_fill_current_vector_uninit_part(32, 0x13_u8);
            assert_eq!(initializer.vectors_initialized(), 2);
            assert_eq!(initializer.items_initialized_for_current_vector(), 0);

            initializer.partially_fill_current_vector_uninit_part(16, 0x37_u8);
            assert_eq!(initializer.vectors_initialized(), 2);
            assert_eq!(initializer.items_initialized_for_current_vector(), 16);
            initializer.fill_current_vector_uninit_part(0x42);
            assert_eq!(initializer.vectors_initialized(), 3);
            assert!(initializer.current_vector_all().is_none());
        }
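
        // A sketch checking `count_items_to_initialize` against the cursors; it assumes the count
        // is the number of items that still need to be initialized across all vectors.
        #[test]
        fn counts_remaining_items() {
            let mut first = [MaybeUninit::<u8>::uninit(); 8];
            let mut second = [MaybeUninit::<u8>::uninit(); 16];

            let mut vectors = [&mut first[..], &mut second[..]];
            let mut initializer = BuffersInitializer::uninit(&mut vectors[..]);

            assert_eq!(initializer.count_total_items_in_all_vectors(), 24);
            assert_eq!(initializer.count_items_to_initialize(), 24);

            initializer.zero_current_vector_uninit_part();
            assert_eq!(initializer.count_items_to_initialize(), 16);

            initializer.partially_zero_current_vector_uninit_part(4);
            assert_eq!(initializer.count_items_to_initialize(), 12);
        }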
    }
}