// swap_buffer_queue/write_vectored/array.rs

1use std::{io::IoSlice, mem, mem::MaybeUninit, ops::Range};
2
3use crate::{
4    buffer::{Buffer, CellBuffer, Drain},
5    loom::{
6        cell::Cell,
7        sync::atomic::{AtomicUsize, Ordering},
8    },
9    utils::{init_array, ArrayWithHeaderAndTrailer},
10    write_vectored::{VectoredSlice, EMPTY_SLICE},
11};
12
/// A buffer of [`IoSlice`] of size `N`
///
/// The total size of the buffer is `N * mem::size_of::<T>() + (N + 2) * mem::size_of::<IoSlice>()`.
pub struct WriteVectoredArrayBuffer<T, const N: usize> {
    // Owners of the bytes viewed by `slices`; an entry is only initialized
    // between `CellBuffer::insert` and `Drain::remove`/`Buffer::clear`.
    owned: [Cell<MaybeUninit<T>>; N],
    // One `IoSlice` per owned value (stored at `index + 1`), plus one header
    // and one trailer slot (the `1, N, 1` parameters) so `Buffer::slice` can
    // hand out a window with a spare slot on each side.
    // NOTE(review): the `'static` lifetime is a lie erased in `insert`; the
    // slices are documented below to live exactly as long as their owner.
    slices: ArrayWithHeaderAndTrailer<Cell<IoSlice<'static>>, 1, N, 1>,
    // Running sum, in bytes, of all currently inserted slices.
    total_size: AtomicUsize,
}
21
22impl<T, const N: usize> Default for WriteVectoredArrayBuffer<T, N> {
23    fn default() -> Self {
24        Self {
25            owned: init_array(|| Cell::new(MaybeUninit::uninit())),
26            slices: ArrayWithHeaderAndTrailer::new(|| Cell::new(IoSlice::new(EMPTY_SLICE))),
27            total_size: Default::default(),
28        }
29    }
30}
31
32// SAFETY: `WriteVectoredArrayBuffer::clear` does clear the inserted range from the buffer
33unsafe impl<T, const N: usize> Buffer for WriteVectoredArrayBuffer<T, N>
34where
35    T: AsRef<[u8]>,
36{
37    type Slice<'a> = VectoredSlice<'a>
38    where
39        T: 'a;
40
41    #[inline]
42    fn capacity(&self) -> usize {
43        N
44    }
45
46    #[inline]
47    unsafe fn slice(&mut self, range: Range<usize>) -> Self::Slice<'_> {
48        // SAFETY: [Cell<IoSlice>] has the same layout as [IoSlice]
49        // and function contract guarantees that the range is initialized
50        let slices = unsafe {
51            &mut *(&mut self.slices[range.start..range.end + 2] as *mut _
52                as *mut [IoSlice<'static>])
53        };
54        // SAFETY: slices are never read and live along their owner in the buffer, as they are
55        // inserted and removed together
56        unsafe { VectoredSlice::new(slices, self.total_size.load(Ordering::Acquire)) }
57    }
58
59    #[inline]
60    unsafe fn clear(&mut self, range: Range<usize>) {
61        *self.total_size.get_mut() = 0;
62        for index in range {
63            // SAFETY: function contract guarantees that the range is initialized
64            unsafe { self.remove(index) };
65        }
66    }
67}
68
// SAFETY: `insert` does initialize the index in the buffer
unsafe impl<T, const N: usize> CellBuffer<T> for WriteVectoredArrayBuffer<T, N>
where
    T: AsRef<[u8]>,
{
    /// Stores `value` at `index`, publishes an `IoSlice` viewing its bytes,
    /// and adds its length to the running byte total.
    unsafe fn insert(&self, index: usize, value: T) {
        // The transmute only erases the lifetime of the borrowed bytes
        // (`'_` -> `'static`); source and target have identical layout.
        // SAFETY: slice is never read with static lifetime, it will only be used as a reference
        // with the same lifetime than the slice owner
        let slice = unsafe { mem::transmute::<IoSlice, IoSlice>(IoSlice::new(value.as_ref())) };
        // `index + 1` skips the header slot of `slices` (see the struct layout).
        self.slices[index + 1].set(slice);
        // Move the owner in after its slice; the pair is kept in sync and
        // removed together in `Drain::remove`.
        self.owned[index].set(MaybeUninit::new(value));
        // `slice` is `Copy`, so its length is still readable after `value`
        // was moved into the cell above.
        self.total_size.fetch_add(slice.len(), Ordering::AcqRel);
    }
}
83
// SAFETY: `WriteVectoredArrayBuffer::remove` does remove the index from the buffer
unsafe impl<T, const N: usize> Drain for WriteVectoredArrayBuffer<T, N>
where
    T: AsRef<[u8]>,
{
    type Value = T;

    /// Takes the value stored at `index` back out of the buffer and deducts
    /// its byte length from the running total. The matching `IoSlice` slot is
    /// left as-is: it is never read once its owner is gone (see `insert`).
    #[inline]
    unsafe fn remove(&mut self, index: usize) -> Self::Value {
        // Leave an uninit placeholder behind while moving the value out.
        // SAFETY: function contract guarantees that the index has been inserted and is then initialized
        let value = unsafe {
            self.owned[index]
                .replace(MaybeUninit::uninit())
                .assume_init()
        };
        self.total_size
            .fetch_sub(value.as_ref().len(), Ordering::Release);
        value
    }
}
103}