1use std::fmt;
4use std::mem::MaybeUninit;
5use std::num::NonZeroUsize;
6
/// A growable byte buffer that tracks a read cursor (`offset`) into its
/// filled region and how many bytes of the spare capacity past `len` have
/// been initialized but not yet made part of the filled region.
#[derive(Default)]
pub struct Buffer {
    // Filled bytes; `inner[offset..]` is the unread portion.
    inner: Vec<u8>,

    // Number of initialized-but-unfilled bytes directly past `inner.len()`.
    unfilled_initialized: usize,

    // Read cursor into `inner`; `read` asserts it never exceeds `inner.len()`.
    offset: usize,
}

impl Clone for Buffer {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
            // A derived `Clone` would copy `unfilled_initialized`, but the
            // clone's spare capacity is a fresh, uninitialized allocation, so
            // carrying the count over would let `unfilled_initialized()` read
            // uninitialized memory. The clone starts with no initialized
            // spare bytes.
            unfilled_initialized: 0,
            offset: self.offset,
        }
    }
}
19
20impl fmt::Debug for Buffer {
21 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
22 f.debug_struct("Buffer")
23 .field("len", &self.inner.len())
24 .field("capacity", &self.inner.capacity())
25 .field("unfilled_initialized", &self.unfilled_initialized)
26 .field("offset", &self.offset)
27 .finish()
28 }
29}
30
31impl From<Vec<u8>> for Buffer {
32 #[inline]
33 fn from(buffer: Vec<u8>) -> Self {
34 Self::new(buffer)
35 }
36}
37
38impl Buffer {
39 #[inline]
40 #[must_use]
41 pub fn new(buffer: Vec<u8>) -> Self {
43 Self {
44 inner: buffer,
45 unfilled_initialized: 0,
46 offset: 0,
47 }
48 }
49
50 #[must_use]
51 pub const fn empty() -> Self {
53 Self {
54 inner: Vec::new(),
55 unfilled_initialized: 0,
56 offset: 0,
57 }
58 }
59
60 #[track_caller]
61 pub fn read<F>(&mut self, f: F) -> Option<NonZeroUsize>
70 where
71 F: FnOnce(&[u8]) -> usize,
72 {
73 if self.inner.is_empty() {
74 return None;
77 }
78
79 let Some((_, unread)) = self.inner.split_at_checked(self.offset) else {
80 unreachable!(
81 "The offset is always within the buffer length, but it is not: offset = {}, len = \
82 {}",
83 self.offset,
84 self.inner.len()
85 );
86 };
87
88 if unread.is_empty() {
89 self.reset();
91
92 return None;
93 }
94
95 let has_read = NonZeroUsize::new(f(unread));
96
97 match has_read {
98 Some(n) if n.get() <= unread.len() => {
99 self.offset = self.offset.saturating_add(n.get());
102 }
103 Some(n) => panic!(
104 "The closure read more bytes than available: read = {}, available = {}",
105 n,
106 unread.len()
107 ),
108 None => {}
109 }
110
111 has_read
112 }
113
114 #[inline]
115 #[must_use]
116 pub fn unread(&self) -> &[u8] {
118 &self.inner[self.offset..]
119 }
120
121 #[inline]
122 pub fn drain(&mut self) -> Option<Vec<u8>> {
125 if self.unread().is_empty() {
126 None
127 } else {
128 let drained = self.unread().to_vec();
129
130 self.reset();
132
133 Some(drained)
134 }
135 }
136
137 #[inline]
138 pub(crate) fn reserve(&mut self, additional: usize) {
140 self.inner.reserve(additional);
141 }
142
143 #[inline]
144 pub(crate) fn unfilled_mut(&mut self) -> &mut [MaybeUninit<u8>] {
147 self.unfilled_initialized = 0;
148
149 self.inner.spare_capacity_mut()
150 }
151
152 #[inline]
153 pub(crate) fn unfilled_initialized(&self) -> &[u8] {
155 #[allow(unsafe_code)]
156 unsafe {
158 std::slice::from_raw_parts(
159 self.inner
160 .as_ptr()
161 .add(self.inner.len()),
162 self.unfilled_initialized,
163 )
164 }
165 }
166
167 #[allow(unsafe_code)]
168 #[inline]
169 pub(crate) unsafe fn assume_init_additional(&mut self, cnt: usize) {
182 let unfilled_initialized = self.unfilled_initialized + cnt;
183
184 debug_assert!(self.inner.len() + unfilled_initialized <= self.inner.capacity());
185
186 self.unfilled_initialized = unfilled_initialized;
187 }
188
189 #[inline]
190 pub(crate) fn set_filled_all(&mut self) {
192 let initialized = self.inner.len() + self.unfilled_initialized;
193
194 debug_assert!(initialized <= self.inner.capacity());
195
196 #[allow(unsafe_code)]
197 unsafe {
199 self.inner.set_len(initialized);
200 };
201 }
202
203 #[inline]
204 fn reset(&mut self) {
207 self.inner.truncate(0);
208 self.inner.shrink_to(65536);
209 self.offset = 0;
210 }
211}