1use core::{convert::Infallible, fmt};
2
3#[cfg(feature = "alloc")]
4use alloc::vec::Vec;
5
/// An abstract output buffer holding two regions that grow towards each
/// other: a "heap" region at the front of the underlying storage and a
/// "stack" region at the back, with free space in between.
///
/// Every method takes the current sizes of the two regions (`heap` and
/// `stack`) so the implementation can locate the free gap. Stack writes land
/// immediately below the existing stack region; heap reservations extend the
/// front region.
pub trait Buffer {
    /// Error produced when the buffer cannot fit a write.
    type Error;

    /// A shorter-lived handle to the same underlying buffer, so the buffer
    /// can be passed on without being moved.
    type Reborrow<'a>: Buffer<Error = Self::Error>
    where
        Self: 'a;

    /// Reborrow this buffer with a shorter lifetime.
    fn reborrow(&mut self) -> Self::Reborrow<'_>;

    /// Write `bytes` immediately below the current stack region (i.e. at the
    /// back of the free gap), failing if the gap is too small.
    fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Self::Error>;

    /// Reserve `len` bytes immediately below the current stack region without
    /// necessarily writing them, failing if the gap is too small.
    /// (Implementations in this file zero the padding only under `cfg(test)`.)
    fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Self::Error>;

    /// Move the first `len` bytes of the stack region (at the back of the
    /// storage) to the end of the heap region (position `heap`).
    fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize);

    /// Grow the heap region by `len` bytes, returning the heap region
    /// including the newly reserved bytes (`..heap + len`).
    fn reserve_heap(
        &mut self,
        heap: usize,
        stack: usize,
        len: usize,
    ) -> Result<&mut [u8], Self::Error>;
}
58
/// A buffer that ignores every write; all operations are infallible no-ops.
/// Presumably used for a "dry run" pass that only tracks sizes — confirm
/// against callers.
#[derive(Clone, Copy, Default)]
pub struct DryBuffer;
63
64impl Buffer for DryBuffer {
65 type Error = Infallible;
66 type Reborrow<'a> = Self;
67
68 #[inline(always)]
69 fn reborrow(&mut self) -> DryBuffer {
70 *self
71 }
72
73 #[inline(always)]
74 fn write_stack(
75 &mut self,
76 _heap: usize,
77 _stack: usize,
78 _bytes: &[u8],
79 ) -> Result<(), Infallible> {
80 Ok(())
81 }
82
83 #[inline(always)]
84 fn pad_stack(&mut self, _heap: usize, _stack: usize, _len: usize) -> Result<(), Infallible> {
85 Ok(())
86 }
87
88 #[inline(always)]
89 fn move_to_heap(&mut self, _heap: usize, _stack: usize, _len: usize) {}
90
91 #[inline(always)]
92 fn reserve_heap(
93 &mut self,
94 _heap: usize,
95 _stack: usize,
96 _len: usize,
97 ) -> Result<&mut [u8], Infallible> {
98 Ok(&mut [])
99 }
100}
101
/// Error returned by checked buffers when a write or reservation does not
/// fit in the remaining free space.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct BufferExhausted;
109
110impl fmt::Display for BufferExhausted {
111 #[inline]
112 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
113 write!(f, "buffer exhausted")
114 }
115}
116
/// A fixed-capacity buffer over a caller-provided byte slice whose
/// operations fail with [`BufferExhausted`] instead of panicking when the
/// free gap is too small.
#[repr(transparent)]
pub struct CheckedFixedBuffer<'a> {
    buf: &'a mut [u8],
}
123
124impl<'a> CheckedFixedBuffer<'a> {
125 #[inline(always)]
127 pub fn new(buf: &'a mut [u8]) -> Self {
128 CheckedFixedBuffer { buf }
129 }
130}
131
132impl<'a> Buffer for CheckedFixedBuffer<'a> {
133 type Error = BufferExhausted;
134 type Reborrow<'b> = CheckedFixedBuffer<'b> where 'a: 'b;
135
136 #[inline(always)]
137 fn reborrow(&mut self) -> Self::Reborrow<'_> {
138 CheckedFixedBuffer { buf: self.buf }
139 }
140
141 #[inline(always)]
142 fn write_stack(
143 &mut self,
144 heap: usize,
145 stack: usize,
146 bytes: &[u8],
147 ) -> Result<(), BufferExhausted> {
148 debug_assert!(heap + stack <= self.buf.len());
149 if self.buf.len() - heap - stack < bytes.len() {
150 return Err(BufferExhausted);
151 }
152 let at = self.buf.len() - stack - bytes.len();
153 self.buf[at..][..bytes.len()].copy_from_slice(bytes);
154 Ok(())
155 }
156
157 #[inline(always)]
158 fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), BufferExhausted> {
159 debug_assert!(heap + stack <= self.buf.len());
160 if self.buf.len() - heap - stack < len {
161 return Err(BufferExhausted);
162 }
163
164 #[cfg(test)]
165 {
166 let at = self.buf.len() - stack - len;
167 self.buf[at..][..len].fill(0);
168 }
169 Ok(())
170 }
171
172 #[inline(always)]
173 fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
174 debug_assert!(heap + stack <= self.buf.len());
175 let start = self.buf.len() - stack;
176 let end = start + len;
177 self.buf.copy_within(start..end, heap);
178 }
179
180 #[inline(always)]
181 fn reserve_heap(
182 &mut self,
183 heap: usize,
184 stack: usize,
185 len: usize,
186 ) -> Result<&mut [u8], BufferExhausted> {
187 debug_assert!(heap + stack <= self.buf.len());
188 if self.buf.len() - heap - stack < len {
189 return Err(BufferExhausted);
190 }
191 let end = heap + len;
192 Ok(&mut self.buf[..end])
193 }
194}
195
196impl<'a> Buffer for &'a mut [u8] {
197 type Error = Infallible;
198
199 type Reborrow<'b> = &'b mut [u8] where 'a: 'b;
200
201 #[inline(always)]
202 fn reborrow(&mut self) -> &'_ mut [u8] {
203 self
204 }
205
206 #[inline(always)]
207 fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Infallible> {
208 debug_assert!(heap + stack <= self.len());
209 let at = self.len() - stack - bytes.len();
210 self[at..][..bytes.len()].copy_from_slice(bytes);
211 Ok(())
212 }
213
214 #[inline(always)]
215 fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Infallible> {
216 debug_assert!(heap + stack <= self.len());
217 assert!(self.len() - heap - stack >= len);
218
219 #[cfg(test)]
220 {
221 let at = self.len() - stack - len;
222 self[at..][..len].fill(0);
223 }
224 Ok(())
225 }
226
227 #[inline(always)]
228 fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
229 debug_assert!(stack >= len);
230 debug_assert!(heap + stack <= self.len());
231 let start = self.len() - stack;
232 let end = start + len;
233 self.copy_within(start..end, heap);
234 }
235
236 #[inline(always)]
237 fn reserve_heap(
238 &mut self,
239 heap: usize,
240 stack: usize,
241 len: usize,
242 ) -> Result<&mut [u8], Infallible> {
243 debug_assert!(heap + stack <= self.len());
244 let end = heap + len;
245 Ok(&mut self[..end])
246 }
247}
248
/// A fixed-capacity buffer that never returns an error; instead it records
/// exhaustion in a shared flag and silently drops further writes once set.
pub struct MaybeFixedBuffer<'a> {
    buf: &'a mut [u8],
    // Shared with the creator so exhaustion can be observed after encoding.
    exhausted: &'a mut bool,
}
257
258impl<'a> MaybeFixedBuffer<'a> {
259 pub fn new(buf: &'a mut [u8], exhausted: &'a mut bool) -> Self {
261 MaybeFixedBuffer { buf, exhausted }
262 }
263}
264
265impl<'a> Buffer for MaybeFixedBuffer<'a> {
266 type Error = Infallible;
267
268 type Reborrow<'b> = MaybeFixedBuffer<'b> where 'a: 'b;
269
270 #[inline(always)]
271 fn reborrow(&mut self) -> Self::Reborrow<'_> {
272 MaybeFixedBuffer {
273 buf: self.buf,
274 exhausted: self.exhausted,
275 }
276 }
277
278 #[inline(always)]
279 fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Infallible> {
280 if !*self.exhausted {
281 debug_assert!(heap + stack <= self.buf.len());
282 if self.buf.len() - heap - stack < bytes.len() {
283 *self.exhausted = true;
284 }
285 }
286
287 if !*self.exhausted {
288 let at = self.buf.len() - stack - bytes.len();
289 self.buf[at..][..bytes.len()].copy_from_slice(bytes);
290 }
291 Ok(())
292 }
293
294 #[inline(always)]
295 fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Infallible> {
296 if !*self.exhausted {
297 debug_assert!(heap + stack <= self.buf.len());
298 if self.buf.len() - heap - stack < len {
299 *self.exhausted = true;
300 }
301 }
302 Ok(())
303 }
304
305 #[inline(always)]
306 fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
307 debug_assert!(stack >= len);
308 if !*self.exhausted {
309 debug_assert!(heap + stack <= self.buf.len());
310 let start = self.buf.len() - stack;
311 let end = start + len;
312 self.buf.copy_within(start..end, heap);
313 }
314 }
315
316 #[inline(always)]
317 fn reserve_heap(
318 &mut self,
319 heap: usize,
320 stack: usize,
321 len: usize,
322 ) -> Result<&mut [u8], Infallible> {
323 if !*self.exhausted {
324 debug_assert!(heap + stack <= self.buf.len());
325 if self.buf.len() - heap - stack < len {
326 *self.exhausted = true;
327 }
328 }
329
330 if *self.exhausted {
331 Ok(&mut [])
332 } else {
333 let end = heap + len;
334 Ok(&mut self.buf[..end])
335 }
336 }
337}
338
/// A growable buffer backed by a borrowed [`Vec<u8>`]; writes never fail
/// because the vector is resized on demand.
#[cfg(feature = "alloc")]
pub struct VecBuffer<'a> {
    buf: &'a mut Vec<u8>,
}
346
#[cfg(feature = "alloc")]
impl<'a> VecBuffer<'a> {
    /// Wrap the given vector in a growable buffer.
    pub fn new(buf: &'a mut Vec<u8>) -> Self {
        Self { buf }
    }
}
354
#[cfg(feature = "alloc")]
impl VecBuffer<'_> {
    /// Slow path: grow the vector so the free gap holds `additional` more
    /// bytes, then relocate the stack region to the new tail.
    #[cold]
    fn do_reserve(&mut self, heap: usize, stack: usize, additional: usize) {
        let before = self.buf.len();
        self.buf.resize(heap + stack + additional, 0);
        let after = self.buf.len();
        // The stack region lives at the back of the vector, so it must
        // follow the end as the vector grows.
        self.buf
            .copy_within(before - stack..before, after - stack);
    }

    /// Ensure at least `additional` free bytes sit between the heap and
    /// stack regions, growing the vector on the cold path if needed.
    fn reserve(&mut self, heap: usize, stack: usize, additional: usize) {
        let available = self.buf.len() - heap - stack;
        if available < additional {
            self.do_reserve(heap, stack, additional);
        }
    }
}
374
#[cfg(feature = "alloc")]
impl<'a> Buffer for VecBuffer<'a> {
    type Error = Infallible;
    type Reborrow<'b> = VecBuffer<'b> where 'a: 'b;

    /// Reborrow the underlying vector with a shorter lifetime.
    #[inline(always)]
    fn reborrow(&mut self) -> Self::Reborrow<'_> {
        VecBuffer { buf: self.buf }
    }

    /// Write `bytes` immediately below the stack region, growing the vector
    /// first if the free gap is too small. Never fails.
    #[inline(always)]
    fn write_stack(&mut self, heap: usize, stack: usize, bytes: &[u8]) -> Result<(), Infallible> {
        debug_assert!(heap + stack <= self.buf.len());
        self.reserve(heap, stack, bytes.len());
        let start = self.buf.len() - stack - bytes.len();
        self.buf[start..start + bytes.len()].copy_from_slice(bytes);
        Ok(())
    }

    /// Reserve `len` bytes below the stack region, growing the vector if
    /// needed. Never fails.
    #[inline(always)]
    fn pad_stack(&mut self, heap: usize, stack: usize, len: usize) -> Result<(), Infallible> {
        debug_assert!(heap + stack <= self.buf.len());
        self.reserve(heap, stack, len);

        // Padding bytes are zeroed only under test for deterministic output.
        #[cfg(test)]
        {
            let start = self.buf.len() - stack - len;
            self.buf[start..start + len].fill(0);
        }
        Ok(())
    }

    /// Move the first `len` bytes of the stack region down to position
    /// `heap`, extending the heap region.
    #[inline(always)]
    fn move_to_heap(&mut self, heap: usize, stack: usize, len: usize) {
        debug_assert!(heap + stack <= self.buf.len());
        debug_assert!(stack >= len);
        let src = self.buf.len() - stack;
        self.buf.copy_within(src..src + len, heap);
    }

    /// Grow the heap region by `len` bytes (resizing the vector if needed)
    /// and return the heap region including the new bytes. Never fails.
    #[inline(always)]
    fn reserve_heap(
        &mut self,
        heap: usize,
        stack: usize,
        len: usize,
    ) -> Result<&mut [u8], Infallible> {
        debug_assert!(heap + stack <= self.buf.len());
        self.reserve(heap, stack, len);
        Ok(&mut self.buf[..heap + len])
    }
}