//! bytesbuf_io/testing/null.rs
use std::convert::Infallible;
5
6use bytesbuf::mem::testing::TransparentMemory;
7use bytesbuf::mem::{HasMemory, Memory, MemoryShared, OpaqueMemory};
8use bytesbuf::{BytesBuf, BytesView};
9
10use crate::{Read, Write};
11
/// A null I/O endpoint for testing: every read completes immediately with
/// zero bytes and every write accepts and discards its input.
#[derive(Debug)]
pub struct Null {
    // Memory provider backing `reserve()`; exposed to callers via `memory()`.
    memory: OpaqueMemory,
}
20
21impl Null {
22 #[must_use]
24 pub fn builder() -> NullBuilder {
25 NullBuilder {
26 memory: OpaqueMemory::new(TransparentMemory::new()),
27 }
28 }
29
30 #[must_use]
32 pub fn new() -> Self {
33 Self::builder().build()
34 }
35
36 #[cfg_attr(test, mutants::skip)] #[expect(
43 clippy::needless_pass_by_ref_mut,
44 clippy::unused_async,
45 reason = "API compatibility between trait and inherent fn"
46 )]
47 pub async fn read_at_most_into(&mut self, _len: usize, into: BytesBuf) -> Result<(usize, BytesBuf), Infallible> {
48 Ok((0, into))
49 }
50
51 #[cfg_attr(test, mutants::skip)] #[expect(
58 clippy::needless_pass_by_ref_mut,
59 clippy::unused_async,
60 reason = "API compatibility between trait and inherent fn"
61 )]
62 pub async fn read_more_into(&mut self, into: BytesBuf) -> Result<(usize, BytesBuf), Infallible> {
63 Ok((0, into))
64 }
65
66 #[cfg_attr(test, mutants::skip)] #[expect(
73 clippy::needless_pass_by_ref_mut,
74 clippy::unused_async,
75 reason = "API compatibility between trait and inherent fn"
76 )]
77 pub async fn read_any(&mut self) -> Result<BytesBuf, Infallible> {
78 Ok(BytesBuf::default())
79 }
80
81 #[cfg_attr(test, mutants::skip)] #[expect(
88 clippy::needless_pass_by_ref_mut,
89 clippy::unused_async,
90 reason = "API compatibility between trait and inherent fn"
91 )]
92 pub async fn write(&mut self, _sequence: BytesView) -> Result<(), Infallible> {
93 Ok(())
94 }
95
96 #[must_use]
98 pub fn memory(&self) -> impl MemoryShared {
99 self.memory.clone()
100 }
101
102 #[must_use]
120 pub fn reserve(&self, min_bytes: usize) -> BytesBuf {
121 self.memory.reserve(min_bytes)
122 }
123}
124
125impl Default for Null {
126 fn default() -> Self {
127 Self::new()
128 }
129}
130
131#[cfg_attr(coverage_nightly, coverage(off))] impl Read for Null {
133 type Error = Infallible;
134
135 #[cfg_attr(test, mutants::skip)] async fn read_at_most_into(&mut self, len: usize, into: BytesBuf) -> Result<(usize, BytesBuf), Infallible> {
137 self.read_at_most_into(len, into).await
138 }
139
140 #[cfg_attr(test, mutants::skip)] async fn read_more_into(&mut self, into: BytesBuf) -> Result<(usize, BytesBuf), Infallible> {
142 self.read_more_into(into).await
143 }
144
145 #[cfg_attr(test, mutants::skip)] async fn read_any(&mut self) -> Result<BytesBuf, Infallible> {
147 self.read_any().await
148 }
149}
150
151#[cfg_attr(coverage_nightly, coverage(off))] impl Write for Null {
153 type Error = Infallible;
154
155 #[cfg_attr(test, mutants::skip)] async fn write(&mut self, _sequence: BytesView) -> Result<(), Infallible> {
157 Ok(())
158 }
159}
160
161#[cfg_attr(coverage_nightly, coverage(off))] impl HasMemory for Null {
163 #[cfg_attr(test, mutants::skip)] fn memory(&self) -> impl MemoryShared {
165 self.memory()
166 }
167}
168
169#[cfg_attr(coverage_nightly, coverage(off))] impl Memory for Null {
171 #[cfg_attr(test, mutants::skip)] fn reserve(&self, min_bytes: usize) -> BytesBuf {
173 self.reserve(min_bytes)
174 }
175}
176
/// Builder for [`Null`], allowing the memory provider to be customized
/// before construction.
#[derive(Debug)]
pub struct NullBuilder {
    // Memory provider installed into the built `Null`.
    memory: OpaqueMemory,
}
184
185impl NullBuilder {
186 #[must_use]
194 pub fn memory(mut self, memory: OpaqueMemory) -> Self {
195 self.memory = memory;
196 self
197 }
198
199 #[must_use]
201 pub fn build(self) -> Null {
202 Null { memory: self.memory }
203 }
204}
205
#[cfg(test)]
#[cfg_attr(coverage_nightly, coverage(off))]
mod tests {
    use std::sync::Arc;
    use std::sync::atomic::{AtomicBool, Ordering};

    use bytesbuf::mem::CallbackMemory;
    use testing_aids::execute_or_terminate_process;

    use super::*;

    #[test]
    fn smoke_test() {
        execute_or_terminate_process(|| {
            futures::executor::block_on(async {
                let mut device = Null::new();

                // Reservations are served by the configured memory provider.
                let buf = device.reserve(1000);
                assert!(buf.remaining_capacity() >= 1000);

                // Every read variant yields zero bytes and an empty buffer.
                let (n, buf) = device.read_at_most_into(100, buf).await.unwrap();
                assert_eq!(n, 0);
                assert_eq!(buf.len(), 0);

                let (n, buf) = device.read_more_into(buf).await.unwrap();
                assert_eq!(n, 0);
                assert_eq!(buf.len(), 0);

                let mut buf = device.read_any().await.unwrap();
                assert_eq!(buf.len(), 0);

                // Writes accept any view and silently discard it.
                device.write(buf.consume_all()).await.unwrap();
            });
        });
    }

    #[test]
    fn default_returns_working_instance() {
        execute_or_terminate_process(|| {
            futures::executor::block_on(async {
                let mut device = Null::default();

                let buf = device.reserve(100);
                assert!(buf.remaining_capacity() >= 100);

                let (n, _) = device.read_at_most_into(10, BytesBuf::new()).await.unwrap();
                assert_eq!(n, 0);

                device.write(BytesView::default()).await.unwrap();
            });
        });
    }

    #[test]
    fn memory_returns_configured_provider() {
        let invoked = Arc::new(AtomicBool::new(false));

        // A provider that records whether it was ever asked for memory.
        let custom_memory = OpaqueMemory::new(CallbackMemory::new({
            let invoked = Arc::clone(&invoked);
            move |min_bytes| {
                invoked.store(true, Ordering::SeqCst);
                TransparentMemory::new().reserve(min_bytes)
            }
        }));

        let null_stream = Null::builder().memory(custom_memory).build();

        // Reserving through the handle returned by `memory()` must hit the
        // configured provider, proving it was installed (not replaced).
        let provider = null_stream.memory();
        let _buf = provider.reserve(10);

        assert!(
            invoked.load(Ordering::SeqCst),
            "Custom memory callback should have been called"
        );
    }
}