//! I/O abstractions for `float_pigment_consistent_bincode` (io.rs).
//!
//! Minimal `no_std` replacements for `std::io::{Read, Write, Error, Result}`,
//! extended with byteorder-aware helpers for fixed-width integers and floats.
1use alloc::vec::Vec;
2use byteorder::ByteOrder;
3
/// An I/O error produced by the [`Read`] and [`Write`] traits in this module.
///
/// A minimal, `no_std`-friendly stand-in for `std::io::Error`: it carries
/// only an [`ErrorKind`] and no message or payload.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Error {
    // The only state: the category of failure that occurred.
    kind: ErrorKind,
}
8
9impl Error {
10    pub fn new(kind: ErrorKind) -> Self {
11        Self { kind }
12    }
13
14    pub fn kind(&self) -> ErrorKind {
15        self.kind
16    }
17
18    fn is_interrupted(&self) -> bool {
19        match self.kind {
20            ErrorKind::Unknown => false,
21            ErrorKind::UnexpectedEof => true,
22        }
23    }
24}
25
26impl core::fmt::Display for Error {
27    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
28        f.write_str(match self.kind {
29            ErrorKind::Unknown => "I/O Error",
30            ErrorKind::UnexpectedEof => "Unexpected EOF",
31        })
32    }
33}
34
/// The category of an I/O [`Error`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ErrorKind {
    /// A failure with no more specific classification
    /// (e.g. a writer that accepts zero bytes in `write_all`).
    Unknown,
    /// The source ran out of input before the requested number of bytes
    /// could be read.
    UnexpectedEof,
}
40
41impl From<ErrorKind> for Error {
42    fn from(kind: ErrorKind) -> Self {
43        Self { kind }
44    }
45}
46
// Marker impl so `Error` satisfies serde's error-trait bound; in `no_std`
// builds `serde::de::StdError` is serde's own trait, in `std` builds it
// re-exports `std::error::Error`. No methods need overriding.
impl serde::de::StdError for Error {}
48
/// Specialized `Result` type for this module's I/O operations.
pub type Result<T> = core::result::Result<T, Error>;
50
51pub trait Read {
52    fn read(&mut self, buf: &mut [u8]) -> Result<usize>;
53
54    fn read_exact(&mut self, mut buf: &mut [u8]) -> Result<()> {
55        while !buf.is_empty() {
56            match self.read(buf) {
57                Ok(0) => break,
58                Ok(n) => {
59                    buf = &mut buf[n..];
60                }
61                Err(ref e) if e.is_interrupted() => {}
62                Err(e) => return Err(e),
63            }
64        }
65        if !buf.is_empty() {
66            Err(Error::new(ErrorKind::UnexpectedEof))
67        } else {
68            Ok(())
69        }
70    }
71
72    fn read_u8(&mut self) -> Result<u8> {
73        let mut buf = [0; 1];
74        self.read_exact(&mut buf)?;
75        Ok(buf[0])
76    }
77
78    fn read_u16<T: ByteOrder + 'static>(&mut self) -> Result<u16> {
79        let mut buf = [0; 2];
80        self.read_exact(&mut buf)?;
81        Ok(T::read_u16(&buf))
82    }
83
84    fn read_u32<T: ByteOrder + 'static>(&mut self) -> Result<u32> {
85        let mut buf = [0; 4];
86        self.read_exact(&mut buf)?;
87        Ok(T::read_u32(&buf))
88    }
89
90    fn read_u64<T: ByteOrder + 'static>(&mut self) -> Result<u64> {
91        let mut buf = [0; 8];
92        self.read_exact(&mut buf)?;
93        Ok(T::read_u64(&buf))
94    }
95
96    fn read_u128<T: ByteOrder + 'static>(&mut self) -> Result<u128> {
97        let mut buf = [0; 16];
98        self.read_exact(&mut buf)?;
99        Ok(T::read_u128(&buf))
100    }
101
102    fn read_f32<T: ByteOrder + 'static>(&mut self) -> Result<f32> {
103        let mut buf = [0; 4];
104        self.read_exact(&mut buf)?;
105        Ok(T::read_f32(&buf))
106    }
107
108    fn read_f64<T: ByteOrder + 'static>(&mut self) -> Result<f64> {
109        let mut buf = [0; 8];
110        self.read_exact(&mut buf)?;
111        Ok(T::read_f64(&buf))
112    }
113}
114
115impl Read for &[u8] {
116    #[inline]
117    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
118        let amt = core::cmp::min(buf.len(), self.len());
119        let (a, b) = self.split_at(amt);
120
121        // First check if the amount of bytes we want to read is small:
122        // `copy_from_slice` will generally expand to a call to `memcpy`, and
123        // for a single byte the overhead is significant.
124        if amt == 1 {
125            buf[0] = a[0];
126        } else {
127            buf[..amt].copy_from_slice(a);
128        }
129
130        *self = b;
131        Ok(amt)
132    }
133}
134
// Forwarding impl: a mutable reference to any reader is itself a reader,
// so `read_exact` and friends can take `&mut R` without consuming it.
impl<R: Read + ?Sized> Read for &mut R {
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        // Deref twice (&mut &mut R -> R) and delegate.
        (**self).read(buf)
    }
}
140
141pub trait Write {
142    fn write(&mut self, buf: &[u8]) -> Result<usize>;
143
144    fn write_all(&mut self, mut buf: &[u8]) -> Result<()> {
145        while !buf.is_empty() {
146            match self.write(buf) {
147                Ok(0) => {
148                    return Err(ErrorKind::Unknown.into());
149                }
150                Ok(n) => buf = &buf[n..],
151                Err(ref e) if e.is_interrupted() => {}
152                Err(e) => return Err(e),
153            }
154        }
155        Ok(())
156    }
157
158    fn write_u8(&mut self, n: u8) -> Result<()> {
159        self.write_all(&[n])
160    }
161
162    fn write_u16<T: ByteOrder + 'static>(&mut self, n: u16) -> Result<()> {
163        let mut buf = [0; 2];
164        T::write_u16(&mut buf, n);
165        self.write_all(&buf)
166    }
167
168    fn write_u32<T: ByteOrder + 'static>(&mut self, n: u32) -> Result<()> {
169        let mut buf = [0; 4];
170        T::write_u32(&mut buf, n);
171        self.write_all(&buf)
172    }
173
174    fn write_u64<T: ByteOrder + 'static>(&mut self, n: u64) -> Result<()> {
175        let mut buf = [0; 8];
176        T::write_u64(&mut buf, n);
177        self.write_all(&buf)
178    }
179
180    fn write_u128<T: ByteOrder + 'static>(&mut self, n: u128) -> Result<()> {
181        let mut buf = [0; 16];
182        T::write_u128(&mut buf, n);
183        self.write_all(&buf)
184    }
185
186    fn write_f32<T: ByteOrder + 'static>(&mut self, n: f32) -> Result<()> {
187        let mut buf = [0; 4];
188        T::write_f32(&mut buf, n);
189        self.write_all(&buf)
190    }
191
192    fn write_f64<T: ByteOrder + 'static>(&mut self, n: f64) -> Result<()> {
193        let mut buf = [0; 8];
194        T::write_f64(&mut buf, n);
195        self.write_all(&buf)
196    }
197}
198
199impl Write for &mut Vec<u8> {
200    fn write(&mut self, buf: &[u8]) -> Result<usize> {
201        self.extend(buf.iter());
202        Ok(buf.len())
203    }
204}