use std::io;

use amplify::Wrapper;
use bitcoin::consensus::ReadExt;

use super::{Error, LightningDecode, LightningEncode};

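/// Variable-length integer ("BigSize") as defined in Lightning BOLT #1: a
/// big-endian variant of Bitcoin's CompactSize encoding, used for encoding
/// TLV record types and lengths.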
#[derive(Wrapper, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From)]
#[wrapper(
    FromStr,
    Display,
    Debug,
    Octal,
    LowerHex,
    UpperHex,
    Add,
    Sub,
    Mul,
    Div,
    Rem,
    Shl,
    Shr,
    Not,
    BitAnd,
    BitOr,
    BitXor,
    AddAssign,
    SubAssign,
    MulAssign,
    DivAssign,
    RemAssign,
    ShlAssign,
    ShrAssign,
    BitAndAssign,
    BitOrAssign,
    BitXorAssign
)]
#[from(u8)]
#[from(u16)]
#[from(u32)]
#[from(u64)]
pub struct BigSize(u64);

impl From<usize> for BigSize {
    fn from(val: usize) -> Self {
        (val as u64).into()
    }
}

// NB: conversions into narrower integer types truncate the inner value.
impl From<BigSize> for u8 {
    fn from(big_size: BigSize) -> Self {
        big_size.into_inner() as u8
    }
}

impl From<BigSize> for u16 {
    fn from(big_size: BigSize) -> Self {
        big_size.into_inner() as u16
    }
}

impl From<BigSize> for u32 {
    fn from(big_size: BigSize) -> Self {
        big_size.into_inner() as u32
    }
}

impl From<BigSize> for usize {
    fn from(big_size: BigSize) -> Self {
        big_size.into_inner() as usize
    }
}

impl BigSize {
    /// Returns the number of bytes the value occupies in BigSize encoding
    /// (1, 3, 5 or 9, depending on its magnitude).
    #[allow(clippy::len_without_is_empty)]
    pub fn len(self) -> usize {
        match self.0 {
            0..=0xFC => 1,
            0xFD..=0xFFFF => 3,
            0x10000..=0xFFFFFFFF => 5,
            _ => 9,
        }
    }
}

impl LightningEncode for BigSize {
    fn lightning_encode<E: io::Write>(&self, mut e: E) -> Result<usize, Error> {
        // Values up to 0xFC fit into a single byte; larger values get a
        // 0xFD/0xFE/0xFF prefix followed by the big-endian u16/u32/u64
        // representation respectively.
        let vec = match self.0 {
            0..=0xFC => vec![self.0 as u8],
            0xFD..=0xFFFF => {
                let mut result = (self.0 as u16).to_be_bytes().to_vec();
                result.insert(0, 0xFDu8);
                result
            }
            0x10000..=0xFFFFFFFF => {
                let mut result = (self.0 as u32).to_be_bytes().to_vec();
                result.insert(0, 0xFEu8);
                result
            }
            _ => {
                let mut result = self.0.to_be_bytes().to_vec();
                result.insert(0, 0xFF);
                result
            }
        };
        e.write_all(&vec)?;
        Ok(vec.len())
    }
}

impl LightningDecode for BigSize {
    fn lightning_decode<D: io::Read>(mut d: D) -> Result<Self, Error> {
        // The first byte selects the length of the encoding. Multi-byte
        // encodings must be minimal: a value that would have fit into a
        // shorter form is rejected as non-canonical.
        match d.read_u8().map_err(|_| Error::BigSizeNoValue)? {
            0xFFu8 => {
                let mut x = [0u8; 8];
                d.read_exact(&mut x).map_err(|_| Error::BigSizeEof)?;
                let value = u64::from_be_bytes(x);
                if value < 0x100000000 {
                    Err(Error::BigSizeNotCanonical)
                } else {
                    Ok(BigSize(value))
                }
            }
            0xFEu8 => {
                let mut x = [0u8; 4];
                d.read_exact(&mut x).map_err(|_| Error::BigSizeEof)?;
                let value = u32::from_be_bytes(x);
                if value < 0x10000 {
                    Err(Error::BigSizeNotCanonical)
                } else {
                    Ok(BigSize(value as u64))
                }
            }
            0xFDu8 => {
                let mut x = [0u8; 2];
                d.read_exact(&mut x).map_err(|_| Error::BigSizeEof)?;
                let value = u16::from_be_bytes(x);
                if value < 0xFD {
                    Err(Error::BigSizeNotCanonical)
                } else {
                    Ok(BigSize(value as u64))
                }
            }
            small => Ok(BigSize(small as u64)),
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    fn test_runner(value: u64, bytes: &[u8]) {
        let bigsize = BigSize(value);

        let encoded_bigsize = bigsize.lightning_serialize().unwrap();

        assert_eq!(encoded_bigsize, bytes);

        let decoded_bigsize =
            BigSize::lightning_deserialize(&encoded_bigsize).unwrap();

        assert_eq!(decoded_bigsize, bigsize);
    }

    #[test]
    fn test_1() {
        test_runner(0, &[0x00]);
        test_runner(252, &[0xfc]);
        test_runner(253, &[0xfd, 0x00, 0xfd]);
        test_runner(65535, &[0xfd, 0xff, 0xff]);
        test_runner(65536, &[0xfe, 0x00, 0x01, 0x00, 0x00]);
        test_runner(4294967295, &[0xfe, 0xff, 0xff, 0xff, 0xff]);
        test_runner(4294967296, &[
            0xff, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00,
        ]);
        test_runner(18446744073709551615, &[
            0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
        ]);
    }

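    // Added consistency check (not part of the original BOLT #1 test
    // vectors): `BigSize::len()` should always match the number of bytes
    // produced by the encoder.
    #[test]
    fn test_len_matches_encoding() {
        let values = [
            0u64,
            252,
            253,
            65535,
            65536,
            4294967295,
            4294967296,
            18446744073709551615,
        ];
        for &value in &values {
            let bigsize = BigSize(value);
            let encoded = bigsize.lightning_serialize().unwrap();
            assert_eq!(bigsize.len(), encoded.len());
        }
    }
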
    #[should_panic(expected = "BigSizeNotCanonical")]
    #[test]
    fn test_canonical_value_error_1() {
        BigSize::lightning_deserialize(&[0xfd, 0x00, 0xfc]).unwrap();
    }

    #[should_panic(expected = "BigSizeNotCanonical")]
    #[test]
    fn test_canonical_value_error_2() {
        BigSize::lightning_deserialize(&[0xfe, 0x00, 0x00, 0xff, 0xff])
            .unwrap();
    }

    #[should_panic(expected = "BigSizeNotCanonical")]
    #[test]
    fn test_canonical_value_error_3() {
        BigSize::lightning_deserialize(&[
            0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff,
        ])
        .unwrap();
    }

    #[should_panic(expected = "BigSizeEof")]
    #[test]
    fn test_eof_error_1() {
        BigSize::lightning_deserialize(&[0xfd, 0x00]).unwrap();
    }

    #[should_panic(expected = "BigSizeEof")]
    #[test]
    fn test_eof_error_2() {
        BigSize::lightning_deserialize(&[0xfe, 0xff, 0xff]).unwrap();
    }

    #[should_panic(expected = "BigSizeEof")]
    #[test]
    fn test_eof_error_3() {
        BigSize::lightning_deserialize(&[0xff, 0xff, 0xff, 0xff, 0xff])
            .unwrap();
    }

    #[should_panic(expected = "BigSizeEof")]
    #[test]
    fn test_eof_error_4() {
        BigSize::lightning_deserialize(&[0xfd]).unwrap();
    }

    #[should_panic(expected = "BigSizeEof")]
    #[test]
    fn test_eof_error_5() {
        BigSize::lightning_deserialize(&[0xfe]).unwrap();
    }

    #[should_panic(expected = "BigSizeEof")]
    #[test]
    fn test_eof_error_6() {
        BigSize::lightning_deserialize(&[0xff]).unwrap();
    }
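
    // Added sketch exercising the reader/writer-based `lightning_encode` /
    // `lightning_decode` methods directly (the tests above go through the
    // `lightning_serialize` / `lightning_deserialize` helpers).
    #[test]
    fn test_streaming_roundtrip() {
        let bigsize = BigSize(65536);
        let mut buf = Vec::new();
        // `lightning_encode` reports the number of bytes written.
        let written = bigsize.lightning_encode(&mut buf).unwrap();
        assert_eq!(written, buf.len());
        assert_eq!(written, bigsize.len());
        assert_eq!(buf, [0xfe, 0x00, 0x01, 0x00, 0x00]);
        let decoded = BigSize::lightning_decode(buf.as_slice()).unwrap();
        assert_eq!(decoded, bigsize);
    }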
}