#[cfg(feature = "std")]
use std::collections::HashMap as Map;
#[cfg(not(feature = "std"))]
use alloc::collections::BTreeMap as Map;
use elastic_array::ElasticArray1024;
#[cfg(feature = "std")]
use common::{BLOCKS_RLP_SWAPPER, SNAPSHOT_RLP_SWAPPER};
use {UntrustedRlp, Compressible, encode, RlpStream};

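/// Stores a bidirectional mapping between common valid RLPs and the shorter,
/// deliberately invalid RLPs they are swapped with during compression.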
pub struct InvalidRlpSwapper<'a> {
    invalid_to_valid: Map<&'a [u8], &'a [u8]>,
    valid_to_invalid: Map<&'a [u8], &'a [u8]>,
}

impl<'a> InvalidRlpSwapper<'a> {
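    /// Construct a swapper from a list of common RLPs and the invalid RLPs to swap them with.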
    pub fn new(rlps_to_swap: &[&'a [u8]], invalid_rlps: &[&'a [u8]]) -> Self {
        if rlps_to_swap.len() > 0x7e {
            panic!("Invalid usage, only 127 RLPs can be swappable.");
        }
        let mut invalid_to_valid = Map::new();
        let mut valid_to_invalid = Map::new();
        for (&rlp, &invalid) in rlps_to_swap.iter().zip(invalid_rlps.iter()) {
            invalid_to_valid.insert(invalid, rlp);
            valid_to_invalid.insert(rlp, invalid);
        }
        InvalidRlpSwapper {
            invalid_to_valid: invalid_to_valid,
            valid_to_invalid: valid_to_invalid,
        }
    }
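    /// Get the valid RLP corresponding to an invalid one, if it is known to the swapper.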
    fn get_valid(&self, invalid_rlp: &[u8]) -> Option<&[u8]> {
        self.invalid_to_valid.get(invalid_rlp).cloned()
    }
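    /// Get the invalid RLP corresponding to a valid one, if it is known to the swapper.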
    fn get_invalid(&self, valid_rlp: &[u8]) -> Option<&[u8]> {
        self.valid_to_invalid.get(valid_rlp).cloned()
    }
}

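/// Type of RLP to process, selecting the swapper and compression strategy.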
pub enum RlpType {
    /// RLP used in the blocks database.
    Blocks,
    /// RLP used in snapshots.
    Snapshot,
}

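/// Copy a byte slice into a fresh `ElasticArray1024`.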
fn to_elastic(slice: &[u8]) -> ElasticArray1024<u8> {
    let mut out = ElasticArray1024::new();
    out.append_slice(slice);
    out
}

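/// Rebuild a list RLP, replacing each item for which `f` returns `Some`;
/// returns `None` when no item was replaced.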
fn map_rlp<F>(rlp: &UntrustedRlp, f: F) -> Option<ElasticArray1024<u8>> where
    F: Fn(&UntrustedRlp) -> Option<ElasticArray1024<u8>> {
    // Fold over the items, tracking whether any of them was actually replaced.
    match rlp.iter()
        .fold((false, RlpStream::new_list(rlp.item_count().unwrap_or(0))),
            |(is_some, mut acc), subrlp| {
                let new = f(&subrlp);
                if let Some(ref insert) = new {
                    acc.append_raw(&insert[..], 1);
                } else {
                    acc.append_raw(subrlp.as_raw(), 1);
                }
                (is_some || new.is_some(), acc)
            }) {
        (true, s) => Some(s.drain()),
        _ => None,
    }
}

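/// Replace common RLPs with their shorter invalid counterparts, recursing through lists;
/// data items without a known swap are left untouched.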
fn simple_compress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> ElasticArray1024<u8> {
    if rlp.is_data() {
        to_elastic(swapper.get_invalid(rlp.as_raw()).unwrap_or_else(|| rlp.as_raw()))
    } else {
        map_rlp(rlp, |r| Some(simple_compress(r, swapper))).unwrap_or_else(|| to_elastic(rlp.as_raw()))
    }
}

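/// Inverse of `simple_compress`: replace invalid placeholder RLPs with the original valid ones.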
fn simple_decompress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> ElasticArray1024<u8> {
    if rlp.is_data() {
        to_elastic(swapper.get_valid(rlp.as_raw()).unwrap_or_else(|| rlp.as_raw()))
    } else {
        map_rlp(rlp, |r| Some(simple_decompress(r, swapper))).unwrap_or_else(|| to_elastic(rlp.as_raw()))
    }
}

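/// Replace common RLPs with invalid shorter ones and try to compress RLP nested inside
/// data items; returns `None` if no compression was achieved.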
fn deep_compress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Option<ElasticArray1024<u8>> {
    let simple_swap = ||
        swapper.get_invalid(rlp.as_raw()).map(to_elastic);
    if rlp.is_data() {
        // Try to treat the payload of the data item as RLP in its own right.
        return match rlp.payload_info() {
            // For payloads shorter than 70 bytes (the size of a basic account RLP in the
            // tests below), skip the recursion and just try to swap the whole value.
            Ok(ref p) if p.value_len < 70 => simple_swap(),
            _ => {
                if let Ok(d) = rlp.data() {
                    let internal_rlp = UntrustedRlp::new(d);
                    if let Some(new_d) = deep_compress(&internal_rlp, swapper) {
                        // Wrap the compressed payload in a special two-item list whose
                        // first element is the invalid code 0x81 0x7f.
                        let mut rlp = RlpStream::new_list(2);
                        rlp.append_raw(&[0x81, 0x7f], 1);
                        rlp.append_raw(&new_d[..], 1);
                        return Some(rlp.drain());
                    }
                }
                simple_swap()
            },
        };
    }
    // Recurse into list items, reporting whether anything was compressed.
    map_rlp(rlp, |r| deep_compress(r, swapper))
}

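/// Inverse of `deep_compress`: restore swapped values and unwrap payloads that were
/// compressed inside the special `0x81 0x7f` list; returns `None` if nothing changed.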
fn deep_decompress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Option<ElasticArray1024<u8>> {
    let simple_swap = ||
        swapper.get_valid(rlp.as_raw()).map(to_elastic);
    if rlp.is_data() { return simple_swap(); }
    match rlp.item_count().unwrap_or(0) {
        // A two-item list starting with the invalid code 0x81 0x7f marks a payload that
        // was compressed recursively: decompress it and re-encode it as a data item.
        2 if rlp.at(0).map(|r| r.as_raw() == &[0x81, 0x7f]).unwrap_or(false) =>
            rlp.at(1).ok().map_or(simple_swap(),
                |r| deep_decompress(&r, swapper).map(|d| { let v = d.to_vec(); encode(&v) })),
        // Otherwise recurse into list items.
        _ => map_rlp(rlp, |r| deep_decompress(r, swapper)),
    }
}

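// A minimal usage sketch of the `Compressible` entry points below (assuming `raw` holds
// well-formed RLP from the blocks database):
//
//     let compressed = UntrustedRlp::new(&raw).compress(RlpType::Blocks);
//     let restored = UntrustedRlp::new(&compressed).decompress(RlpType::Blocks);
//
// Decompressing the compressed form restores the original bytes, as exercised by the
// tests below.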
#[cfg(feature = "std")]
impl<'a> Compressible for UntrustedRlp<'a> {
    type DataType = RlpType;

    fn compress(&self, t: RlpType) -> ElasticArray1024<u8> {
        match t {
            RlpType::Snapshot => simple_compress(self, &SNAPSHOT_RLP_SWAPPER),
            RlpType::Blocks => deep_compress(self, &BLOCKS_RLP_SWAPPER).unwrap_or_else(|| to_elastic(self.as_raw())),
        }
    }

    fn decompress(&self, t: RlpType) -> ElasticArray1024<u8> {
        match t {
            RlpType::Snapshot => simple_decompress(self, &SNAPSHOT_RLP_SWAPPER),
            RlpType::Blocks => deep_decompress(self, &BLOCKS_RLP_SWAPPER).unwrap_or_else(|| to_elastic(self.as_raw())),
        }
    }
}

#[cfg(test)]
mod tests {
    use compression::InvalidRlpSwapper;
    use {UntrustedRlp, Compressible, RlpType};

    #[test]
    fn invalid_rlp_swapper() {
        let to_swap: &[&[u8]] = &[&[0x83, b'c', b'a', b't'], &[0x83, b'd', b'o', b'g']];
        let invalid_rlp: &[&[u8]] = &[&[0x81, 0x00], &[0x81, 0x01]];
        let swapper = InvalidRlpSwapper::new(to_swap, invalid_rlp);
        assert_eq!(Some(invalid_rlp[0]), swapper.get_invalid(&[0x83, b'c', b'a', b't']));
        assert_eq!(None, swapper.get_invalid(&[0x83, b'b', b'a', b't']));
        assert_eq!(Some(to_swap[1]), swapper.get_valid(invalid_rlp[1]));
    }

    #[test]
    fn simple_compression() {
        let basic_account_rlp = vec![248, 68, 4, 2, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112];
        let rlp = UntrustedRlp::new(&basic_account_rlp);
        let compressed = rlp.compress(RlpType::Snapshot).to_vec();
        assert_eq!(compressed, vec![198, 4, 2, 129, 0, 129, 1]);
        let compressed_rlp = UntrustedRlp::new(&compressed);
        assert_eq!(compressed_rlp.decompress(RlpType::Snapshot).to_vec(), basic_account_rlp);
    }

    #[test]
    fn data_compression() {
        let data_basic_account_rlp = vec![184, 70, 248, 68, 4, 2, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112];
        let data_rlp = UntrustedRlp::new(&data_basic_account_rlp);
        let compressed = data_rlp.compress(RlpType::Blocks).to_vec();
        assert_eq!(compressed, vec![201, 129, 127, 198, 4, 2, 129, 0, 129, 1]);
        let compressed_rlp = UntrustedRlp::new(&compressed);
        assert_eq!(compressed_rlp.decompress(RlpType::Blocks).to_vec(), data_basic_account_rlp);
    }

    #[test]
    fn nested_list_rlp() {
        let nested_basic_account_rlp = vec![228, 4, 226, 2, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33];
        let nested_rlp = UntrustedRlp::new(&nested_basic_account_rlp);
        let compressed = nested_rlp.compress(RlpType::Blocks).to_vec();
        assert_eq!(compressed, vec![197, 4, 195, 2, 129, 0]);
        let compressed_rlp = UntrustedRlp::new(&compressed);
        assert_eq!(compressed_rlp.decompress(RlpType::Blocks).to_vec(), nested_basic_account_rlp);
        let compressed = nested_rlp.compress(RlpType::Snapshot).to_vec();
        assert_eq!(compressed, vec![197, 4, 195, 2, 129, 0]);
        let compressed_rlp = UntrustedRlp::new(&compressed);
        assert_eq!(compressed_rlp.decompress(RlpType::Snapshot).to_vec(), nested_basic_account_rlp);
    }

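    // Round-trip sketch for the blocks swapper on the bare account RLP: decompressing the
    // compressed form is expected to restore the original bytes (the test name and exact
    // coverage are assumptions; expected compressed bytes are deliberately not asserted).
    #[test]
    fn blocks_roundtrip() {
        let basic_account_rlp = vec![248, 68, 4, 2, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112];
        let rlp = UntrustedRlp::new(&basic_account_rlp);
        let compressed = rlp.compress(RlpType::Blocks).to_vec();
        let compressed_rlp = UntrustedRlp::new(&compressed);
        assert_eq!(compressed_rlp.decompress(RlpType::Blocks).to_vec(), basic_account_rlp);
    }
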
    #[test]
    fn malformed_rlp() {
        let malformed = vec![248, 81, 128, 128, 128, 128, 128, 160, 12, 51, 241, 93, 69, 218, 74, 138, 79, 115, 227, 44, 216, 81, 46, 132, 85, 235, 96, 45, 252, 48, 181, 29, 75, 141, 217, 215, 86, 160, 109, 130, 160, 140, 36, 93, 200, 109, 215, 100, 241, 246, 99, 135, 92, 168, 149, 170, 114, 9, 143, 4, 93, 25, 76, 54, 176, 119, 230, 170, 154, 105, 47, 121, 10, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128];
        let malformed_rlp = UntrustedRlp::new(&malformed);
        assert_eq!(malformed_rlp.decompress(RlpType::Blocks).to_vec(), malformed);
    }
}