1use crate::{
7 codec::{EncodeSize, Read, Write},
8 error::Error,
9 RangeCfg,
10};
11use bytes::{Buf, BufMut};
12use std::{
13 cmp::Ordering,
14 collections::{BTreeMap, HashMap},
15 hash::Hash,
16};
17
// Type names reported in `Error::Invalid` when map decoding fails, so the
// error identifies which collection rejected the input.
const BTREEMAP_TYPE: &str = "BTreeMap";
const HASHMAP_TYPE: &str = "HashMap";
20
21fn read_ordered_map<K, V, F>(
23 buf: &mut impl Buf,
24 len: usize,
25 k_cfg: &K::Cfg,
26 v_cfg: &V::Cfg,
27 mut insert: F,
28 map_type: &'static str,
29) -> Result<(), Error>
30where
31 K: Read + Ord,
32 V: Read,
33 F: FnMut(K, V) -> Option<V>,
34{
35 let mut last: Option<(K, V)> = None;
36 for _ in 0..len {
37 let key = K::read_cfg(buf, k_cfg)?;
39
40 if let Some((ref last_key, _)) = last {
42 match key.cmp(last_key) {
43 Ordering::Equal => return Err(Error::Invalid(map_type, "Duplicate key")),
44 Ordering::Less => return Err(Error::Invalid(map_type, "Keys must ascend")),
45 _ => {}
46 }
47 }
48
49 let value = V::read_cfg(buf, v_cfg)?;
51
52 if let Some((last_key, last_value)) = last.take() {
54 insert(last_key, last_value);
55 }
56 last = Some((key, value));
57 }
58
59 if let Some((last_key, last_value)) = last {
61 insert(last_key, last_value);
62 }
63
64 Ok(())
65}
66
67impl<K: Ord + Hash + Eq + Write, V: Write> Write for BTreeMap<K, V> {
70 fn write(&self, buf: &mut impl BufMut) {
71 self.len().write(buf);
72
73 for (k, v) in self {
75 k.write(buf);
76 v.write(buf);
77 }
78 }
79}
80
81impl<K: Ord + Hash + Eq + EncodeSize, V: EncodeSize> EncodeSize for BTreeMap<K, V> {
82 fn encode_size(&self) -> usize {
83 let mut size = self.len().encode_size();
85
86 for (k, v) in self {
88 size += k.encode_size();
89 size += v.encode_size();
90 }
91 size
92 }
93}
94
95impl<K: Read + Clone + Ord + Hash + Eq, V: Read + Clone> Read for BTreeMap<K, V> {
96 type Cfg = (RangeCfg, (K::Cfg, V::Cfg));
97
98 fn read_cfg(buf: &mut impl Buf, (range, (k_cfg, v_cfg)): &Self::Cfg) -> Result<Self, Error> {
99 let len = usize::read_cfg(buf, range)?;
101 let mut map = BTreeMap::new();
102
103 read_ordered_map(
105 buf,
106 len,
107 k_cfg,
108 v_cfg,
109 |k, v| map.insert(k, v),
110 BTREEMAP_TYPE,
111 )?;
112
113 Ok(map)
114 }
115}
116
117impl<K: Ord + Hash + Eq + Write, V: Write> Write for HashMap<K, V> {
120 fn write(&self, buf: &mut impl BufMut) {
121 self.len().write(buf);
122
123 let mut entries: Vec<_> = self.iter().collect();
125 entries.sort_by(|a, b| a.0.cmp(b.0));
126 for (k, v) in entries {
127 k.write(buf);
128 v.write(buf);
129 }
130 }
131}
132
133impl<K: Ord + Hash + Eq + EncodeSize, V: EncodeSize> EncodeSize for HashMap<K, V> {
134 fn encode_size(&self) -> usize {
135 let mut size = self.len().encode_size();
137
138 for (k, v) in self {
141 size += k.encode_size();
142 size += v.encode_size();
143 }
144 size
145 }
146}
147
148impl<K: Read + Clone + Ord + Hash + Eq, V: Read + Clone> Read for HashMap<K, V> {
150 type Cfg = (RangeCfg, (K::Cfg, V::Cfg));
151
152 fn read_cfg(buf: &mut impl Buf, (range, (k_cfg, v_cfg)): &Self::Cfg) -> Result<Self, Error> {
153 let len = usize::read_cfg(buf, range)?;
155 let mut map = HashMap::with_capacity(len);
156
157 read_ordered_map(
159 buf,
160 len,
161 k_cfg,
162 v_cfg,
163 |k, v| map.insert(k, v),
164 HASHMAP_TYPE,
165 )?;
166
167 Ok(map)
168 }
169}
170
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{Decode, Encode, FixedSize};
    use bytes::{Bytes, BytesMut};
    use std::fmt::Debug;

    /// Round-trips `map` through encode/decode and asserts equality, also
    /// checking that `encode_size` matches the actual encoded length.
    fn round_trip_btree<K, V, KCfg, VCfg>(
        map: &BTreeMap<K, V>,
        range_cfg: RangeCfg,
        k_cfg: KCfg,
        v_cfg: VCfg,
    ) where
        K: Write + EncodeSize + Read<Cfg = KCfg> + Clone + Ord + Hash + Eq + PartialEq + Debug,
        V: Write + EncodeSize + Read<Cfg = VCfg> + Clone + PartialEq + Debug,
        BTreeMap<K, V>: Read<Cfg = (RangeCfg, (K::Cfg, V::Cfg))>
            + Decode<Cfg = (RangeCfg, (K::Cfg, V::Cfg))>
            + PartialEq
            + Write
            + EncodeSize,
    {
        let encoded = map.encode();
        assert_eq!(encoded.len(), map.encode_size());
        let config_tuple = (range_cfg, (k_cfg, v_cfg));
        let decoded = BTreeMap::<K, V>::decode_cfg(encoded, &config_tuple)
            .expect("decode_cfg failed for BTreeMap");
        assert_eq!(map, &decoded);
    }

    /// Round-trips `map` through encode/decode and asserts equality, also
    /// checking that `encode_size` matches the actual encoded length.
    ///
    /// NOTE: a second, near-identical helper (`round_trip`) used to exist; it
    /// has been consolidated into this one.
    fn round_trip_hash<K, V, KCfg, VCfg>(
        map: &HashMap<K, V>,
        range_cfg: RangeCfg,
        k_cfg: KCfg,
        v_cfg: VCfg,
    ) where
        K: Write + EncodeSize + Read<Cfg = KCfg> + Clone + Ord + Hash + Eq + PartialEq + Debug,
        V: Write + EncodeSize + Read<Cfg = VCfg> + Clone + PartialEq + Debug,
        HashMap<K, V>: Read<Cfg = (RangeCfg, (K::Cfg, V::Cfg))>
            + Decode<Cfg = (RangeCfg, (K::Cfg, V::Cfg))>
            + PartialEq
            + Write
            + EncodeSize,
    {
        let encoded = map.encode();
        assert_eq!(encoded.len(), map.encode_size());
        let config_tuple = (range_cfg, (k_cfg, v_cfg));
        let decoded = HashMap::<K, V>::decode_cfg(encoded, &config_tuple)
            .expect("decode_cfg failed for HashMap");
        assert_eq!(map, &decoded);
    }

    #[test]
    fn test_empty_btreemap() {
        let map = BTreeMap::<u32, u64>::new();
        round_trip_btree(&map, (..).into(), (), ());
        // An empty map is exactly one byte: the zero length prefix.
        assert_eq!(map.encode_size(), 1);
        let encoded = map.encode();
        assert_eq!(encoded, Bytes::from_static(&[0]));
    }

    #[test]
    fn test_simple_btreemap_u32_u64() {
        let mut map = BTreeMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);
        map.insert(2u32, 200u64);
        round_trip_btree(&map, (..).into(), (), ());
        assert_eq!(map.encode_size(), 1 + 3 * (u32::SIZE + u64::SIZE));

        // Entries must be encoded in ascending key order.
        let mut expected = BytesMut::new();
        3usize.write(&mut expected);
        1u32.write(&mut expected);
        100u64.write(&mut expected);
        2u32.write(&mut expected);
        200u64.write(&mut expected);
        5u32.write(&mut expected);
        500u64.write(&mut expected);
        assert_eq!(map.encode(), expected.freeze());
    }

    #[test]
    fn test_large_btreemap() {
        // Fixed-size keys/values.
        let mut map = BTreeMap::new();
        for i in 0..1000 {
            map.insert(i as u16, i as u64 * 2);
        }
        round_trip_btree(&map, (0..=1000).into(), (), ());

        // Range-configured (varint) keys/values.
        let mut map = BTreeMap::new();
        for i in 0..1000usize {
            map.insert(i, 1000usize + i);
        }
        round_trip_btree(
            &map,
            (0..=1000).into(),
            (..=1000).into(),
            (1000..=2000).into(),
        );
    }

    #[test]
    fn test_btreemap_with_variable_values() {
        let mut map = BTreeMap::new();
        map.insert(Bytes::from_static(b"apple"), vec![1, 2]);
        map.insert(Bytes::from_static(b"banana"), vec![3, 4, 5]);
        map.insert(Bytes::from_static(b"cherry"), vec![]);

        let map_range = 0..=10;
        let key_range = ..=10;
        let val_range = 0..=100;

        round_trip_btree(
            &map,
            map_range.into(),
            key_range.into(),
            (val_range.into(), ()),
        );
    }

    #[test]
    fn test_btreemap_decode_length_limit_exceeded() {
        let mut map = BTreeMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);

        // Two entries, but the decoder only allows up to one.
        let encoded = map.encode();
        let config_tuple = ((0..=1).into(), ((), ()));

        let result = BTreeMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(result, Err(Error::InvalidLength(2))));
    }

    #[test]
    fn test_btreemap_decode_value_length_limit_exceeded() {
        let mut map = BTreeMap::new();
        map.insert(Bytes::from_static(b"key1"), vec![1, 2, 3, 4, 5]);

        // The value has 5 elements, but the decoder only allows up to 3.
        let key_range = ..=10;
        let map_range = 0..=10;
        let restrictive_val_range = 0..=3;

        let encoded = map.encode();
        let config_tuple = (
            map_range.into(),
            (key_range.into(), (restrictive_val_range.into(), ())),
        );
        let result = BTreeMap::<Bytes, Vec<u8>>::decode_cfg(encoded, &config_tuple);

        assert!(matches!(result, Err(Error::InvalidLength(5))));
    }

    #[test]
    fn test_btreemap_decode_invalid_key_order() {
        // Key 5 encoded before key 2: keys are not ascending.
        let mut encoded = BytesMut::new();
        2usize.write(&mut encoded);
        5u32.write(&mut encoded);
        500u64.write(&mut encoded);
        2u32.write(&mut encoded);
        200u64.write(&mut encoded);

        let range = (..).into();
        let config_tuple = (range, ((), ()));

        let result = BTreeMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(
            result,
            Err(Error::Invalid("BTreeMap", "Keys must ascend"))
        ));
    }

    #[test]
    fn test_btreemap_decode_duplicate_key() {
        // Key 1 appears twice.
        let mut encoded = BytesMut::new();
        2usize.write(&mut encoded);
        1u32.write(&mut encoded);
        100u64.write(&mut encoded);
        1u32.write(&mut encoded);
        200u64.write(&mut encoded);

        let range = (..).into();
        let config_tuple = (range, ((), ()));

        let result = BTreeMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(
            result,
            Err(Error::Invalid("BTreeMap", "Duplicate key"))
        ));
    }

    #[test]
    fn test_btreemap_decode_end_of_buffer_key() {
        let mut map = BTreeMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);

        // Truncate mid-way through the second entry's key.
        let mut encoded = map.encode();
        encoded.truncate(map.encode_size() - 10);

        let range = (..).into();
        let config_tuple = (range, ((), ()));
        let result = BTreeMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(result, Err(Error::EndOfBuffer)));
    }

    #[test]
    fn test_btreemap_decode_end_of_buffer_value() {
        let mut map = BTreeMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);

        // Truncate mid-way through the second entry's value.
        let mut encoded = map.encode();
        encoded.truncate(map.encode_size() - 4);

        let range = (..).into();
        let config_tuple = (range, ((), ()));
        let result = BTreeMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(result, Err(Error::EndOfBuffer)));
    }

    #[test]
    fn test_btreemap_decode_extra_data() {
        let mut map = BTreeMap::new();
        map.insert(1u32, 100u64);

        // Append a trailing byte: `decode_cfg` must reject it...
        let mut encoded = map.encode();
        encoded.put_u8(0xFF);

        let config_tuple = ((..).into(), ((), ()));
        let result = BTreeMap::<u32, u64>::decode_cfg(encoded.clone(), &config_tuple);
        assert!(matches!(result, Err(Error::ExtraData(1))));

        // ...while `read_cfg` stops after the map and leaves the rest unread.
        let read_result = BTreeMap::<u32, u64>::read_cfg(&mut encoded.clone(), &config_tuple);
        assert!(read_result.is_ok());
        let decoded_map = read_result.unwrap();
        assert_eq!(decoded_map.len(), 1);
        assert_eq!(decoded_map.get(&1u32), Some(&100u64));
    }

    #[test]
    fn test_btreemap_deterministic_encoding() {
        // Insertion order must not affect the encoding.
        let mut map2 = BTreeMap::new();
        (0..=1000u32).for_each(|i| {
            map2.insert(i, i * 2);
        });

        let mut map1 = BTreeMap::new();
        (0..=1000u32).rev().for_each(|i| {
            map1.insert(i, i * 2);
        });

        assert_eq!(map1.encode(), map2.encode());
    }

    #[test]
    fn test_btreemap_conformity() {
        // Empty map: just the zero length prefix.
        let map1 = BTreeMap::<u8, u16>::new();
        let mut expected1 = BytesMut::new();
        0usize.write(&mut expected1);
        assert_eq!(map1.encode(), expected1.freeze());
        assert_eq!(map1.encode_size(), 1);

        // Fixed-size entries, ascending key order regardless of insertion.
        let mut map2 = BTreeMap::<u8, u16>::new();
        map2.insert(2u8, 0xBBBBu16);
        map2.insert(1u8, 0xAAAAu16);

        let mut expected2 = BytesMut::new();
        2usize.write(&mut expected2);
        1u8.write(&mut expected2);
        0xAAAAu16.write(&mut expected2);
        2u8.write(&mut expected2);
        0xBBBBu16.write(&mut expected2);
        assert_eq!(map2.encode(), expected2.freeze());
        assert_eq!(map2.encode_size(), 1 + (u8::SIZE + u16::SIZE) * 2);

        let mut map3 = BTreeMap::<u16, bool>::new();
        map3.insert(0x0303u16, true);
        map3.insert(0x0101u16, false);
        map3.insert(0x0202u16, true);

        let mut expected3 = BytesMut::new();
        3usize.write(&mut expected3);
        0x0101u16.write(&mut expected3);
        false.write(&mut expected3);
        0x0202u16.write(&mut expected3);
        true.write(&mut expected3);
        0x0303u16.write(&mut expected3);
        true.write(&mut expected3);
        assert_eq!(map3.encode(), expected3.freeze());
        assert_eq!(map3.encode_size(), 1 + (u16::SIZE + bool::SIZE) * 3);

        // Variable-length keys and values.
        let mut map4 = BTreeMap::<Bytes, Vec<u8>>::new();
        map4.insert(Bytes::from_static(b"b"), vec![20u8, 21u8]);
        map4.insert(Bytes::from_static(b"a"), vec![10u8]);

        let mut expected4 = BytesMut::new();
        2usize.write(&mut expected4);
        Bytes::from_static(b"a").write(&mut expected4);
        vec![10u8].write(&mut expected4);
        Bytes::from_static(b"b").write(&mut expected4);
        vec![20u8, 21u8].write(&mut expected4);
        assert_eq!(map4.encode(), expected4.freeze());

        let expected_size = 1usize.encode_size()
            + Bytes::from_static(b"a").encode_size()
            + vec![10u8].encode_size()
            + Bytes::from_static(b"b").encode_size()
            + vec![20u8, 21u8].encode_size();
        assert_eq!(map4.encode_size(), expected_size);
    }

    #[test]
    fn test_empty_hashmap() {
        let map = HashMap::<u32, u64>::new();
        round_trip_hash(&map, (..).into(), (), ());
        assert_eq!(map.encode_size(), 1);
        let encoded = map.encode();
        assert_eq!(encoded, 0usize.encode());
    }

    #[test]
    fn test_simple_hashmap_u32_u64() {
        let mut map = HashMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);
        map.insert(2u32, 200u64);
        round_trip_hash(&map, (..).into(), (), ());
        assert_eq!(map.encode_size(), 1 + 3 * (u32::SIZE + u64::SIZE));
    }

    #[test]
    fn test_large_hashmap() {
        // Fixed-size keys/values.
        let mut map = HashMap::new();
        for i in 0..1000 {
            map.insert(i as u16, i as u64 * 2);
        }
        round_trip_hash(&map, (0..=1000).into(), (), ());

        // Range-configured (varint) keys/values.
        let mut map = HashMap::new();
        for i in 0..1000usize {
            map.insert(i, 1000usize + i);
        }
        round_trip_hash(
            &map,
            (0..=1000).into(),
            (..=1000).into(),
            (1000..=2000).into(),
        );
    }

    #[test]
    fn test_hashmap_with_variable_values() {
        let mut map = HashMap::new();
        map.insert(Bytes::from_static(b"apple"), vec![1, 2]);
        map.insert(Bytes::from_static(b"banana"), vec![3, 4, 5]);
        map.insert(Bytes::from_static(b"cherry"), vec![]);

        let map_range = RangeCfg::from(0..=10);
        let key_range = RangeCfg::from(..=10);
        let val_range = RangeCfg::from(0..=100);

        round_trip_hash(&map, map_range, key_range, (val_range, ()));
    }

    #[test]
    fn test_hashmap_decode_length_limit_exceeded() {
        let mut map = HashMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);

        // Two entries, but the decoder only allows up to one.
        let encoded = map.encode();
        let config_tuple = ((0..=1).into(), ((), ()));

        let result = HashMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(result, Err(Error::InvalidLength(2))));
    }

    #[test]
    fn test_hashmap_decode_value_length_limit_exceeded() {
        let mut map = HashMap::new();
        map.insert(Bytes::from_static(b"key1"), vec![1u8, 2u8, 3u8, 4u8, 5u8]);

        // The value has 5 elements, but the decoder only allows up to 3.
        let key_range = RangeCfg::from(..=10);
        let map_range = RangeCfg::from(0..=10);
        let restrictive_val_range = RangeCfg::from(0..=3);

        let encoded = map.encode();
        let config_tuple = (map_range, (key_range, (restrictive_val_range, ())));
        let result = HashMap::<Bytes, Vec<u8>>::decode_cfg(encoded, &config_tuple);

        assert!(matches!(result, Err(Error::InvalidLength(5))));
    }

    #[test]
    fn test_hashmap_decode_invalid_key_order() {
        // Key 5 encoded before key 2: keys are not ascending.
        let mut encoded = BytesMut::new();
        2usize.write(&mut encoded);
        5u32.write(&mut encoded);
        500u64.write(&mut encoded);
        2u32.write(&mut encoded);
        200u64.write(&mut encoded);

        let range = (..).into();
        let config_tuple = (range, ((), ()));

        let result = HashMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(
            result,
            Err(Error::Invalid("HashMap", "Keys must ascend"))
        ));
    }

    #[test]
    fn test_hashmap_decode_duplicate_key() {
        // Key 1 appears twice.
        let mut encoded = BytesMut::new();
        2usize.write(&mut encoded);
        1u32.write(&mut encoded);
        100u64.write(&mut encoded);
        1u32.write(&mut encoded);
        200u64.write(&mut encoded);

        let range = (..).into();
        let config_tuple = (range, ((), ()));

        let result = HashMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(
            result,
            Err(Error::Invalid("HashMap", "Duplicate key"))
        ));
    }

    #[test]
    fn test_hashmap_decode_end_of_buffer_key() {
        let mut map = HashMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);

        // Truncate mid-way through the second entry's key.
        let mut encoded = map.encode();
        encoded.truncate(map.encode_size() - 10);

        let range = (..).into();
        let config_tuple = (range, ((), ()));
        let result = HashMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(result, Err(Error::EndOfBuffer)));
    }

    #[test]
    fn test_hashmap_decode_end_of_buffer_value() {
        let mut map = HashMap::new();
        map.insert(1u32, 100u64);
        map.insert(5u32, 500u64);

        // Truncate mid-way through the second entry's value.
        let mut encoded = map.encode();
        encoded.truncate(map.encode_size() - 4);

        let range = RangeCfg::from(..);
        let config_tuple = (range, ((), ()));
        let result = HashMap::<u32, u64>::decode_cfg(encoded, &config_tuple);
        assert!(matches!(result, Err(Error::EndOfBuffer)));
    }

    #[test]
    fn test_hashmap_decode_extra_data() {
        let mut map = HashMap::new();
        map.insert(1u32, 100u64);

        // Append a trailing byte: `decode_cfg` must reject it...
        let mut encoded = map.encode();
        encoded.put_u8(0xFF);

        let config_tuple = ((..).into(), ((), ()));
        let result = HashMap::<u32, u64>::decode_cfg(encoded.clone(), &config_tuple);
        assert!(matches!(result, Err(Error::ExtraData(1))));

        // ...while `read_cfg` stops after the map and leaves the rest unread.
        let read_result = HashMap::<u32, u64>::read_cfg(&mut encoded, &config_tuple);
        assert!(read_result.is_ok());
        let decoded_map = read_result.unwrap();
        assert_eq!(decoded_map.len(), 1);
        assert_eq!(decoded_map.get(&1u32), Some(&100u64));
    }

    #[test]
    fn test_hashmap_deterministic_encoding() {
        // Insertion order must not affect the encoding.
        let mut map2 = HashMap::new();
        (0..=1000u32).for_each(|i| {
            map2.insert(i, i * 2);
        });

        let mut map1 = HashMap::new();
        (0..=1000u32).rev().for_each(|i| {
            map1.insert(i, i * 2);
        });

        assert_eq!(map1.encode(), map2.encode());
    }

    #[test]
    fn test_hashmap_conformity() {
        // Empty map: just the zero length prefix.
        let map1 = HashMap::<u8, u16>::new();
        let mut expected1 = BytesMut::new();
        0usize.write(&mut expected1);
        assert_eq!(map1.encode(), expected1.freeze());
        assert_eq!(map1.encode_size(), 1);

        // Fixed-size entries, emitted in ascending key order.
        let mut map2 = HashMap::<u8, u16>::new();
        map2.insert(2u8, 0xBBBBu16);
        map2.insert(1u8, 0xAAAAu16);

        let mut expected2 = BytesMut::new();
        2usize.write(&mut expected2);
        1u8.write(&mut expected2);
        0xAAAAu16.write(&mut expected2);
        2u8.write(&mut expected2);
        0xBBBBu16.write(&mut expected2);
        assert_eq!(map2.encode(), expected2.freeze());
        assert_eq!(map2.encode_size(), 1 + (u8::SIZE + u16::SIZE) * 2);

        let mut map3 = HashMap::<u16, bool>::new();
        map3.insert(0x0303u16, true);
        map3.insert(0x0101u16, false);
        map3.insert(0x0202u16, true);

        let mut expected3 = BytesMut::new();
        3usize.write(&mut expected3);
        0x0101u16.write(&mut expected3);
        false.write(&mut expected3);
        0x0202u16.write(&mut expected3);
        true.write(&mut expected3);
        0x0303u16.write(&mut expected3);
        true.write(&mut expected3);
        assert_eq!(map3.encode(), expected3.freeze());
        assert_eq!(map3.encode_size(), 1 + (u16::SIZE + bool::SIZE) * 3);

        // Variable-length keys and values.
        let mut map4 = HashMap::<Bytes, Vec<u8>>::new();
        map4.insert(Bytes::from_static(b"b"), vec![20u8, 21u8]);
        map4.insert(Bytes::from_static(b"a"), vec![10u8]);

        let mut expected4 = BytesMut::new();
        2usize.write(&mut expected4);
        Bytes::from_static(b"a").write(&mut expected4);
        vec![10u8].write(&mut expected4);
        Bytes::from_static(b"b").write(&mut expected4);
        vec![20u8, 21u8].write(&mut expected4);
        assert_eq!(map4.encode(), expected4.freeze());
    }
}