1use crate::{Get, TryCollect};
21use alloc::collections::BTreeMap;
22use core::{borrow::Borrow, marker::PhantomData, ops::Deref};
23#[cfg(feature = "serde")]
24use serde::{
25 de::{Error, MapAccess, Visitor},
26 Deserialize, Deserializer, Serialize,
27};
28
/// A B-tree map whose number of entries is bounded above by `S::get()`.
///
/// The bound is enforced by the fallible constructors/mutators (`try_from`,
/// `try_insert`, `try_mutate`, …); the wrapped map itself is a plain
/// `BTreeMap`. NOTE(review): the bound limits entry *count* only — it says
/// nothing about the encoded byte size of keys or values.
#[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))]
#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))]
#[cfg_attr(feature = "scale-codec", scale_info(skip_type_params(S)))]
#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))]
pub struct BoundedBTreeMap<K, V, S>(
	/// The underlying, unbounded map.
	BTreeMap<K, V>,
	/// Marker tying the bound type `S` to the map; zero-sized, never serialized.
	#[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData<S>,
);
44
#[cfg(feature = "serde")]
impl<'de, K, V, S: Get<u32>> Deserialize<'de> for BoundedBTreeMap<K, V, S>
where
	K: Deserialize<'de> + Ord,
	V: Deserialize<'de>,
{
	/// Deserialize a map, rejecting inputs with more than `S::get()` entries.
	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
	where
		D: Deserializer<'de>,
	{
		// Visitor that builds a plain `BTreeMap` while enforcing the bound
		// incrementally, so oversized input fails fast instead of being fully
		// buffered before the check.
		struct BTreeMapVisitor<K, V, S>(PhantomData<(K, V, S)>);

		impl<'de, K, V, S> Visitor<'de> for BTreeMapVisitor<K, V, S>
		where
			K: Deserialize<'de> + Ord,
			V: Deserialize<'de>,
			S: Get<u32>,
		{
			type Value = BTreeMap<K, V>;

			fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
				formatter.write_str("a map")
			}

			fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
			where
				A: MapAccess<'de>,
			{
				// `size_hint` is advisory only (self-describing formats such as
				// JSON may return `None`), so this early check is merely an
				// optimization …
				let size = map.size_hint().unwrap_or(0);
				let max = S::get() as usize;
				if size > max {
					Err(A::Error::custom("map exceeds the size of the bounds"))
				} else {
					let mut values = BTreeMap::new();

					// … and the bound is re-checked on every insertion. Note the
					// check runs before the key is compared against existing
					// entries, so a duplicate key arriving at the limit is
					// rejected even though inserting it would only overwrite.
					while let Some(key) = map.next_key()? {
						if values.len() >= max {
							return Err(A::Error::custom("map exceeds the size of the bounds"));
						}
						let value = map.next_value()?;
						values.insert(key, value);
					}

					Ok(values)
				}
			}
		}

		let visitor: BTreeMapVisitor<K, V, S> = BTreeMapVisitor(PhantomData);
		// The visitor already enforced the bound; `try_from` re-validates it,
		// and the trailing `?` flattens the nested `Result`.
		deserializer.deserialize_map(visitor).map(|v| {
			BoundedBTreeMap::<K, V, S>::try_from(v)
				.map_err(|_| Error::custom("failed to create a BoundedBTreeMap from the provided map"))
		})?
	}
}
101
102impl<K, V, S> BoundedBTreeMap<K, V, S>
103where
104 S: Get<u32>,
105{
106 pub fn bound() -> usize {
108 S::get() as usize
109 }
110}
111
112impl<K, V, S> BoundedBTreeMap<K, V, S>
113where
114 K: Ord,
115 S: Get<u32>,
116{
117 fn unchecked_from(t: BTreeMap<K, V>) -> Self {
119 Self(t, Default::default())
120 }
121
122 pub fn retain<F: FnMut(&K, &mut V) -> bool>(&mut self, f: F) {
127 self.0.retain(f)
128 }
129
130 pub fn new() -> Self {
134 BoundedBTreeMap(BTreeMap::new(), PhantomData)
135 }
136
137 pub fn into_inner(self) -> BTreeMap<K, V> {
142 debug_assert!(self.0.len() <= Self::bound());
143 self.0
144 }
145
146 pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut BTreeMap<K, V>)) -> Option<Self> {
154 mutate(&mut self.0);
155 (self.0.len() <= Self::bound()).then(move || self)
156 }
157
158 pub fn clear(&mut self) {
160 self.0.clear()
161 }
162
163 pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
168 where
169 K: Borrow<Q>,
170 Q: Ord + ?Sized,
171 {
172 self.0.get_mut(key)
173 }
174
175 pub fn try_insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
180 if self.len() < Self::bound() || self.0.contains_key(&key) {
181 Ok(self.0.insert(key, value))
182 } else {
183 Err((key, value))
184 }
185 }
186
187 pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
193 where
194 K: Borrow<Q>,
195 Q: Ord + ?Sized,
196 {
197 self.0.remove(key)
198 }
199
200 pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
206 where
207 K: Borrow<Q>,
208 Q: Ord + ?Sized,
209 {
210 self.0.remove_entry(key)
211 }
212
213 pub fn iter_mut(&mut self) -> alloc::collections::btree_map::IterMut<K, V> {
217 self.0.iter_mut()
218 }
219
220 pub fn map<T, F>(self, mut f: F) -> BoundedBTreeMap<K, T, S>
222 where
223 F: FnMut((&K, V)) -> T,
224 {
225 BoundedBTreeMap::<K, T, S>::unchecked_from(
226 self.0
227 .into_iter()
228 .map(|(k, v)| {
229 let t = f((&k, v));
230 (k, t)
231 })
232 .collect(),
233 )
234 }
235
236 pub fn try_map<T, E, F>(self, mut f: F) -> Result<BoundedBTreeMap<K, T, S>, E>
240 where
241 F: FnMut((&K, V)) -> Result<T, E>,
242 {
243 Ok(BoundedBTreeMap::<K, T, S>::unchecked_from(
244 self.0
245 .into_iter()
246 .map(|(k, v)| (f((&k, v)).map(|t| (k, t))))
247 .collect::<Result<BTreeMap<_, _>, _>>()?,
248 ))
249 }
250
251 pub fn is_full(&self) -> bool {
253 self.len() >= Self::bound()
254 }
255}
256
257impl<K, V, S> Default for BoundedBTreeMap<K, V, S>
258where
259 K: Ord,
260 S: Get<u32>,
261{
262 fn default() -> Self {
263 Self::new()
264 }
265}
266
267impl<K, V, S> Clone for BoundedBTreeMap<K, V, S>
268where
269 BTreeMap<K, V>: Clone,
270{
271 fn clone(&self) -> Self {
272 BoundedBTreeMap(self.0.clone(), PhantomData)
273 }
274}
275
276impl<K, V, S> core::fmt::Debug for BoundedBTreeMap<K, V, S>
277where
278 BTreeMap<K, V>: core::fmt::Debug,
279 S: Get<u32>,
280{
281 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
282 f.debug_tuple("BoundedBTreeMap").field(&self.0).field(&Self::bound()).finish()
283 }
284}
285
#[cfg(feature = "std")]
impl<K: std::hash::Hash, V: std::hash::Hash, S> std::hash::Hash for BoundedBTreeMap<K, V, S> {
	// Only the contents are hashed: maps with equal entries hash identically,
	// whatever their bound type `S`.
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		std::hash::Hash::hash(&self.0, state);
	}
}
294
295impl<K, V, S1, S2> PartialEq<BoundedBTreeMap<K, V, S1>> for BoundedBTreeMap<K, V, S2>
296where
297 BTreeMap<K, V>: PartialEq,
298 S1: Get<u32>,
299 S2: Get<u32>,
300{
301 fn eq(&self, other: &BoundedBTreeMap<K, V, S1>) -> bool {
302 S1::get() == S2::get() && self.0 == other.0
303 }
304}
305
// `Eq` is a marker on top of the `PartialEq` impl above (with `S1 = S2 = S`);
// there are no methods to implement.
impl<K, V, S> Eq for BoundedBTreeMap<K, V, S>
where
	BTreeMap<K, V>: Eq,
	S: Get<u32>,
{
}
312
313impl<K, V, S> PartialEq<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
314where
315 BTreeMap<K, V>: PartialEq,
316{
317 fn eq(&self, other: &BTreeMap<K, V>) -> bool {
318 self.0 == *other
319 }
320}
321
322impl<K, V, S> PartialOrd for BoundedBTreeMap<K, V, S>
323where
324 BTreeMap<K, V>: PartialOrd,
325 S: Get<u32>,
326{
327 fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
328 self.0.partial_cmp(&other.0)
329 }
330}
331
332impl<K, V, S> Ord for BoundedBTreeMap<K, V, S>
333where
334 BTreeMap<K, V>: Ord,
335 S: Get<u32>,
336{
337 fn cmp(&self, other: &Self) -> core::cmp::Ordering {
338 self.0.cmp(&other.0)
339 }
340}
341
342impl<K, V, S> IntoIterator for BoundedBTreeMap<K, V, S> {
343 type Item = (K, V);
344 type IntoIter = alloc::collections::btree_map::IntoIter<K, V>;
345
346 fn into_iter(self) -> Self::IntoIter {
347 self.0.into_iter()
348 }
349}
350
351impl<'a, K, V, S> IntoIterator for &'a BoundedBTreeMap<K, V, S> {
352 type Item = (&'a K, &'a V);
353 type IntoIter = alloc::collections::btree_map::Iter<'a, K, V>;
354
355 fn into_iter(self) -> Self::IntoIter {
356 self.0.iter()
357 }
358}
359
360impl<'a, K, V, S> IntoIterator for &'a mut BoundedBTreeMap<K, V, S> {
361 type Item = (&'a K, &'a mut V);
362 type IntoIter = alloc::collections::btree_map::IterMut<'a, K, V>;
363
364 fn into_iter(self) -> Self::IntoIter {
365 self.0.iter_mut()
366 }
367}
368
369impl<K, V, S> Deref for BoundedBTreeMap<K, V, S>
370where
371 K: Ord,
372{
373 type Target = BTreeMap<K, V>;
374
375 fn deref(&self) -> &Self::Target {
376 &self.0
377 }
378}
379
380impl<K, V, S> AsRef<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
381where
382 K: Ord,
383{
384 fn as_ref(&self) -> &BTreeMap<K, V> {
385 &self.0
386 }
387}
388
389impl<K, V, S> From<BoundedBTreeMap<K, V, S>> for BTreeMap<K, V>
390where
391 K: Ord,
392{
393 fn from(map: BoundedBTreeMap<K, V, S>) -> Self {
394 map.0
395 }
396}
397
398impl<K, V, S> TryFrom<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
399where
400 K: Ord,
401 S: Get<u32>,
402{
403 type Error = ();
404
405 fn try_from(value: BTreeMap<K, V>) -> Result<Self, Self::Error> {
406 (value.len() <= Self::bound())
407 .then(move || BoundedBTreeMap(value, PhantomData))
408 .ok_or(())
409 }
410}
411
412impl<I, K, V, Bound> TryCollect<BoundedBTreeMap<K, V, Bound>> for I
413where
414 K: Ord,
415 I: ExactSizeIterator + Iterator<Item = (K, V)>,
416 Bound: Get<u32>,
417{
418 type Error = &'static str;
419
420 fn try_collect(self) -> Result<BoundedBTreeMap<K, V, Bound>, Self::Error> {
421 if self.len() > Bound::get() as usize {
422 Err("iterator length too big")
423 } else {
424 Ok(BoundedBTreeMap::<K, V, Bound>::unchecked_from(self.collect::<BTreeMap<K, V>>()))
425 }
426 }
427}
428
// Codec trait impls, stamped out once per codec crate — `scale-codec` and
// `jam-codec` expose the same trait surface, so the bodies are shared.
#[cfg(any(feature = "scale-codec", feature = "jam-codec"))]
macro_rules! codec_impl {
	($codec:ident) => {
		use super::*;
		use crate::codec_utils::PrependCompactInput;
		use $codec::{
			Compact, Decode, DecodeLength, DecodeWithMemTracking, Encode, EncodeLike, Error, Input, MaxEncodedLen,
		};

		impl<K, V, S> Decode for BoundedBTreeMap<K, V, S>
		where
			K: Decode + Ord,
			V: Decode,
			S: Get<u32>,
		{
			fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
				// Read the compact length prefix first so oversized payloads are
				// rejected before any element is decoded (and before any element
				// memory is allocated).
				let len = <Compact<u32>>::decode(input)?;
				if len.0 > S::get() {
					return Err("BoundedBTreeMap exceeds its limit".into());
				}
				// `BTreeMap::decode` expects the length prefix too, so splice the
				// already-consumed prefix back in front of the remaining input.
				let inner = BTreeMap::decode(&mut PrependCompactInput {
					encoded_len: len.encode().as_ref(),
					read: 0,
					inner: input,
				})?;
				Ok(Self(inner, PhantomData))
			}

			fn skip<I: Input>(input: &mut I) -> Result<(), Error> {
				// Skipping needs no bound check; delegate to the inner map.
				BTreeMap::<K, V>::skip(input)
			}
		}

		impl<K, V, S> DecodeWithMemTracking for BoundedBTreeMap<K, V, S>
		where
			K: DecodeWithMemTracking + Ord,
			V: DecodeWithMemTracking,
			S: Get<u32>,
			BoundedBTreeMap<K, V, S>: Decode,
		{
		}

		impl<K, V, S> MaxEncodedLen for BoundedBTreeMap<K, V, S>
		where
			K: MaxEncodedLen,
			V: MaxEncodedLen,
			S: Get<u32>,
		{
			fn max_encoded_len() -> usize {
				// bound * (max key size + max value size) + the compact length
				// prefix, saturating rather than overflowing for huge bounds.
				Self::bound()
					.saturating_mul(K::max_encoded_len().saturating_add(V::max_encoded_len()))
					.saturating_add(Compact(S::get()).encoded_size())
			}
		}

		// Sound because the derived `Encode` emits exactly the inner map's
		// encoding (the `PhantomData` contributes nothing).
		impl<K, V, S> EncodeLike<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S> where BTreeMap<K, V>: Encode {}

		impl<K, V, S> DecodeLength for BoundedBTreeMap<K, V, S> {
			fn len(self_encoded: &[u8]) -> Result<usize, Error> {
				// Encoding matches `BTreeMap`'s, so its length decoding applies verbatim.
				<BTreeMap<K, V> as DecodeLength>::len(self_encoded)
			}
		}
	};
}
498
// Instantiate the shared codec impls for `parity-scale-codec`.
#[cfg(feature = "scale-codec")]
mod scale_codec_impl {
	codec_impl!(scale_codec);
}
503
// Instantiate the shared codec impls for `jam-codec`.
#[cfg(feature = "jam-codec")]
mod jam_codec_impl {
	codec_impl!(jam_codec);
}
508
509#[cfg(test)]
510mod test {
511 use super::*;
512 use crate::ConstU32;
513 use alloc::{vec, vec::Vec};
514 #[cfg(feature = "scale-codec")]
515 use scale_codec::{Compact, CompactLen, Decode, Encode};
516
517 fn map_from_keys<K>(keys: &[K]) -> BTreeMap<K, ()>
518 where
519 K: Ord + Copy,
520 {
521 keys.iter().copied().zip(core::iter::repeat(())).collect()
522 }
523
524 fn boundedmap_from_keys<K, S>(keys: &[K]) -> BoundedBTreeMap<K, (), S>
525 where
526 K: Ord + Copy,
527 S: Get<u32>,
528 {
529 map_from_keys(keys).try_into().unwrap()
530 }
531
532 #[test]
533 #[cfg(feature = "scale-codec")]
534 fn encoding_same_as_unbounded_map() {
535 let b = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
536 let m = map_from_keys(&[1, 2, 3, 4, 5, 6]);
537
538 assert_eq!(b.encode(), m.encode());
539 }
540
541 #[test]
542 #[cfg(feature = "scale-codec")]
543 fn encode_then_decode_gives_original_map() {
544 let b = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
545 let b_encode_decode = BoundedBTreeMap::<u32, (), ConstU32<7>>::decode(&mut &b.encode()[..]).unwrap();
546
547 assert_eq!(b_encode_decode, b);
548 }
549
550 #[test]
551 fn try_insert_works() {
552 let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
553 bounded.try_insert(0, ()).unwrap();
554 assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
555
556 assert!(bounded.try_insert(9, ()).is_err());
557 assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
558 }
559
560 #[test]
561 fn deref_coercion_works() {
562 let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3]);
563 assert_eq!(bounded.len(), 3);
565 assert!(bounded.iter().next().is_some());
566 assert!(!bounded.is_empty());
567 }
568
569 #[test]
570 fn try_mutate_works() {
571 let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
572 let bounded = bounded
573 .try_mutate(|v| {
574 v.insert(7, ());
575 })
576 .unwrap();
577 assert_eq!(bounded.len(), 7);
578 assert!(bounded
579 .try_mutate(|v| {
580 v.insert(8, ());
581 })
582 .is_none());
583 }
584
585 #[test]
586 fn btree_map_eq_works() {
587 let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
588 assert_eq!(bounded, map_from_keys(&[1, 2, 3, 4, 5, 6]));
589 }
590
591 #[test]
592 #[cfg(feature = "scale-codec")]
593 fn too_big_fail_to_decode() {
594 let v: Vec<(u32, u32)> = vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)];
595 assert_eq!(
596 BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
597 Err("BoundedBTreeMap exceeds its limit".into()),
598 );
599 }
600
	#[test]
	#[cfg(feature = "scale-codec")]
	fn dont_consume_more_data_than_bounded_len() {
		let m = map_from_keys(&[1, 2, 3, 4, 5, 6]);
		let data = m.encode();
		let data_input = &mut &data[..];

		// Decoding must fail (6 entries > bound of 4) …
		BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(data_input).unwrap_err();
		// … having consumed only the compact length prefix, so the rest of the
		// input stream stays available to the caller. NOTE(review): the expected
		// remainder is computed from `compact_len(data.len())`, which matches the
		// actual one-byte `Compact(6)` prefix only because both values encode to
		// a single byte here — confirm if entry counts/sizes ever change.
		assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
	}
611
	#[test]
	fn unequal_eq_impl_insert_works() {
		// A key type whose `Eq`/`Ord` only consider the first field, so two keys
		// can be "equal" for map purposes while differing in the second field.
		#[derive(Debug)]
		struct Unequal(u32, bool);

		impl PartialEq for Unequal {
			fn eq(&self, other: &Self) -> bool {
				self.0 == other.0
			}
		}
		impl Eq for Unequal {}

		impl Ord for Unequal {
			fn cmp(&self, other: &Self) -> core::cmp::Ordering {
				self.0.cmp(&other.0)
			}
		}

		impl PartialOrd for Unequal {
			fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
				Some(self.cmp(other))
			}
		}

		let mut map = BoundedBTreeMap::<Unequal, u32, ConstU32<4>>::new();

		// Fill the map to its bound.
		for i in 0..4 {
			map.try_insert(Unequal(i, false), i).unwrap();
		}

		// A genuinely new key must be rejected at capacity.
		map.try_insert(Unequal(5, false), 5).unwrap_err();

		// An "equal" key must be accepted even at capacity, since it replaces an
		// existing entry rather than growing the map …
		map.try_insert(Unequal(0, true), 6).unwrap();
		assert_eq!(map.len(), 4);
		// … and, per `BTreeMap` insert semantics, the value is updated while the
		// originally stored key object is kept (hence `zero_key.1` is `false`).
		let (zero_key, zero_value) = map.get_key_value(&Unequal(0, true)).unwrap();
		assert_eq!(zero_key.0, 0);
		assert_eq!(zero_key.1, false);
		assert_eq!(*zero_value, 6);
	}
657
658 #[test]
659 fn eq_works() {
660 let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
662 let b2 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
663 assert_eq!(b1, b2);
664
665 crate::parameter_types! {
667 B1: u32 = 7;
668 B2: u32 = 7;
669 }
670 let b1 = boundedmap_from_keys::<u32, B1>(&[1, 2]);
671 let b2 = boundedmap_from_keys::<u32, B2>(&[1, 2]);
672 assert_eq!(b1, b2);
673 }
674
	#[test]
	fn can_be_collected() {
		let b1 = boundedmap_from_keys::<u32, ConstU32<5>>(&[1, 2, 3, 4]);
		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> = b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);

		// Collecting into a bound exactly equal to the iterator length works too.
		let b2: BoundedBTreeMap<u32, (), ConstU32<4>> = b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);

		// Length-shrinking adapters on the exact-size iterator are respected
		// (note the map re-sorts keys, so `rev()` does not change the output).
		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
			b1.iter().map(|(k, v)| (k + 1, *v)).rev().skip(2).try_collect().unwrap();
		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);

		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
			b1.iter().map(|(k, v)| (k + 1, *v)).take(2).try_collect().unwrap();
		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);

		// Oversize iterators must fail to collect …
		let b2: Result<BoundedBTreeMap<u32, (), ConstU32<3>>, _> = b1.iter().map(|(k, v)| (k + 1, *v)).try_collect();
		assert!(b2.is_err());

		// … including after `skip`, where the 2 remaining items exceed bound 1.
		let b2: Result<BoundedBTreeMap<u32, (), ConstU32<1>>, _> =
			b1.iter().map(|(k, v)| (k + 1, *v)).skip(2).try_collect();
		assert!(b2.is_err());
	}
703
704 #[test]
705 fn test_iter_mut() {
706 let mut b1: BoundedBTreeMap<u8, u8, ConstU32<7>> =
707 [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();
708
709 let b2: BoundedBTreeMap<u8, u8, ConstU32<7>> =
710 [1, 2, 3, 4].into_iter().map(|k| (k, k * 2)).try_collect().unwrap();
711
712 b1.iter_mut().for_each(|(_, v)| *v *= 2);
713
714 assert_eq!(b1, b2);
715 }
716
717 #[test]
718 fn map_retains_size() {
719 let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
720 let b2 = b1.clone();
721
722 assert_eq!(b1.len(), b2.map(|(_, _)| 5_u32).len());
723 }
724
725 #[test]
726 fn map_maps_properly() {
727 let b1: BoundedBTreeMap<u32, u32, ConstU32<7>> =
728 [1, 2, 3, 4].into_iter().map(|k| (k, k * 2)).try_collect().unwrap();
729 let b2: BoundedBTreeMap<u32, u32, ConstU32<7>> =
730 [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();
731
732 assert_eq!(b1, b2.map(|(_, v)| v * 2));
733 }
734
735 #[test]
736 fn try_map_retains_size() {
737 let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
738 let b2 = b1.clone();
739
740 assert_eq!(b1.len(), b2.try_map::<_, (), _>(|(_, _)| Ok(5_u32)).unwrap().len());
741 }
742
743 #[test]
744 fn try_map_maps_properly() {
745 let b1: BoundedBTreeMap<u32, u32, ConstU32<7>> =
746 [1, 2, 3, 4].into_iter().map(|k| (k, k * 2)).try_collect().unwrap();
747 let b2: BoundedBTreeMap<u32, u32, ConstU32<7>> =
748 [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();
749
750 assert_eq!(b1, b2.try_map::<_, (), _>(|(_, v)| Ok(v * 2)).unwrap());
751 }
752
753 #[test]
754 fn try_map_short_circuit() {
755 let b1: BoundedBTreeMap<u8, u8, ConstU32<7>> = [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();
756
757 assert_eq!(Err("overflow"), b1.try_map(|(_, v)| v.checked_mul(100).ok_or("overflow")));
758 }
759
760 #[test]
761 fn try_map_ok() {
762 let b1: BoundedBTreeMap<u8, u8, ConstU32<7>> = [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();
763 let b2: BoundedBTreeMap<u8, u16, ConstU32<7>> =
764 [1, 2, 3, 4].into_iter().map(|k| (k, (k as u16) * 100)).try_collect().unwrap();
765
766 assert_eq!(Ok(b2), b1.try_map(|(_, v)| (v as u16).checked_mul(100_u16).ok_or("overflow")));
767 }
768
	#[test]
	#[cfg(feature = "std")]
	fn container_can_derive_hash() {
		// Compile-time smoke test: `BoundedBTreeMap` must not block
		// `#[derive(Hash)]` (or `Default`) on a containing struct.
		#[derive(Hash, Default)]
		struct Foo {
			bar: u8,
			map: BoundedBTreeMap<String, usize, ConstU32<16>>,
		}
		let _foo = Foo::default();
	}
781
	// Round-trip and bound-enforcement tests for the serde (de)serializer impls.
	#[cfg(feature = "serde")]
	mod serde {
		use super::*;
		use crate::alloc::string::ToString;

		#[test]
		fn test_bounded_btreemap_serializer() {
			let mut map = BoundedBTreeMap::<u32, u32, ConstU32<6>>::new();
			map.try_insert(0, 100).unwrap();
			map.try_insert(1, 101).unwrap();
			map.try_insert(2, 102).unwrap();

			// `serde(transparent)` makes the map serialize as a plain JSON object.
			let serialized = serde_json::to_string(&map).unwrap();
			assert_eq!(serialized, r#"{"0":100,"1":101,"2":102}"#);
		}

		#[test]
		fn test_bounded_btreemap_deserializer() {
			// Three entries against a bound of six: comfortably accepted.
			let json_str = r#"{"0":100,"1":101,"2":102}"#;
			let map: Result<BoundedBTreeMap<u32, u32, ConstU32<6>>, serde_json::Error> = serde_json::from_str(json_str);
			assert!(map.is_ok());
			let map = map.unwrap();

			assert_eq!(map.len(), 3);
			assert_eq!(map.get(&0), Some(&100));
			assert_eq!(map.get(&1), Some(&101));
			assert_eq!(map.get(&2), Some(&102));
		}

		#[test]
		fn test_bounded_btreemap_deserializer_bound() {
			// Exactly at the bound: still accepted.
			let json_str = r#"{"0":100,"1":101,"2":102}"#;
			let map: Result<BoundedBTreeMap<u32, u32, ConstU32<3>>, serde_json::Error> = serde_json::from_str(json_str);
			assert!(map.is_ok());
			let map = map.unwrap();

			assert_eq!(map.len(), 3);
			assert_eq!(map.get(&0), Some(&100));
			assert_eq!(map.get(&1), Some(&101));
			assert_eq!(map.get(&2), Some(&102));
		}

		#[test]
		fn test_bounded_btreemap_deserializer_failed() {
			// Five entries against a bound of four: rejected by the visitor.
			let json_str = r#"{"0":100,"1":101,"2":102,"3":103,"4":104}"#;
			let map: Result<BoundedBTreeMap<u32, u32, ConstU32<4>>, serde_json::Error> = serde_json::from_str(json_str);

			match map {
				Err(e) => {
					assert!(e.to_string().contains("map exceeds the size of the bounds"));
				},
				_ => unreachable!("deserializer must raise error"),
			}
		}
	}
837
838 #[test]
839 fn is_full_works() {
840 let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
841 assert!(!bounded.is_full());
842 bounded.try_insert(0, ()).unwrap();
843 assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
844
845 assert!(bounded.is_full());
846 assert!(bounded.try_insert(9, ()).is_err());
847 assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
848 }
849}