use super::WeakBoundedVec;
use crate::{Get, TryCollect};
use alloc::vec::Vec;
use core::{
    marker::PhantomData,
    ops::{Deref, Index, IndexMut, RangeBounds},
    slice::SliceIndex,
};
#[cfg(feature = "serde")]
use serde::{
    de::{Error, SeqAccess, Visitor},
    Deserialize, Deserializer, Serialize,
};

/// A vector of elements of type `T`, bounded to at most `S::get()` items.
///
/// The length can never exceed the bound: fallible constructors and mutators (`try_from`,
/// `try_push`, ...) return an error instead of growing past it, while the `truncate_from` and
/// `force_*` helpers drop elements to stay within it.
#[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))]
#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))]
#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))]
#[cfg_attr(feature = "scale-codec", scale_info(skip_type_params(S)))]
#[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))]
pub struct BoundedVec<T, S>(pub(super) Vec<T>, #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData<S>);

/// Create `Self` from an unbounded `T` by truncating any elements that exceed the bound.
pub trait TruncateFrom<T> {
    /// Create `Self` from `unbound`, dropping any excess elements.
    fn truncate_from(unbound: T) -> Self;
}

#[cfg(feature = "serde")]
mod serde_impl {
    use super::*;

    impl<'de, T, S: Get<u32>> Deserialize<'de> for BoundedVec<T, S>
    where
        T: Deserialize<'de>,
    {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            struct VecVisitor<T, S: Get<u32>>(PhantomData<(T, S)>);

            impl<'de, T, S: Get<u32>> Visitor<'de> for VecVisitor<T, S>
            where
                T: Deserialize<'de>,
            {
                type Value = Vec<T>;

                fn expecting(&self, formatter: &mut alloc::fmt::Formatter) -> alloc::fmt::Result {
                    formatter.write_str("a sequence")
                }

                fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
                where
                    A: SeqAccess<'de>,
                {
                    // Check the size hint first, then keep enforcing the bound while elements
                    // are pulled from the sequence, so untrusted input can never make us collect
                    // more than `S::get()` items.
                    let size = seq.size_hint().unwrap_or(0);
                    let max = match usize::try_from(S::get()) {
                        Ok(n) => n,
                        Err(_) => return Err(A::Error::custom("can't convert to usize")),
                    };
                    if size > max {
                        Err(A::Error::custom("out of bounds"))
                    } else {
                        let mut values = Vec::with_capacity(size);

                        while let Some(value) = seq.next_element()? {
                            if values.len() >= max {
                                return Err(A::Error::custom("out of bounds"));
                            }
                            values.push(value);
                        }

                        Ok(values)
                    }
                }
            }

            let visitor: VecVisitor<T, S> = VecVisitor(PhantomData);
            deserializer
                .deserialize_seq(visitor)
                .map(|v| BoundedVec::<T, S>::try_from(v).map_err(|_| Error::custom("out of bounds")))?
        }
    }
}

/// A bounded, borrowed slice: at most `S::get()` elements of `T`.
#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))]
#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))]
pub struct BoundedSlice<'a, T, S>(pub(super) &'a [T], PhantomData<S>);

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> bool {
        self.0 == other.0
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, other: &BoundedVec<T, BoundRhs>) -> bool {
        self.0 == other.0
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, other: &WeakBoundedVec<T, BoundRhs>) -> bool {
        self.0 == other.0
    }
}

impl<'a, T, S: Get<u32>> Eq for BoundedSlice<'a, T, S> where T: Eq {}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(other.0)
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&*other.0)
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&*other.0)
    }
}

impl<'a, T: Ord, Bound: Get<u32>> Ord for BoundedSlice<'a, T, Bound> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.0.cmp(&other.0)
    }
}

impl<'a, T, S: Get<u32>> TryFrom<&'a [T]> for BoundedSlice<'a, T, S> {
    type Error = &'a [T];
    fn try_from(t: &'a [T]) -> Result<Self, Self::Error> {
        if t.len() <= S::get() as usize {
            Ok(BoundedSlice(t, PhantomData))
        } else {
            Err(t)
        }
    }
}

impl<'a, T, S> From<BoundedSlice<'a, T, S>> for &'a [T] {
    fn from(t: BoundedSlice<'a, T, S>) -> Self {
        t.0
    }
}

impl<'a, T, S: Get<u32>> TruncateFrom<&'a [T]> for BoundedSlice<'a, T, S> {
    fn truncate_from(unbound: &'a [T]) -> Self {
        BoundedSlice::<T, S>::truncate_from(unbound)
    }
}

impl<'a, T, S> Clone for BoundedSlice<'a, T, S> {
    fn clone(&self) -> Self {
        BoundedSlice(self.0, PhantomData)
    }
}

impl<'a, T, S> core::fmt::Debug for BoundedSlice<'a, T, S>
where
    &'a [T]: core::fmt::Debug,
    S: Get<u32>,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("BoundedSlice").field(&self.0).field(&S::get()).finish()
    }
}

impl<'a, T, S> Copy for BoundedSlice<'a, T, S> {}

impl<'a, T, S> Deref for BoundedSlice<'a, T, S> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.0
    }
}

#[cfg(feature = "std")]
impl<'a, T: std::hash::Hash, S> std::hash::Hash for BoundedSlice<'a, T, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

impl<'a, T, S> core::iter::IntoIterator for BoundedSlice<'a, T, S> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a, T, S: Get<u32>> BoundedSlice<'a, T, S> {
    /// Create a `BoundedSlice` from `s`, keeping at most the first `S::get()` elements.
    pub fn truncate_from(s: &'a [T]) -> Self {
        Self(&s[0..(s.len().min(S::get() as usize))], PhantomData)
    }
}

impl<T, S> BoundedVec<T, S> {
    /// Create a new, empty `BoundedVec`.
    pub fn new() -> Self {
        Self(Vec::new(), Default::default())
    }

    /// Create `Self` from `t` without any checks; only for internal use where the bound is known
    /// to hold.
    fn unchecked_from(t: Vec<T>) -> Self {
        Self(t, Default::default())
    }

    /// Same semantics as `Vec::clear`.
    pub fn clear(&mut self) {
        self.0.clear()
    }

    /// Consume `self` and return the inner, unbounded `Vec<T>`.
    pub fn into_inner(self) -> Vec<T> {
        self.0
    }

    /// Same semantics as `Vec::sort_by`.
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> core::cmp::Ordering,
    {
        self.0.sort_by(compare)
    }

    /// Same semantics as `Vec::sort_by_key`.
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: core::cmp::Ord,
    {
        self.0.sort_by_key(f)
    }

    /// Same semantics as `Vec::sort`.
    pub fn sort(&mut self)
    where
        T: core::cmp::Ord,
    {
        self.0.sort()
    }

    /// Same semantics as `Vec::remove`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    pub fn remove(&mut self, index: usize) -> T {
        self.0.remove(index)
    }

    /// Same semantics as `Vec::swap_remove`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    pub fn swap_remove(&mut self, index: usize) -> T {
        self.0.swap_remove(index)
    }

    /// Same semantics as `Vec::retain`.
    pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
        self.0.retain(f)
    }

    /// Same semantics as `Vec::get_mut`.
    pub fn get_mut<I: SliceIndex<[T]>>(&mut self, index: I) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
        self.0.get_mut(index)
    }

    /// Same semantics as `Vec::truncate`; this cannot violate the bound.
    pub fn truncate(&mut self, s: usize) {
        self.0.truncate(s);
    }

    /// Same semantics as `Vec::pop`; this cannot violate the bound.
    pub fn pop(&mut self) -> Option<T> {
        self.0.pop()
    }

    /// Same semantics as `Vec::iter_mut`.
    pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, T> {
        self.0.iter_mut()
    }

    /// Same semantics as `Vec::last_mut`.
    pub fn last_mut(&mut self) -> Option<&mut T> {
        self.0.last_mut()
    }

    /// Same semantics as `Vec::drain`.
    pub fn drain<R>(&mut self, range: R) -> alloc::vec::Drain<'_, T>
    where
        R: RangeBounds<usize>,
    {
        self.0.drain(range)
    }
}

impl<T, S: Get<u32>> From<BoundedVec<T, S>> for Vec<T> {
    fn from(x: BoundedVec<T, S>) -> Vec<T> {
        x.0
    }
}

impl<T, S: Get<u32>> BoundedVec<T, S> {
    /// Pre-allocate `capacity` items in `self`, capped at the bound.
    pub fn with_bounded_capacity(capacity: usize) -> Self {
        let capacity = capacity.min(Self::bound());
        Self(Vec::with_capacity(capacity), Default::default())
    }

    /// Allocate `self` with the maximum possible capacity, namely the bound.
    pub fn with_max_capacity() -> Self {
        Self::with_bounded_capacity(Self::bound())
    }

    /// Consume and truncate the vector `v` in order to create a new instance of `Self` from it.
    pub fn truncate_from(mut v: Vec<T>) -> Self {
        v.truncate(Self::bound());
        Self::unchecked_from(v)
    }

    /// Get the bound of the type as a `usize`.
    pub fn bound() -> usize {
        S::get() as usize
    }

    /// Returns `true` if this collection is full.
    pub fn is_full(&self) -> bool {
        self.len() >= Self::bound()
    }

    /// Force the insertion of `element` at `index`, keeping all items with an index of at least
    /// `index`. When the vector is full, the first element is dropped to make room and returned
    /// as `Ok(Some(_))`, and the new element ends up just before `index`.
    ///
    /// Returns `Err(element)` if `index` is out of range, or if the vector is full and
    /// `index == 0` (there is nothing to the left that could be dropped).
    pub fn force_insert_keep_right(&mut self, index: usize, mut element: T) -> Result<Option<T>, T> {
        // Check against panics.
        if Self::bound() < index || self.len() < index {
            Err(element)
        } else if self.len() < Self::bound() {
            // Cannot panic since `self.len() >= index`.
            self.0.insert(index, element);
            Ok(None)
        } else {
            if index == 0 {
                return Err(element)
            }
            core::mem::swap(&mut self[0], &mut element);
            // `[0..index]` cannot panic since `self.len() >= index`, and `rotate_left(1)` cannot
            // panic because there is at least one element.
            self[0..index].rotate_left(1);
            Ok(Some(element))
        }
    }

    /// Force the insertion of `element` at `index`, keeping all items with an index less than
    /// `index`. When the vector is full, the last element is dropped to make room and returned
    /// as `Ok(Some(_))`.
    ///
    /// Returns `Err(element)` if `index` is out of range, if the bound is zero, or if inserting
    /// at `index` would push the new element straight off the end.
    pub fn force_insert_keep_left(&mut self, index: usize, element: T) -> Result<Option<T>, T> {
        // Check against panics.
        if Self::bound() < index || self.len() < index || Self::bound() == 0 {
            return Err(element)
        }
        // No-op condition: inserting at the very end of a vector that cannot grow.
        if Self::bound() == index && self.len() <= Self::bound() {
            return Err(element)
        }
        let maybe_removed = if self.is_full() {
            // The truncate is defensive: with the `len <= bound` invariant intact it is a no-op,
            // and `pop` then drops the last element to make room.
            self.0.truncate(Self::bound());
            self.0.pop()
        } else {
            None
        };

        // Cannot panic since `self.len() >= index`.
        self.0.insert(index, element);
        Ok(maybe_removed)
    }

    /// Move the element at `index` so that it ends up immediately before the element that was
    /// previously at `insert_position`, sliding the elements in between by one place. Apart from
    /// the moved element, the relative order is unchanged.
    ///
    /// Returns `true` if any change was made; out-of-range arguments and no-op moves return
    /// `false`.
    pub fn slide(&mut self, index: usize, insert_position: usize) -> bool {
        // Check against panics.
        if self.len() <= index || self.len() < insert_position || index == usize::MAX {
            return false
        }
        // `index + 1` cannot overflow because of the `usize::MAX` check above.
        if index == insert_position || index + 1 == insert_position {
            // No-op: the element would end up on its own position.
            return false
        }
        if insert_position < index && index < self.len() {
            // Moving backwards: rotate the in-between range right by one so the element lands
            // at `insert_position`.
            self[insert_position..index + 1].rotate_right(1);
            return true
        } else if insert_position > 0 && index + 1 < insert_position {
            // Moving forwards: rotate the in-between range left by one so the element lands just
            // before `insert_position`.
            self[index..insert_position].rotate_left(1);
            return true
        }

        debug_assert!(false, "all noop conditions should have been covered above");
        false
    }

    /// Force a push: if the vector is already full, the last element is dropped first to make
    /// room. If the bound is zero, this is a no-op and `element` is discarded.
    pub fn force_push(&mut self, element: T) {
        if Self::bound() > 0 {
            self.0.truncate(Self::bound() as usize - 1);
            self.0.push(element);
        }
    }

    /// Same semantics as `Vec::resize`, except the requested `size` is first capped at the
    /// bound, so the operation can never violate it.
    pub fn bounded_resize(&mut self, size: usize, value: T)
    where
        T: Clone,
    {
        let size = size.min(Self::bound());
        self.0.resize(size, value);
    }

    /// Extend `self` with the elements of `with`, but only if the result stays within the bound.
    /// Returns `Err(())` and leaves `self` untouched otherwise.
    pub fn try_extend(&mut self, with: impl IntoIterator<Item = T> + ExactSizeIterator) -> Result<(), ()> {
        if with.len().saturating_add(self.len()) <= Self::bound() {
            self.0.extend(with);
            Ok(())
        } else {
            Err(())
        }
    }

    /// Move all elements of `other` into `self`, but only if the combined length stays within
    /// the bound; on success `other` is left empty. Returns `Err(())` and leaves both untouched
    /// otherwise.
    pub fn try_append(&mut self, other: &mut Vec<T>) -> Result<(), ()> {
        if other.len().saturating_add(self.len()) <= Self::bound() {
            self.0.append(other);
            Ok(())
        } else {
            Err(())
        }
    }

    /// Consume `self` and apply `mutate` to the inner `Vec`. Returns `Some(Self)` if the result
    /// still respects the bound, otherwise `None` (and the mutated data is dropped).
    pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
        mutate(&mut self.0);
        (self.0.len() <= Self::bound()).then(move || self)
    }

    /// Same semantics as `Vec::insert`, but returns `Err(element)` instead of inserting if the
    /// vector is already full.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`, just like `Vec::insert`.
    pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), T> {
        if self.len() < Self::bound() {
            self.0.insert(index, element);
            Ok(())
        } else {
            Err(element)
        }
    }

    /// Same semantics as `Vec::push`, but returns `Err(element)` instead of pushing if the
    /// vector is already full.
    pub fn try_push(&mut self, element: T) -> Result<(), T> {
        if self.len() < Self::bound() {
            self.0.push(element);
            Ok(())
        } else {
            Err(element)
        }
    }

    /// Same semantics as `<[T]>::rotate_left`, but returns `Err(())` instead of panicking when
    /// `mid > len`.
    pub fn try_rotate_left(&mut self, mid: usize) -> Result<(), ()> {
        if mid > self.len() {
            return Err(())
        }

        self.0.rotate_left(mid);
        Ok(())
    }

    /// Same semantics as `<[T]>::rotate_right`, but returns `Err(())` instead of panicking when
    /// `mid > len`.
    pub fn try_rotate_right(&mut self, mid: usize) -> Result<(), ()> {
        if mid > self.len() {
            return Err(())
        }

        self.0.rotate_right(mid);
        Ok(())
    }
}

impl<T, S> BoundedVec<T, S> {
    /// Return a [`BoundedSlice`] with the same bound, borrowing the contents of `self`.
    pub fn as_bounded_slice(&self) -> BoundedSlice<T, S> {
        BoundedSlice(&self.0[..], PhantomData::default())
    }
}

impl<T, S> Default for BoundedVec<T, S> {
    fn default() -> Self {
        // The bound cannot be below 0, hence an empty vector is always within bounds.
        Self::unchecked_from(Vec::default())
    }
}

impl<T, S> core::fmt::Debug for BoundedVec<T, S>
where
    Vec<T>: core::fmt::Debug,
    S: Get<u32>,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("BoundedVec").field(&self.0).field(&Self::bound()).finish()
    }
}

impl<T, S> Clone for BoundedVec<T, S>
where
    T: Clone,
{
    fn clone(&self) -> Self {
        Self::unchecked_from(self.0.clone())
    }
}

impl<T, S: Get<u32>> TryFrom<Vec<T>> for BoundedVec<T, S> {
    type Error = Vec<T>;
    fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
        if t.len() <= Self::bound() {
            Ok(Self::unchecked_from(t))
        } else {
            Err(t)
        }
    }
}

impl<T, S: Get<u32>> TruncateFrom<Vec<T>> for BoundedVec<T, S> {
    fn truncate_from(unbound: Vec<T>) -> Self {
        BoundedVec::<T, S>::truncate_from(unbound)
    }
}

#[cfg(feature = "std")]
impl<T: std::hash::Hash, S> std::hash::Hash for BoundedVec<T, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

impl<T, S> AsRef<Vec<T>> for BoundedVec<T, S> {
    fn as_ref(&self) -> &Vec<T> {
        &self.0
    }
}

impl<T, S> AsRef<[T]> for BoundedVec<T, S> {
    fn as_ref(&self) -> &[T] {
        &self.0
    }
}

impl<T, S> AsMut<[T]> for BoundedVec<T, S> {
    fn as_mut(&mut self) -> &mut [T] {
        &mut self.0
    }
}

impl<T, S> Deref for BoundedVec<T, S> {
    type Target = Vec<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T, S, I> Index<I> for BoundedVec<T, S>
where
    I: SliceIndex<[T]>,
{
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        self.0.index(index)
    }
}

impl<T, S, I> IndexMut<I> for BoundedVec<T, S>
where
    I: SliceIndex<[T]>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.0.index_mut(index)
    }
}

impl<T, S> core::iter::IntoIterator for BoundedVec<T, S> {
    type Item = T;
    type IntoIter = alloc::vec::IntoIter<T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a, T, S> core::iter::IntoIterator for &'a BoundedVec<T, S> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a, T, S> core::iter::IntoIterator for &'a mut BoundedVec<T, S> {
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}

impl<T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, rhs: &BoundedVec<T, BoundRhs>) -> bool {
        self.0 == rhs.0
    }
}

impl<T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, rhs: &WeakBoundedVec<T, BoundRhs>) -> bool {
        self.0 == rhs.0
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, rhs: &BoundedSlice<'a, T, BoundRhs>) -> bool {
        self.0 == rhs.0
    }
}

impl<'a, T: PartialEq, S: Get<u32>> PartialEq<&'a [T]> for BoundedSlice<'a, T, S> {
    fn eq(&self, other: &&'a [T]) -> bool {
        &self.0 == other
    }
}

impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for BoundedVec<T, S> {
    fn eq(&self, other: &Vec<T>) -> bool {
        &self.0 == other
    }
}

impl<T, S: Get<u32>> Eq for BoundedVec<T, S> where T: Eq {}

impl<T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl<T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
        (&*self.0).partial_cmp(other.0)
    }
}

impl<T: Ord, Bound: Get<u32>> Ord for BoundedVec<T, Bound> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.0.cmp(&other.0)
    }
}

impl<I, T, Bound> TryCollect<BoundedVec<T, Bound>> for I
where
    I: ExactSizeIterator + Iterator<Item = T>,
    Bound: Get<u32>,
{
    type Error = &'static str;

    fn try_collect(self) -> Result<BoundedVec<T, Bound>, Self::Error> {
        if self.len() > Bound::get() as usize {
            Err("iterator length too big")
        } else {
            Ok(BoundedVec::<T, Bound>::unchecked_from(self.collect::<Vec<T>>()))
        }
    }
}

// A single macro body generates the codec trait impls for whichever codec crate(s) are enabled
// (`scale_codec` and/or `jam_codec`); both are expected to expose the same item names.
#[cfg(any(feature = "scale-codec", feature = "jam-codec"))]
macro_rules! codec_impl {
    ($codec:ident) => {
        use super::*;

        use $codec::{
            decode_vec_with_len, Compact, Decode, DecodeLength, DecodeWithMemTracking, Encode, EncodeLike, Error,
            Input, MaxEncodedLen,
        };

        impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> {
            fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
                // Fail fast on the length prefix so that no more than `S::get()` items are ever
                // decoded (and allocated) from the input.
                let len: u32 = <Compact<u32>>::decode(input)?.into();
                if len > S::get() {
                    return Err("BoundedVec exceeds its limit".into());
                }
                let inner = decode_vec_with_len(input, len as usize)?;
                Ok(Self(inner, PhantomData))
            }

            fn skip<I: Input>(input: &mut I) -> Result<(), Error> {
                Vec::<T>::skip(input)
            }
        }

        impl<T: DecodeWithMemTracking, S: Get<u32>> DecodeWithMemTracking for BoundedVec<T, S> {}

        impl<T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedVec<T, S> {}

        impl<T, S> MaxEncodedLen for BoundedVec<T, S>
        where
            T: MaxEncodedLen,
            S: Get<u32>,
            BoundedVec<T, S>: Encode,
        {
            fn max_encoded_len() -> usize {
                // The compact-encoded length prefix of the bound plus the worst case for each
                // element.
                Compact(S::get())
                    .encoded_size()
                    .saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
            }
        }

        impl<T, S> DecodeLength for BoundedVec<T, S> {
            fn len(self_encoded: &[u8]) -> Result<usize, Error> {
                // `BoundedVec<T, S>` is encoded exactly like `Vec<T>`, so read the length the
                // same way.
                <Vec<T> as DecodeLength>::len(self_encoded)
            }
        }

        impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<BoundedVec<T, S>> for BoundedSlice<'a, T, S> {}

        impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<WeakBoundedVec<T, S>> for BoundedSlice<'a, T, S> {}

        impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedSlice<'a, T, S> {}
    };
}

#[cfg(feature = "scale-codec")]
mod scale_codec_impl {
    codec_impl!(scale_codec);
}

#[cfg(feature = "jam-codec")]
mod jam_codec_impl {
    codec_impl!(jam_codec);
}

#[cfg(all(test, feature = "std"))]
mod test {
    use super::*;
    use crate::{bounded_vec, ConstU32};
    #[cfg(feature = "scale-codec")]
    use scale_codec::{Compact, CompactLen, Decode, Encode};

    #[test]
    #[cfg(feature = "scale-codec")]
    fn encoding_same_as_unbounded_vec() {
        let b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
        let v: Vec<u32> = vec![0, 1, 2, 3, 4, 5];

        assert_eq!(b.encode(), v.encode());
    }

    #[test]
    fn slice_truncate_from_works() {
        let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3, 4, 5]);
        assert_eq!(bounded.deref(), &[1, 2, 3, 4]);
        let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3, 4]);
        assert_eq!(bounded.deref(), &[1, 2, 3, 4]);
        let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3]);
        assert_eq!(bounded.deref(), &[1, 2, 3]);
    }

    #[test]
    fn slide_works() {
        let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
        assert!(b.slide(1, 5));
        assert_eq!(*b, vec![0, 2, 3, 4, 1, 5]);
        assert!(b.slide(4, 0));
        assert_eq!(*b, vec![1, 0, 2, 3, 4, 5]);
        assert!(b.slide(0, 2));
        assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
        assert!(b.slide(1, 6));
        assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
        assert!(b.slide(0, 6));
        assert_eq!(*b, vec![2, 3, 4, 5, 1, 0]);
        assert!(b.slide(5, 0));
        assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
        assert!(!b.slide(6, 0));
        assert!(!b.slide(7, 0));
        assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);

        let mut c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
        assert!(!c.slide(1, 5));
        assert_eq!(*c, vec![0, 1, 2]);
        assert!(!c.slide(4, 0));
        assert_eq!(*c, vec![0, 1, 2]);
        assert!(!c.slide(3, 0));
        assert_eq!(*c, vec![0, 1, 2]);
        assert!(c.slide(2, 0));
        assert_eq!(*c, vec![2, 0, 1]);
    }

    #[test]
    fn slide_noops_work() {
        let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
        assert!(!b.slide(3, 3));
        assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
        assert!(!b.slide(3, 4));
        assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
    }

    #[test]
    fn force_insert_keep_left_works() {
        let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
        assert_eq!(b.force_insert_keep_left(1, 10), Err(10));
        assert!(b.is_empty());

        assert_eq!(b.force_insert_keep_left(0, 30), Ok(None));
        assert_eq!(b.force_insert_keep_left(0, 10), Ok(None));
        assert_eq!(b.force_insert_keep_left(1, 20), Ok(None));
        assert_eq!(b.force_insert_keep_left(3, 40), Ok(None));
        assert_eq!(*b, vec![10, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_left(4, 41), Err(41));
        assert_eq!(*b, vec![10, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_left(3, 31), Ok(Some(40)));
        assert_eq!(*b, vec![10, 20, 30, 31]);
        assert_eq!(b.force_insert_keep_left(1, 11), Ok(Some(31)));
        assert_eq!(*b, vec![10, 11, 20, 30]);
        assert_eq!(b.force_insert_keep_left(0, 1), Ok(Some(30)));
        assert_eq!(*b, vec![1, 10, 11, 20]);

        let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
        assert!(z.is_empty());
        assert_eq!(z.force_insert_keep_left(0, 10), Err(10));
        assert!(z.is_empty());
    }

    #[test]
    fn force_insert_keep_right_works() {
        let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
        assert_eq!(b.force_insert_keep_right(1, 10), Err(10));
        assert!(b.is_empty());

        assert_eq!(b.force_insert_keep_right(0, 30), Ok(None));
        assert_eq!(b.force_insert_keep_right(0, 10), Ok(None));
        assert_eq!(b.force_insert_keep_right(1, 20), Ok(None));
        assert_eq!(b.force_insert_keep_right(3, 40), Ok(None));
        assert_eq!(*b, vec![10, 20, 30, 40]);

        assert_eq!(b.force_insert_keep_right(0, 0), Err(0));
        assert_eq!(*b, vec![10, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_right(1, 11), Ok(Some(10)));
        assert_eq!(*b, vec![11, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_right(3, 31), Ok(Some(11)));
        assert_eq!(*b, vec![20, 30, 31, 40]);
        assert_eq!(b.force_insert_keep_right(4, 41), Ok(Some(20)));
        assert_eq!(*b, vec![30, 31, 40, 41]);

        assert_eq!(b.force_insert_keep_right(5, 69), Err(69));
        assert_eq!(*b, vec![30, 31, 40, 41]);

        let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
        assert!(z.is_empty());
        assert_eq!(z.force_insert_keep_right(0, 10), Err(10));
        assert!(z.is_empty());
    }

    #[test]
    fn bound_returns_correct_value() {
        assert_eq!(BoundedVec::<u32, ConstU32<7>>::bound(), 7);
    }

    #[test]
    fn try_insert_works() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        bounded.try_insert(1, 0).unwrap();
        assert_eq!(*bounded, vec![1, 0, 2, 3]);

        assert!(bounded.try_insert(0, 9).is_err());
        assert_eq!(*bounded, vec![1, 0, 2, 3]);
    }

    #[test]
    fn constructor_macro_works() {
        let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2), (1, 2), (1, 2)];
        assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);

        let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2); 3];
        assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
    }

    #[test]
    #[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
    fn try_insert_panics_if_oob() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        bounded.try_insert(9, 0).unwrap();
    }

    #[test]
    fn try_push_works() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        bounded.try_push(0).unwrap();
        assert_eq!(*bounded, vec![1, 2, 3, 0]);

        assert!(bounded.try_push(9).is_err());
    }
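
    // Not part of the original suite: a minimal sketch of `force_push` as implemented above
    // (when full, the last element is dropped to make room; a zero bound makes it a no-op).
    #[test]
    fn force_push_example_sketch() {
        let mut b: BoundedVec<u32, ConstU32<3>> = bounded_vec![1, 2];
        b.force_push(3);
        assert_eq!(*b, vec![1, 2, 3]);

        // Full: the last element (3) is dropped before pushing 4.
        b.force_push(4);
        assert_eq!(*b, vec![1, 2, 4]);

        // With a zero bound, `force_push` silently discards the element.
        let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
        z.force_push(9);
        assert!(z.is_empty());
    }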

    #[test]
    fn deref_vec_coercion_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        assert_eq!(bounded.len(), 3);
        assert!(bounded.iter().next().is_some());
        assert!(!bounded.is_empty());
    }

    #[test]
    fn deref_slice_coercion_works() {
        let bounded = BoundedSlice::<u32, ConstU32<7>>::try_from(&[1, 2, 3][..]).unwrap();
        assert_eq!(bounded.len(), 3);
        assert!(bounded.iter().next().is_some());
        assert!(!bounded.is_empty());
    }
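
    // Not part of the original suite: a small sketch showing that the mutable accessors
    // (`get_mut`, `last_mut`, `iter_mut`) pass straight through to the inner `Vec`.
    #[test]
    fn mutable_accessors_example_sketch() {
        let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        *b.get_mut(0).unwrap() = 10;
        *b.last_mut().unwrap() = 30;
        b.iter_mut().for_each(|x| *x += 1);
        assert_eq!(*b, vec![11, 3, 31]);
    }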

    #[test]
    fn try_mutate_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
        let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
        assert_eq!(bounded.len(), 7);
        assert!(bounded.try_mutate(|v| v.push(8)).is_none());
    }

    #[test]
    fn slice_indexing_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
        assert_eq!(&bounded[0..=2], &[1, 2, 3]);
    }
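
    // Not part of the original suite: a minimal sketch of the `Vec`-delegating `drain` and
    // `retain` helpers, neither of which can grow the vector past its bound.
    #[test]
    fn drain_and_retain_example_sketch() {
        let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![1, 2, 3, 4, 5, 6];
        let drained: Vec<u32> = b.drain(0..2).collect();
        assert_eq!(drained, vec![1, 2]);
        assert_eq!(*b, vec![3, 4, 5, 6]);

        b.retain(|x| x % 2 == 0);
        assert_eq!(*b, vec![4, 6]);
    }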

    #[test]
    fn vec_eq_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
        assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
    }

    #[test]
    #[cfg(feature = "scale-codec")]
    fn too_big_vec_fail_to_decode() {
        let v: Vec<u32> = vec![1, 2, 3, 4, 5];
        assert_eq!(
            BoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
            Err("BoundedVec exceeds its limit".into()),
        );
    }

    #[test]
    #[cfg(feature = "scale-codec")]
    fn dont_consume_more_data_than_bounded_len() {
        let v: Vec<u32> = vec![1, 2, 3, 4, 5];
        let data = v.encode();
        let data_input = &mut &data[..];

        BoundedVec::<u32, ConstU32<4>>::decode(data_input).unwrap_err();
        assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
    }
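
    // Not part of the original suite: a sketch checking the `MaxEncodedLen` formula used above
    // (compact length prefix of the bound plus `bound * max_encoded_len` of the element type).
    // Assumes `MaxEncodedLen` is reachable at `scale_codec::MaxEncodedLen` under this feature.
    #[test]
    #[cfg(feature = "scale-codec")]
    fn max_encoded_len_example_sketch() {
        use scale_codec::MaxEncodedLen;
        let expected = Compact::<u32>::compact_len(&4) + 4 * u32::max_encoded_len();
        assert_eq!(BoundedVec::<u32, ConstU32<4>>::max_encoded_len(), expected);
    }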

    #[test]
    fn eq_works() {
        let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        assert_eq!(b1, b2);

        crate::parameter_types! {
            B1: u32 = 7;
            B2: u32 = 7;
        }
        let b1: BoundedVec<u32, B1> = bounded_vec![1, 2, 3];
        let b2: BoundedVec<u32, B2> = bounded_vec![1, 2, 3];
        assert_eq!(b1, b2);
    }

    #[test]
    fn ord_works() {
        use std::cmp::Ordering;
        let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 3, 2];

        assert_eq!(b1.cmp(&b2), Ordering::Less);
        assert_eq!(b1.cmp(&b2), b1.into_inner().cmp(&b2.into_inner()));
    }

    #[test]
    fn try_extend_works() {
        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];

        assert!(b.try_extend(vec![4].into_iter()).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4]);

        assert!(b.try_extend(vec![5].into_iter()).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);

        assert!(b.try_extend(vec![6].into_iter()).is_err());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);

        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
        assert!(b.try_extend(vec![4, 5].into_iter()).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);

        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
        assert!(b.try_extend(vec![4, 5, 6].into_iter()).is_err());
        assert_eq!(*b, vec![1, 2, 3]);
    }
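
    // Not part of the original suite: a minimal sketch of `try_append`, which moves the elements
    // out of `other` on success and leaves everything untouched on failure.
    #[test]
    fn try_append_example_sketch() {
        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
        let mut other = vec![4, 5];
        assert!(b.try_append(&mut other).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);
        assert!(other.is_empty());

        let mut too_much = vec![6];
        assert!(b.try_append(&mut too_much).is_err());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);
        assert_eq!(too_much, vec![6]);
    }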

    #[test]
    fn test_serializer() {
        let c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
        assert_eq!(serde_json::json!(&c).to_string(), r#"[0,1,2]"#);
    }

    #[test]
    fn test_deserializer() {
        let c: BoundedVec<u32, ConstU32<6>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();

        assert_eq!(c.len(), 3);
        assert_eq!(c[0], 0);
        assert_eq!(c[1], 1);
        assert_eq!(c[2], 2);
    }

    #[test]
    fn test_deserializer_bound() {
        let c: BoundedVec<u32, ConstU32<3>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();

        assert_eq!(c.len(), 3);
        assert_eq!(c[0], 0);
        assert_eq!(c[1], 1);
        assert_eq!(c[2], 2);
    }

    #[test]
    fn test_deserializer_failed() {
        let c: Result<BoundedVec<u32, ConstU32<4>>, serde_json::error::Error> = serde_json::from_str(r#"[0,1,2,3,4]"#);

        match c {
            Err(msg) => assert_eq!(msg.to_string(), "out of bounds at line 1 column 11"),
            _ => unreachable!("deserializer must raise error"),
        }
    }

    #[test]
    fn bounded_vec_try_from_works() {
        assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0]).is_ok());
        assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1]).is_ok());
        assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1, 2]).is_err());
    }

    #[test]
    fn bounded_slice_try_from_works() {
        assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0][..]).is_ok());
        assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1][..]).is_ok());
        assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1, 2][..]).is_err());
    }

    #[test]
    fn can_be_collected() {
        let b1: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3, 4];
        let b2: BoundedVec<u32, ConstU32<5>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
        assert_eq!(b2, vec![2, 3, 4, 5]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
        assert_eq!(b2, vec![2, 3, 4, 5]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().try_collect().unwrap();
        assert_eq!(b2, vec![5, 4, 3, 2]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
        assert_eq!(b2, vec![3, 2]);
        let b2: BoundedVec<u32, ConstU32<2>> = b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
        assert_eq!(b2, vec![3, 2]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
        assert_eq!(b2, vec![5, 4]);
        let b2: BoundedVec<u32, ConstU32<2>> = b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
        assert_eq!(b2, vec![5, 4]);

        let b2: Result<BoundedVec<u32, ConstU32<3>>, _> = b1.iter().map(|x| x + 1).try_collect();
        assert!(b2.is_err());

        let b2: Result<BoundedVec<u32, ConstU32<1>>, _> = b1.iter().map(|x| x + 1).rev().take(2).try_collect();
        assert!(b2.is_err());
    }

    #[test]
    fn bounded_vec_debug_works() {
        let bound = BoundedVec::<u32, ConstU32<5>>::truncate_from(vec![1, 2, 3]);
        assert_eq!(format!("{:?}", bound), "BoundedVec([1, 2, 3], 5)");
    }

    #[test]
    fn bounded_slice_debug_works() {
        let bound = BoundedSlice::<u32, ConstU32<5>>::truncate_from(&[1, 2, 3]);
        assert_eq!(format!("{:?}", bound), "BoundedSlice([1, 2, 3], 5)");
    }

    #[test]
    fn bounded_vec_sort_by_key_works() {
        let mut v: BoundedVec<i32, ConstU32<5>> = bounded_vec![-5, 4, 1, -3, 2];
        v.sort_by_key(|k| k.abs());
        assert_eq!(v, vec![1, 2, -3, 4, -5]);
    }

    #[test]
    fn bounded_vec_truncate_from_works() {
        let unbound = vec![1, 2, 3, 4, 5];
        let bound = BoundedVec::<u32, ConstU32<3>>::truncate_from(unbound.clone());
        assert_eq!(bound, vec![1, 2, 3]);
    }
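
    // Not part of the original suite: a minimal sketch of going through the `TruncateFrom` trait
    // itself rather than the inherent `truncate_from` constructors.
    #[test]
    fn truncate_from_trait_example_sketch() {
        fn bound_it<B: TruncateFrom<Vec<u32>>>(unbound: Vec<u32>) -> B {
            B::truncate_from(unbound)
        }

        let bound: BoundedVec<u32, ConstU32<3>> = bound_it(vec![1, 2, 3, 4, 5]);
        assert_eq!(bound, vec![1, 2, 3]);
    }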

    #[test]
    fn bounded_slice_truncate_from_works() {
        let unbound = [1, 2, 3, 4, 5];
        let bound = BoundedSlice::<u32, ConstU32<3>>::truncate_from(&unbound);
        assert_eq!(bound, &[1, 2, 3][..]);
    }

    #[test]
    fn bounded_slice_partialeq_slice_works() {
        let unbound = [1, 2, 3];
        let bound = BoundedSlice::<u32, ConstU32<3>>::truncate_from(&unbound);

        assert_eq!(bound, &unbound[..]);
        assert!(bound == &unbound[..]);
    }
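
    // Not part of the original suite: a small sketch of `as_bounded_slice`, which borrows the
    // contents of a `BoundedVec` as a `BoundedSlice` with the same bound.
    #[test]
    fn as_bounded_slice_example_sketch() {
        let bound: BoundedVec<u32, ConstU32<3>> = bounded_vec![1, 2, 3];
        let slice = bound.as_bounded_slice();
        assert_eq!(slice, BoundedSlice::<u32, ConstU32<3>>::try_from(&[1, 2, 3][..]).unwrap());

        let raw: &[u32] = slice.into();
        assert_eq!(raw, &[1, 2, 3][..]);
    }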

    #[test]
    fn bounded_vec_try_rotate_left_works() {
        let o = BoundedVec::<u32, ConstU32<3>>::truncate_from(vec![1, 2, 3]);
        let mut bound = o.clone();

        bound.try_rotate_left(0).unwrap();
        assert_eq!(bound, o);
        bound.try_rotate_left(3).unwrap();
        assert_eq!(bound, o);

        bound.try_rotate_left(4).unwrap_err();
        assert_eq!(bound, o);

        bound.try_rotate_left(1).unwrap();
        assert_eq!(bound, vec![2, 3, 1]);
        bound.try_rotate_left(2).unwrap();
        assert_eq!(bound, o);
    }

    #[test]
    fn bounded_vec_try_rotate_right_works() {
        let o = BoundedVec::<u32, ConstU32<3>>::truncate_from(vec![1, 2, 3]);
        let mut bound = o.clone();

        bound.try_rotate_right(0).unwrap();
        assert_eq!(bound, o);
        bound.try_rotate_right(3).unwrap();
        assert_eq!(bound, o);

        bound.try_rotate_right(4).unwrap_err();
        assert_eq!(bound, o);

        bound.try_rotate_right(1).unwrap();
        assert_eq!(bound, vec![3, 1, 2]);
        bound.try_rotate_right(2).unwrap();
        assert_eq!(bound, o);
    }

    #[test]
    #[cfg(feature = "std")]
    fn container_can_derive_hash() {
        #[derive(Hash)]
        struct Foo<'a> {
            bar: u8,
            slice: BoundedSlice<'a, usize, ConstU32<4>>,
            map: BoundedVec<String, ConstU32<16>>,
        }
        let _foo = Foo { bar: 42, slice: BoundedSlice::truncate_from(&[0, 1][..]), map: BoundedVec::default() };
    }

    #[test]
    fn is_full_works() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        assert!(!bounded.is_full());
        bounded.try_insert(1, 0).unwrap();
        assert_eq!(*bounded, vec![1, 0, 2, 3]);

        assert!(bounded.is_full());
        assert!(bounded.try_insert(0, 9).is_err());
        assert_eq!(*bounded, vec![1, 0, 2, 3]);
    }
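
    // Not part of the original suite: a minimal sketch of `bounded_resize`, which behaves like
    // `Vec::resize` except that the requested size is first capped at the bound.
    #[test]
    fn bounded_resize_example_sketch() {
        let mut b: BoundedVec<u32, ConstU32<3>> = bounded_vec![1];
        b.bounded_resize(10, 9);
        assert_eq!(*b, vec![1, 9, 9]);

        b.bounded_resize(1, 9);
        assert_eq!(*b, vec![1]);
    }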
}