use super::{BoundedSlice, BoundedVec};
use crate::Get;
use alloc::vec::Vec;
use codec::{Decode, DecodeWithMemTracking, Encode, MaxEncodedLen};
use core::{
	marker::PhantomData,
	ops::{Deref, Index, IndexMut},
	slice::SliceIndex,
};
#[cfg(feature = "serde")]
use serde::{
	de::{Error, SeqAccess, Visitor},
	Deserialize, Deserializer, Serialize,
};

/// A weakly bounded vector.
///
/// The length is not strictly bounded: SCALE-decoding a vector longer than the bound succeeds
/// with a log warning, and `force_from` allows constructing an oversized instance directly. The
/// fallible constructors and mutators (`try_from`, `try_push`, `try_insert`, `try_mutate`) do
/// respect the bound.
#[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))]
#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
pub struct WeakBoundedVec<T, S>(
	pub(super) Vec<T>,
	#[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData<S>,
);

#[cfg(feature = "serde")]
impl<'de, T, S: Get<u32>> Deserialize<'de> for WeakBoundedVec<T, S>
where
	T: Deserialize<'de>,
{
	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
	where
		D: Deserializer<'de>,
	{
		struct VecVisitor<T, S: Get<u32>>(PhantomData<(T, S)>);

		impl<'de, T, S: Get<u32>> Visitor<'de> for VecVisitor<T, S>
		where
			T: Deserialize<'de>,
		{
			type Value = Vec<T>;

			fn expecting(&self, formatter: &mut alloc::fmt::Formatter) -> alloc::fmt::Result {
				formatter.write_str("a sequence")
			}

			fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
			where
				A: SeqAccess<'de>,
			{
				let size = seq.size_hint().unwrap_or(0);
				let max = match usize::try_from(S::get()) {
					Ok(n) => n,
					Err(_) => return Err(A::Error::custom("can't convert to usize")),
				};
				if size > max {
					log::warn!(
						target: "runtime",
						"length of a bounded vector while deserializing is not respected.",
					);
				}
				let mut values = Vec::with_capacity(size);

				while let Some(value) = seq.next_element()? {
					values.push(value);
					if values.len() > max {
						log::warn!(
							target: "runtime",
							"length of a bounded vector while deserializing is not respected.",
						);
					}
				}

				Ok(values)
			}
		}

		let visitor: VecVisitor<T, S> = VecVisitor(PhantomData);
		deserializer
			.deserialize_seq(visitor)
			.map(|v| WeakBoundedVec::<T, S>::try_from(v).map_err(|_| Error::custom("out of bounds")))?
	}
}

impl<T: Decode, S: Get<u32>> Decode for WeakBoundedVec<T, S> {
	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
		let inner = Vec::<T>::decode(input)?;
		Ok(Self::force_from(inner, Some("decode")))
	}

	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
		Vec::<T>::skip(input)
	}
}

impl<T: DecodeWithMemTracking, S: Get<u32>> DecodeWithMemTracking for WeakBoundedVec<T, S> {}

impl<T, S> WeakBoundedVec<T, S> {
	/// Create `Self` from `t` without any checks.
	fn unchecked_from(t: Vec<T>) -> Self {
		Self(t, Default::default())
	}

	/// Consume self, and return the inner `Vec`.
	///
	/// If the reverse conversion is needed later, `TryFrom<Vec<_>>` can be used.
	pub fn into_inner(self) -> Vec<T> {
		self.0
	}

	/// Exactly the same semantics as [`Vec::remove`].
	///
	/// Panics if `index` is out of bounds.
	pub fn remove(&mut self, index: usize) -> T {
		self.0.remove(index)
	}

	/// Exactly the same semantics as [`Vec::swap_remove`].
	///
	/// Panics if `index` is out of bounds.
	pub fn swap_remove(&mut self, index: usize) -> T {
		self.0.swap_remove(index)
	}

	/// Exactly the same semantics as [`Vec::retain`].
	pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
		self.0.retain(f)
	}

	/// Exactly the same semantics as [`slice::get_mut`].
	pub fn get_mut<I: SliceIndex<[T]>>(
		&mut self,
		index: I,
	) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
		self.0.get_mut(index)
	}
}

impl<T, S: Get<u32>> WeakBoundedVec<T, S> {
	/// Get the bound of the type in `usize`.
	pub fn bound() -> usize {
		S::get() as usize
	}

	/// Create `Self` from `t` without any checks.
	///
	/// Logs a warning if the bound is not respected. The additional `scope` can be used to
	/// indicate where a potential overflow is happening.
	pub fn force_from(t: Vec<T>, scope: Option<&'static str>) -> Self {
		if t.len() > Self::bound() {
			log::warn!(
				target: "runtime",
				"length of a bounded vector in scope {} is not respected.",
				scope.unwrap_or("UNKNOWN"),
			);
		}

		Self::unchecked_from(t)
	}

	/// Consumes self and mutates self via the given `mutate` function.
	///
	/// Returns `Some(Self)` if the outcome of the mutation is within bounds, `None` otherwise.
	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
		mutate(&mut self.0);
		(self.0.len() <= Self::bound()).then(move || self)
	}

	/// Exactly the same semantics as [`Vec::insert`], but returns an `Err` (and is a noop) if the
	/// new length of the vector would exceed `S`.
	///
	/// Panics if `index > len`.
	pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), ()> {
		if self.len() < Self::bound() {
			self.0.insert(index, element);
			Ok(())
		} else {
			Err(())
		}
	}

	/// Exactly the same semantics as [`Vec::push`], but returns an `Err` (and is a noop) if the
	/// new length of the vector would exceed `S`.
	pub fn try_push(&mut self, element: T) -> Result<(), ()> {
		if self.len() < Self::bound() {
			self.0.push(element);
			Ok(())
		} else {
			Err(())
		}
	}

	/// Returns true if this collection is full.
	pub fn is_full(&self) -> bool {
		self.len() >= Self::bound()
	}
}

impl<T, S> Default for WeakBoundedVec<T, S> {
	fn default() -> Self {
		// an empty vector is always within bounds.
		Self::unchecked_from(Vec::default())
	}
}

impl<T, S> core::fmt::Debug for WeakBoundedVec<T, S>
where
	Vec<T>: core::fmt::Debug,
	S: Get<u32>,
{
	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
		f.debug_tuple("WeakBoundedVec").field(&self.0).field(&Self::bound()).finish()
	}
}

impl<T, S> Clone for WeakBoundedVec<T, S>
where
	T: Clone,
{
	fn clone(&self) -> Self {
		// the length is unchanged, so the (weak) bound is respected as much as before.
		Self::unchecked_from(self.0.clone())
	}
}

impl<T, S: Get<u32>> TryFrom<Vec<T>> for WeakBoundedVec<T, S> {
	type Error = ();
	fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
		if t.len() <= Self::bound() {
			// explicit check just above
			Ok(Self::unchecked_from(t))
		} else {
			Err(())
		}
	}
}

impl<T, S> AsRef<Vec<T>> for WeakBoundedVec<T, S> {
	fn as_ref(&self) -> &Vec<T> {
		&self.0
	}
}

impl<T, S> AsRef<[T]> for WeakBoundedVec<T, S> {
	fn as_ref(&self) -> &[T] {
		&self.0
	}
}

impl<T, S> AsMut<[T]> for WeakBoundedVec<T, S> {
	fn as_mut(&mut self) -> &mut [T] {
		&mut self.0
	}
}

impl<T, S> Deref for WeakBoundedVec<T, S> {
	type Target = Vec<T>;

	fn deref(&self) -> &Self::Target {
		&self.0
	}
}

impl<T, S, I> Index<I> for WeakBoundedVec<T, S>
where
	I: SliceIndex<[T]>,
{
	type Output = I::Output;

	#[inline]
	fn index(&self, index: I) -> &Self::Output {
		self.0.index(index)
	}
}

impl<T, S, I> IndexMut<I> for WeakBoundedVec<T, S>
where
	I: SliceIndex<[T]>,
{
	#[inline]
	fn index_mut(&mut self, index: I) -> &mut Self::Output {
		self.0.index_mut(index)
	}
}

impl<T, S> core::iter::IntoIterator for WeakBoundedVec<T, S> {
	type Item = T;
	type IntoIter = alloc::vec::IntoIter<T>;
	fn into_iter(self) -> Self::IntoIter {
		self.0.into_iter()
	}
}

impl<'a, T, S> core::iter::IntoIterator for &'a WeakBoundedVec<T, S> {
	type Item = &'a T;
	type IntoIter = core::slice::Iter<'a, T>;
	fn into_iter(self) -> Self::IntoIter {
		self.0.iter()
	}
}

impl<'a, T, S> core::iter::IntoIterator for &'a mut WeakBoundedVec<T, S> {
	type Item = &'a mut T;
	type IntoIter = core::slice::IterMut<'a, T>;
	fn into_iter(self) -> Self::IntoIter {
		self.0.iter_mut()
	}
}

impl<T, S> codec::DecodeLength for WeakBoundedVec<T, S> {
	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
		// `WeakBoundedVec<T, _>` stores just a `Vec<T>`, thus the length is at the beginning in
		// `Compact` form, and the same implementation as `Vec<T>` can be used.
		<Vec<T> as codec::DecodeLength>::len(self_encoded)
	}
}

impl<T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for WeakBoundedVec<T, BoundSelf>
where
	T: PartialEq,
	BoundSelf: Get<u32>,
	BoundRhs: Get<u32>,
{
	fn eq(&self, rhs: &WeakBoundedVec<T, BoundRhs>) -> bool {
		self.0 == rhs.0
	}
}

impl<T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for WeakBoundedVec<T, BoundSelf>
where
	T: PartialEq,
	BoundSelf: Get<u32>,
	BoundRhs: Get<u32>,
{
	fn eq(&self, rhs: &BoundedVec<T, BoundRhs>) -> bool {
		self.0 == rhs.0
	}
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for WeakBoundedVec<T, BoundSelf>
where
	T: PartialEq,
	BoundSelf: Get<u32>,
	BoundRhs: Get<u32>,
{
	fn eq(&self, rhs: &BoundedSlice<'a, T, BoundRhs>) -> bool {
		self.0 == rhs.0
	}
}

impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for WeakBoundedVec<T, S> {
	fn eq(&self, other: &Vec<T>) -> bool {
		&self.0 == other
	}
}

impl<T, S: Get<u32>> Eq for WeakBoundedVec<T, S> where T: Eq {}

impl<T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for WeakBoundedVec<T, BoundSelf>
where
	T: PartialOrd,
	BoundSelf: Get<u32>,
	BoundRhs: Get<u32>,
{
	fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
		self.0.partial_cmp(&other.0)
	}
}

impl<T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for WeakBoundedVec<T, BoundSelf>
where
	T: PartialOrd,
	BoundSelf: Get<u32>,
	BoundRhs: Get<u32>,
{
	fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
		self.0.partial_cmp(&other.0)
	}
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for WeakBoundedVec<T, BoundSelf>
where
	T: PartialOrd,
	BoundSelf: Get<u32>,
	BoundRhs: Get<u32>,
{
	fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
		(&*self.0).partial_cmp(other.0)
	}
}

impl<T: Ord, S: Get<u32>> Ord for WeakBoundedVec<T, S> {
	fn cmp(&self, other: &Self) -> core::cmp::Ordering {
		self.0.cmp(&other.0)
	}
}

impl<T, S> MaxEncodedLen for WeakBoundedVec<T, S>
where
	T: MaxEncodedLen,
	S: Get<u32>,
	WeakBoundedVec<T, S>: Encode,
{
	fn max_encoded_len() -> usize {
		// A `WeakBoundedVec<T, S>` encodes like a `Vec<T>`, i.e. a SCALE `Compact<u32>` length
		// prefix followed by each item.
		codec::Compact(S::get())
			.encoded_size()
			.saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
	}
}

#[cfg(test)]
mod test {
	use super::*;
	use crate::ConstU32;
	use alloc::vec;

	#[test]
	fn bound_returns_correct_value() {
		assert_eq!(WeakBoundedVec::<u32, ConstU32<7>>::bound(), 7);
	}

	#[test]
	fn try_insert_works() {
		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
		bounded.try_insert(1, 0).unwrap();
		assert_eq!(*bounded, vec![1, 0, 2, 3]);

		assert!(bounded.try_insert(0, 9).is_err());
		assert_eq!(*bounded, vec![1, 0, 2, 3]);
	}

	#[test]
	#[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
	fn try_insert_panics_if_oob() {
		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
		bounded.try_insert(9, 0).unwrap();
	}

	#[test]
	fn try_push_works() {
		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
		bounded.try_push(0).unwrap();
		assert_eq!(*bounded, vec![1, 2, 3, 0]);

		assert!(bounded.try_push(9).is_err());
	}

	#[test]
	fn deref_coercion_works() {
		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3].try_into().unwrap();
		assert_eq!(bounded.len(), 3);
		assert!(bounded.iter().next().is_some());
		assert!(!bounded.is_empty());
	}

	#[test]
	fn try_mutate_works() {
		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
		let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
		assert_eq!(bounded.len(), 7);
		assert!(bounded.try_mutate(|v| v.push(8)).is_none());
	}

	#[test]
	fn slice_indexing_works() {
		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
		assert_eq!(&bounded[0..=2], &[1, 2, 3]);
	}

	#[test]
	fn vec_eq_works() {
		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
		assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
	}
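
	// Illustrative extra test (not in the original suite): `DecodeLength` reads the length
	// from the SCALE `Compact` prefix of the encoding, delegating to the `Vec<T>` impl.
	#[test]
	fn decode_length_works() {
		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
		let encoded = v.encode();
		assert_eq!(<WeakBoundedVec<u32, ConstU32<7>> as codec::DecodeLength>::len(&encoded).unwrap(), 5);
	}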

	#[test]
	fn too_big_succeed_to_decode() {
		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
		let w = WeakBoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]).unwrap();
		assert_eq!(v, *w);
	}
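
	// Illustrative extra test (not in the original suite): the bound is weak, so `force_from`
	// keeps an oversized `Vec` intact and only emits a log warning.
	#[test]
	fn force_from_keeps_oversized_vec() {
		let oversized = vec![1u32, 2, 3, 4, 5];
		let weak = WeakBoundedVec::<u32, ConstU32<4>>::force_from(oversized.clone(), Some("test"));
		assert_eq!(*weak, oversized);
		assert!(weak.len() > WeakBoundedVec::<u32, ConstU32<4>>::bound());
	}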

	#[test]
	fn is_full_works() {
		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
		assert!(!bounded.is_full());
		bounded.try_insert(1, 0).unwrap();
		assert_eq!(*bounded, vec![1, 0, 2, 3]);

		assert!(bounded.is_full());
		assert!(bounded.try_insert(0, 9).is_err());
		assert_eq!(*bounded, vec![1, 0, 2, 3]);
	}
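
	// Illustrative extra test (not in the original suite): `max_encoded_len` is the encoded
	// size of the `Compact` length prefix plus `bound()` items of `T::max_encoded_len()`.
	#[test]
	fn max_encoded_len_works() {
		// `Compact(4u32)` encodes to 1 byte; 4 * `u32::max_encoded_len()` (4 bytes) = 16.
		assert_eq!(WeakBoundedVec::<u32, ConstU32<4>>::max_encoded_len(), 17);
	}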
}