// bounded_collections/bounded_vec.rs

// This file is part of Substrate.

// Copyright (C) 2017-2023 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
17
//! Traits, types and structs to support putting a bounded vector into storage, as a raw value, map
//! or a double map.
20
21use super::WeakBoundedVec;
22use crate::{Get, TryCollect};
23use alloc::vec::Vec;
24use codec::{decode_vec_with_len, Compact, Decode, DecodeWithMemTracking, Encode, EncodeLike, MaxEncodedLen};
25use core::{
26	marker::PhantomData,
27	ops::{Deref, Index, IndexMut, RangeBounds},
28	slice::SliceIndex,
29};
30#[cfg(feature = "serde")]
31use serde::{
32	de::{Error, SeqAccess, Visitor},
33	Deserialize, Deserializer, Serialize,
34};
35
/// A bounded vector.
///
/// It has implementations for efficient append and length decoding, as with a normal `Vec<_>`, once
/// put into storage as a raw value, map or double-map.
///
/// As the name suggests, the length of the queue is always bounded. All internal operations ensure
/// this bound is respected.
#[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))]
#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
#[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))]
// The second field is a zero-sized marker tying the bound type `S` to the struct; it carries no
// data, is skipped by serde, and `S` is excluded from the `TypeInfo` type parameters above.
pub struct BoundedVec<T, S>(pub(super) Vec<T>, #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData<S>);
48
/// Create an object through truncation.
///
/// In contrast to `TryFrom`, this conversion is infallible: input longer than the
/// implementer's bound is silently truncated rather than rejected.
pub trait TruncateFrom<T> {
	/// Create an object through truncation.
	fn truncate_from(unbound: T) -> Self;
}
54
#[cfg(feature = "serde")]
impl<'de, T, S: Get<u32>> Deserialize<'de> for BoundedVec<T, S>
where
	T: Deserialize<'de>,
{
	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
	where
		D: Deserializer<'de>,
	{
		// Visitor that refuses to collect more than `S::get()` elements, so malicious
		// input cannot make us buffer an unbounded sequence in memory.
		struct VecVisitor<T, S: Get<u32>>(PhantomData<(T, S)>);

		impl<'de, T, S: Get<u32>> Visitor<'de> for VecVisitor<T, S>
		where
			T: Deserialize<'de>,
		{
			type Value = Vec<T>;

			fn expecting(&self, formatter: &mut alloc::fmt::Formatter) -> alloc::fmt::Result {
				formatter.write_str("a sequence")
			}

			fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
			where
				A: SeqAccess<'de>,
			{
				// NOTE: the size hint is advisory and may lie; it is only used for a fast
				// pre-check and a bounded pre-allocation below.
				let size = seq.size_hint().unwrap_or(0);
				let max = match usize::try_from(S::get()) {
					Ok(n) => n,
					Err(_) => return Err(A::Error::custom("can't convert to usize")),
				};
				if size > max {
					Err(A::Error::custom("out of bounds"))
				} else {
					// `size <= max` here, so this allocation is bounded even for a lying hint.
					let mut values = Vec::with_capacity(size);

					while let Some(value) = seq.next_element()? {
						// Re-check element-by-element: the hint may have under-reported.
						if values.len() >= max {
							return Err(A::Error::custom("out of bounds"))
						}
						values.push(value);
					}

					Ok(values)
				}
			}
		}

		let visitor: VecVisitor<T, S> = VecVisitor(PhantomData);
		deserializer
			.deserialize_seq(visitor)
			// Defensive final bound-check; the visitor has already enforced the bound.
			.map(|v| BoundedVec::<T, S>::try_from(v).map_err(|_| Error::custom("out of bounds")))?
	}
}
108
/// A bounded slice.
///
/// Similar to a `BoundedVec`, but not owned and cannot be decoded.
#[derive(Encode, scale_info::TypeInfo)]
// As with `BoundedVec`, the `PhantomData` only ties the bound type `S` to the slice.
pub struct BoundedSlice<'a, T, S>(pub(super) &'a [T], PhantomData<S>);
114
// `BoundedSlice`s encode to something which will always decode into a `BoundedVec`,
// `WeakBoundedVec`, or a `Vec`. All three share the `Vec`/slice SCALE encoding (compact
// length prefix followed by the items), so these marker impls are sound.
impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<BoundedVec<T, S>> for BoundedSlice<'a, T, S> {}
impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<WeakBoundedVec<T, S>> for BoundedSlice<'a, T, S> {}
impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedSlice<'a, T, S> {}
120
121impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
122where
123	T: PartialEq,
124	BoundSelf: Get<u32>,
125	BoundRhs: Get<u32>,
126{
127	fn eq(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> bool {
128		self.0 == other.0
129	}
130}
131
132impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
133where
134	T: PartialEq,
135	BoundSelf: Get<u32>,
136	BoundRhs: Get<u32>,
137{
138	fn eq(&self, other: &BoundedVec<T, BoundRhs>) -> bool {
139		self.0 == other.0
140	}
141}
142
143impl<'a, T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
144where
145	T: PartialEq,
146	BoundSelf: Get<u32>,
147	BoundRhs: Get<u32>,
148{
149	fn eq(&self, other: &WeakBoundedVec<T, BoundRhs>) -> bool {
150		self.0 == other.0
151	}
152}
153
154impl<'a, T, S: Get<u32>> Eq for BoundedSlice<'a, T, S> where T: Eq {}
155
156impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
157where
158	T: PartialOrd,
159	BoundSelf: Get<u32>,
160	BoundRhs: Get<u32>,
161{
162	fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
163		self.0.partial_cmp(other.0)
164	}
165}
166
167impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
168where
169	T: PartialOrd,
170	BoundSelf: Get<u32>,
171	BoundRhs: Get<u32>,
172{
173	fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
174		self.0.partial_cmp(&*other.0)
175	}
176}
177
178impl<'a, T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
179where
180	T: PartialOrd,
181	BoundSelf: Get<u32>,
182	BoundRhs: Get<u32>,
183{
184	fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
185		self.0.partial_cmp(&*other.0)
186	}
187}
188
189impl<'a, T: Ord, Bound: Get<u32>> Ord for BoundedSlice<'a, T, Bound> {
190	fn cmp(&self, other: &Self) -> core::cmp::Ordering {
191		self.0.cmp(&other.0)
192	}
193}
194
195impl<'a, T, S: Get<u32>> TryFrom<&'a [T]> for BoundedSlice<'a, T, S> {
196	type Error = &'a [T];
197	fn try_from(t: &'a [T]) -> Result<Self, Self::Error> {
198		if t.len() <= S::get() as usize {
199			Ok(BoundedSlice(t, PhantomData))
200		} else {
201			Err(t)
202		}
203	}
204}
205
206impl<'a, T, S> From<BoundedSlice<'a, T, S>> for &'a [T] {
207	fn from(t: BoundedSlice<'a, T, S>) -> Self {
208		t.0
209	}
210}
211
impl<'a, T, S: Get<u32>> TruncateFrom<&'a [T]> for BoundedSlice<'a, T, S> {
	/// Delegates to the inherent [`BoundedSlice::truncate_from`].
	fn truncate_from(unbound: &'a [T]) -> Self {
		BoundedSlice::<T, S>::truncate_from(unbound)
	}
}
217
218impl<'a, T, S> Clone for BoundedSlice<'a, T, S> {
219	fn clone(&self) -> Self {
220		BoundedSlice(self.0, PhantomData)
221	}
222}
223
224impl<'a, T, S> core::fmt::Debug for BoundedSlice<'a, T, S>
225where
226	&'a [T]: core::fmt::Debug,
227	S: Get<u32>,
228{
229	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
230		f.debug_tuple("BoundedSlice").field(&self.0).field(&S::get()).finish()
231	}
232}
233
// Since a reference `&T` is always `Copy`, so is `BoundedSlice<'a, T, S>` (the `PhantomData`
// marker is zero-sized and trivially copyable).
impl<'a, T, S> Copy for BoundedSlice<'a, T, S> {}
236
// will allow for all immutable operations of `[T]` on `BoundedSlice<T>`.
// Deref to the *immutable* slice only: mutable access could not violate the bound either,
// but the type has no mutable reference to hand out.
impl<'a, T, S> Deref for BoundedSlice<'a, T, S> {
	type Target = [T];

	fn deref(&self) -> &Self::Target {
		self.0
	}
}
245
// Custom implementation of `Hash` since deriving it would require all generic bounds to also
// implement it.
// Hashing only the slice keeps `Hash` consistent with `PartialEq`, which also ignores `S`.
#[cfg(feature = "std")]
impl<'a, T: std::hash::Hash, S> std::hash::Hash for BoundedSlice<'a, T, S> {
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		self.0.hash(state);
	}
}
254
255impl<'a, T, S> core::iter::IntoIterator for BoundedSlice<'a, T, S> {
256	type Item = &'a T;
257	type IntoIter = core::slice::Iter<'a, T>;
258	fn into_iter(self) -> Self::IntoIter {
259		self.0.iter()
260	}
261}
262
263impl<'a, T, S: Get<u32>> BoundedSlice<'a, T, S> {
264	/// Create an instance from the first elements of the given slice (or all of it if it is smaller
265	/// than the length bound).
266	pub fn truncate_from(s: &'a [T]) -> Self {
267		Self(&s[0..(s.len().min(S::get() as usize))], PhantomData)
268	}
269}
270
impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> {
	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
		// Same as the underlying implementation for `Decode` on `Vec`, except we fail early if the
		// len is too big. This prevents over-allocation for hostile inputs.
		let len: u32 = <Compact<u32>>::decode(input)?.into();
		if len > S::get() {
			return Err("BoundedVec exceeds its limit".into())
		}
		// `len <= S::get()` was just checked, so the resulting vector respects the bound.
		let inner = decode_vec_with_len(input, len as usize)?;
		Ok(Self(inner, PhantomData))
	}

	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
		// The encoding is identical to `Vec<T>`'s, so skipping can be fully delegated.
		Vec::<T>::skip(input)
	}
}
287
// Memory-tracked decoding can be derived from the plain `Decode` impl above.
impl<T: DecodeWithMemTracking, S: Get<u32>> DecodeWithMemTracking for BoundedVec<T, S> {}

// `BoundedVec`s encode to something which will always decode as a `Vec`.
impl<T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedVec<T, S> {}
292
// None of the operations in this impl can increase the number of elements, hence they are
// available without requiring `S: Get<u32>`.
impl<T, S> BoundedVec<T, S> {
	/// Create `Self` with no items.
	pub fn new() -> Self {
		Self(Vec::new(), Default::default())
	}

	/// Create `Self` from `t` without any checks.
	// NOTE: deliberately private — callers within this module must uphold the bound themselves.
	fn unchecked_from(t: Vec<T>) -> Self {
		Self(t, Default::default())
	}

	/// Exactly the same semantics as `Vec::clear`.
	pub fn clear(&mut self) {
		self.0.clear()
	}

	/// Consume self, and return the inner `Vec`. Henceforth, the `Vec<_>` can be altered in an
	/// arbitrary way. At some point, if the reverse conversion is required, `TryFrom<Vec<_>>` can
	/// be used.
	///
	/// This is useful for cases if you need access to an internal API of the inner `Vec<_>` which
	/// is not provided by the wrapper `BoundedVec`.
	pub fn into_inner(self) -> Vec<T> {
		self.0
	}

	/// Exactly the same semantics as [`slice::sort_by`].
	///
	/// This is safe since sorting cannot change the number of elements in the vector.
	pub fn sort_by<F>(&mut self, compare: F)
	where
		F: FnMut(&T, &T) -> core::cmp::Ordering,
	{
		self.0.sort_by(compare)
	}

	/// Exactly the same semantics as [`slice::sort_by_key`].
	///
	/// This is safe since sorting cannot change the number of elements in the vector.
	pub fn sort_by_key<K, F>(&mut self, f: F)
	where
		F: FnMut(&T) -> K,
		K: core::cmp::Ord,
	{
		self.0.sort_by_key(f)
	}

	/// Exactly the same semantics as [`slice::sort`].
	///
	/// This is safe since sorting cannot change the number of elements in the vector.
	pub fn sort(&mut self)
	where
		T: core::cmp::Ord,
	{
		self.0.sort()
	}

	/// Exactly the same semantics as `Vec::remove`.
	///
	/// # Panics
	///
	/// Panics if `index` is out of bounds.
	pub fn remove(&mut self, index: usize) -> T {
		self.0.remove(index)
	}

	/// Exactly the same semantics as `slice::swap_remove`.
	///
	/// # Panics
	///
	/// Panics if `index` is out of bounds.
	pub fn swap_remove(&mut self, index: usize) -> T {
		self.0.swap_remove(index)
	}

	/// Exactly the same semantics as `Vec::retain`.
	pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
		self.0.retain(f)
	}

	/// Exactly the same semantics as `slice::get_mut`.
	// Handing out mutable element access is fine: values can change but the length cannot.
	pub fn get_mut<I: SliceIndex<[T]>>(&mut self, index: I) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
		self.0.get_mut(index)
	}

	/// Exactly the same semantics as `Vec::truncate`.
	///
	/// This is safe because `truncate` can never increase the length of the internal vector.
	pub fn truncate(&mut self, s: usize) {
		self.0.truncate(s);
	}

	/// Exactly the same semantics as `Vec::pop`.
	///
	/// This is safe since popping can only shrink the inner vector.
	pub fn pop(&mut self) -> Option<T> {
		self.0.pop()
	}

	/// Exactly the same semantics as [`slice::iter_mut`].
	pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, T> {
		self.0.iter_mut()
	}

	/// Exactly the same semantics as [`slice::last_mut`].
	pub fn last_mut(&mut self) -> Option<&mut T> {
		self.0.last_mut()
	}

	/// Exact same semantics as [`Vec::drain`].
	// Draining only removes elements, so the bound cannot be violated.
	pub fn drain<R>(&mut self, range: R) -> alloc::vec::Drain<'_, T>
	where
		R: RangeBounds<usize>,
	{
		self.0.drain(range)
	}
}
410
411impl<T, S: Get<u32>> From<BoundedVec<T, S>> for Vec<T> {
412	fn from(x: BoundedVec<T, S>) -> Vec<T> {
413		x.0
414	}
415}
416
417impl<T, S: Get<u32>> BoundedVec<T, S> {
418	/// Pre-allocate `capacity` items in self.
419	///
420	/// If `capacity` is greater than [`Self::bound`], then the minimum of the two is used.
421	pub fn with_bounded_capacity(capacity: usize) -> Self {
422		let capacity = capacity.min(Self::bound());
423		Self(Vec::with_capacity(capacity), Default::default())
424	}
425
426	/// Allocate self with the maximum possible capacity.
427	pub fn with_max_capacity() -> Self {
428		Self::with_bounded_capacity(Self::bound())
429	}
430
431	/// Consume and truncate the vector `v` in order to create a new instance of `Self` from it.
432	pub fn truncate_from(mut v: Vec<T>) -> Self {
433		v.truncate(Self::bound());
434		Self::unchecked_from(v)
435	}
436
437	/// Get the bound of the type in `usize`.
438	pub fn bound() -> usize {
439		S::get() as usize
440	}
441
442	/// Returns true if this collection is full.
443	pub fn is_full(&self) -> bool {
444		self.len() >= Self::bound()
445	}
446
447	/// Forces the insertion of `element` into `self` retaining all items with index at least
448	/// `index`.
449	///
450	/// If `index == 0` and `self.len() == Self::bound()`, then this is a no-op.
451	///
452	/// If `Self::bound() < index` or `self.len() < index`, then this is also a no-op.
453	///
454	/// Returns `Ok(maybe_removed)` if the item was inserted, where `maybe_removed` is
455	/// `Some(removed)` if an item was removed to make room for the new one. Returns `Err(element)`
456	/// if `element` cannot be inserted.
457	pub fn force_insert_keep_right(&mut self, index: usize, mut element: T) -> Result<Option<T>, T> {
458		// Check against panics.
459		if Self::bound() < index || self.len() < index {
460			Err(element)
461		} else if self.len() < Self::bound() {
462			// Cannot panic since self.len() >= index;
463			self.0.insert(index, element);
464			Ok(None)
465		} else {
466			if index == 0 {
467				return Err(element)
468			}
469			core::mem::swap(&mut self[0], &mut element);
470			// `[0..index] cannot panic since self.len() >= index.
471			// `rotate_left(1)` cannot panic because there is at least 1 element.
472			self[0..index].rotate_left(1);
473			Ok(Some(element))
474		}
475	}
476
477	/// Forces the insertion of `element` into `self` retaining all items with index at most
478	/// `index`.
479	///
480	/// If `index == Self::bound()` and `self.len() == Self::bound()`, then this is a no-op.
481	///
482	/// If `Self::bound() < index` or `self.len() < index`, then this is also a no-op.
483	///
484	/// Returns `Ok(maybe_removed)` if the item was inserted, where `maybe_removed` is
485	/// `Some(removed)` if an item was removed to make room for the new one. Returns `Err(element)`
486	/// if `element` cannot be inserted.
487	pub fn force_insert_keep_left(&mut self, index: usize, element: T) -> Result<Option<T>, T> {
488		// Check against panics.
489		if Self::bound() < index || self.len() < index || Self::bound() == 0 {
490			return Err(element)
491		}
492		// Noop condition.
493		if Self::bound() == index && self.len() <= Self::bound() {
494			return Err(element)
495		}
496		let maybe_removed = if self.is_full() {
497			// defensive-only: since we are at capacity, this is a noop.
498			self.0.truncate(Self::bound());
499			// if we truncate anything, it will be the last one.
500			self.0.pop()
501		} else {
502			None
503		};
504
505		// Cannot panic since `self.len() >= index`;
506		self.0.insert(index, element);
507		Ok(maybe_removed)
508	}
509
510	/// Move the position of an item from one location to another in the slice.
511	///
512	/// Except for the item being moved, the order of the slice remains the same.
513	///
514	/// - `index` is the location of the item to be moved.
515	/// - `insert_position` is the index of the item in the slice which should *immediately follow*
516	///   the item which is being moved.
517	///
518	/// Returns `true` of the operation was successful, otherwise `false` if a noop.
519	pub fn slide(&mut self, index: usize, insert_position: usize) -> bool {
520		// Check against panics.
521		if self.len() <= index || self.len() < insert_position || index == usize::MAX {
522			return false
523		}
524		// Noop conditions.
525		if index == insert_position || index + 1 == insert_position {
526			return false
527		}
528		if insert_position < index && index < self.len() {
529			// --- --- --- === === === === @@@ --- --- ---
530			//            ^-- N            ^O^
531			// ...
532			//               /-----<<<-----\
533			// --- --- --- === === === === @@@ --- --- ---
534			//               >>> >>> >>> >>>
535			// ...
536			// --- --- --- @@@ === === === === --- --- ---
537			//             ^N^
538			self[insert_position..index + 1].rotate_right(1);
539			return true
540		} else if insert_position > 0 && index + 1 < insert_position {
541			// Note that the apparent asymmetry of these two branches is due to the
542			// fact that the "new" position is the position to be inserted *before*.
543			// --- --- --- @@@ === === === === --- --- ---
544			//             ^O^                ^-- N
545			// ...
546			//               /----->>>-----\
547			// --- --- --- @@@ === === === === --- --- ---
548			//               <<< <<< <<< <<<
549			// ...
550			// --- --- --- === === === === @@@ --- --- ---
551			//                             ^N^
552			self[index..insert_position].rotate_left(1);
553			return true
554		}
555
556		debug_assert!(false, "all noop conditions should have been covered above");
557		false
558	}
559
560	/// Forces the insertion of `s` into `self` truncating first if necessary.
561	///
562	/// Infallible, but if the bound is zero, then it's a no-op.
563	pub fn force_push(&mut self, element: T) {
564		if Self::bound() > 0 {
565			self.0.truncate(Self::bound() as usize - 1);
566			self.0.push(element);
567		}
568	}
569
570	/// Same as `Vec::resize`, but if `size` is more than [`Self::bound`], then [`Self::bound`] is
571	/// used.
572	pub fn bounded_resize(&mut self, size: usize, value: T)
573	where
574		T: Clone,
575	{
576		let size = size.min(Self::bound());
577		self.0.resize(size, value);
578	}
579
580	/// Exactly the same semantics as [`Vec::extend`], but returns an error and does nothing if the
581	/// length of the outcome is larger than the bound.
582	pub fn try_extend(&mut self, with: impl IntoIterator<Item = T> + ExactSizeIterator) -> Result<(), ()> {
583		if with.len().saturating_add(self.len()) <= Self::bound() {
584			self.0.extend(with);
585			Ok(())
586		} else {
587			Err(())
588		}
589	}
590
591	/// Exactly the same semantics as [`Vec::append`], but returns an error and does nothing if the
592	/// length of the outcome is larger than the bound.
593	pub fn try_append(&mut self, other: &mut Vec<T>) -> Result<(), ()> {
594		if other.len().saturating_add(self.len()) <= Self::bound() {
595			self.0.append(other);
596			Ok(())
597		} else {
598			Err(())
599		}
600	}
601
602	/// Consumes self and mutates self via the given `mutate` function.
603	///
604	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
605	/// returned.
606	///
607	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
608	/// [`Self::try_from`].
609	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
610		mutate(&mut self.0);
611		(self.0.len() <= Self::bound()).then(move || self)
612	}
613
614	/// Exactly the same semantics as [`Vec::insert`], but returns an `Err` (and is a noop) if the
615	/// new length of the vector exceeds `S`.
616	///
617	/// # Panics
618	///
619	/// Panics if `index > len`.
620	pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), T> {
621		if self.len() < Self::bound() {
622			self.0.insert(index, element);
623			Ok(())
624		} else {
625			Err(element)
626		}
627	}
628
629	/// Exactly the same semantics as [`Vec::push`], but returns an `Err` (and is a noop) if the
630	/// new length of the vector exceeds `S`.
631	///
632	/// # Panics
633	///
634	/// Panics if the new capacity exceeds isize::MAX bytes.
635	pub fn try_push(&mut self, element: T) -> Result<(), T> {
636		if self.len() < Self::bound() {
637			self.0.push(element);
638			Ok(())
639		} else {
640			Err(element)
641		}
642	}
643
644	/// Exactly the same semantics as [`Vec::rotate_left`], but returns an `Err` (and is a noop) if `mid` is larger then the current length.
645	pub fn try_rotate_left(&mut self, mid: usize) -> Result<(), ()> {
646		if mid > self.len() {
647			return Err(())
648		}
649
650		self.0.rotate_left(mid);
651		Ok(())
652	}
653
654	/// Exactly the same semantics as [`Vec::rotate_right`], but returns an `Err` (and is a noop) if `mid` is larger then the current length.
655	pub fn try_rotate_right(&mut self, mid: usize) -> Result<(), ()> {
656		if mid > self.len() {
657			return Err(())
658		}
659
660		self.0.rotate_right(mid);
661		Ok(())
662	}
663}
664
665impl<T, S> BoundedVec<T, S> {
666	/// Return a [`BoundedSlice`] with the content and bound of [`Self`].
667	pub fn as_bounded_slice(&self) -> BoundedSlice<T, S> {
668		BoundedSlice(&self.0[..], PhantomData::default())
669	}
670}
671
672impl<T, S> Default for BoundedVec<T, S> {
673	fn default() -> Self {
674		// the bound cannot be below 0, which is satisfied by an empty vector
675		Self::unchecked_from(Vec::default())
676	}
677}
678
679impl<T, S> core::fmt::Debug for BoundedVec<T, S>
680where
681	Vec<T>: core::fmt::Debug,
682	S: Get<u32>,
683{
684	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
685		f.debug_tuple("BoundedVec").field(&self.0).field(&Self::bound()).finish()
686	}
687}
688
689impl<T, S> Clone for BoundedVec<T, S>
690where
691	T: Clone,
692{
693	fn clone(&self) -> Self {
694		// bound is retained
695		Self::unchecked_from(self.0.clone())
696	}
697}
698
699impl<T, S: Get<u32>> TryFrom<Vec<T>> for BoundedVec<T, S> {
700	type Error = Vec<T>;
701	fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
702		if t.len() <= Self::bound() {
703			// explicit check just above
704			Ok(Self::unchecked_from(t))
705		} else {
706			Err(t)
707		}
708	}
709}
710
impl<T, S: Get<u32>> TruncateFrom<Vec<T>> for BoundedVec<T, S> {
	/// Delegates to the inherent [`BoundedVec::truncate_from`].
	fn truncate_from(unbound: Vec<T>) -> Self {
		BoundedVec::<T, S>::truncate_from(unbound)
	}
}
716
// Custom implementation of `Hash` since deriving it would require all generic bounds to also
// implement it.
// Hashing only the inner vector keeps `Hash` consistent with `PartialEq`, which ignores `S`.
#[cfg(feature = "std")]
impl<T: std::hash::Hash, S> std::hash::Hash for BoundedVec<T, S> {
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		self.0.hash(state);
	}
}
725
// It is okay to give a non-mutable reference of the inner vec to anyone.
// (A mutable `Vec` reference is never exposed — it would allow growing past the bound.)
impl<T, S> AsRef<Vec<T>> for BoundedVec<T, S> {
	fn as_ref(&self) -> &Vec<T> {
		&self.0
	}
}
732
733impl<T, S> AsRef<[T]> for BoundedVec<T, S> {
734	fn as_ref(&self) -> &[T] {
735		&self.0
736	}
737}
738
739impl<T, S> AsMut<[T]> for BoundedVec<T, S> {
740	fn as_mut(&mut self) -> &mut [T] {
741		&mut self.0
742	}
743}
744
// will allow for all immutable operations of `Vec<T>` on `BoundedVec<T>`.
// Note: only `Deref`, never `DerefMut` — a `&mut Vec<T>` would let callers bypass the bound.
impl<T, S> Deref for BoundedVec<T, S> {
	type Target = Vec<T>;

	fn deref(&self) -> &Self::Target {
		&self.0
	}
}
753
754// Allows for indexing similar to a normal `Vec`. Can panic if out of bound.
755impl<T, S, I> Index<I> for BoundedVec<T, S>
756where
757	I: SliceIndex<[T]>,
758{
759	type Output = I::Output;
760
761	#[inline]
762	fn index(&self, index: I) -> &Self::Output {
763		self.0.index(index)
764	}
765}
766
767impl<T, S, I> IndexMut<I> for BoundedVec<T, S>
768where
769	I: SliceIndex<[T]>,
770{
771	#[inline]
772	fn index_mut(&mut self, index: I) -> &mut Self::Output {
773		self.0.index_mut(index)
774	}
775}
776
777impl<T, S> core::iter::IntoIterator for BoundedVec<T, S> {
778	type Item = T;
779	type IntoIter = alloc::vec::IntoIter<T>;
780	fn into_iter(self) -> Self::IntoIter {
781		self.0.into_iter()
782	}
783}
784
785impl<'a, T, S> core::iter::IntoIterator for &'a BoundedVec<T, S> {
786	type Item = &'a T;
787	type IntoIter = core::slice::Iter<'a, T>;
788	fn into_iter(self) -> Self::IntoIter {
789		self.0.iter()
790	}
791}
792
793impl<'a, T, S> core::iter::IntoIterator for &'a mut BoundedVec<T, S> {
794	type Item = &'a mut T;
795	type IntoIter = core::slice::IterMut<'a, T>;
796	fn into_iter(self) -> Self::IntoIter {
797		self.0.iter_mut()
798	}
799}
800
impl<T, S> codec::DecodeLength for BoundedVec<T, S> {
	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
		// `BoundedVec<T, _>` stored just a `Vec<T>`, thus the length is at the beginning in
		// `Compact` form, and same implementation as `Vec<T>` can be used.
		<Vec<T> as codec::DecodeLength>::len(self_encoded)
	}
}
808
809impl<T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
810where
811	T: PartialEq,
812	BoundSelf: Get<u32>,
813	BoundRhs: Get<u32>,
814{
815	fn eq(&self, rhs: &BoundedVec<T, BoundRhs>) -> bool {
816		self.0 == rhs.0
817	}
818}
819
820impl<T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
821where
822	T: PartialEq,
823	BoundSelf: Get<u32>,
824	BoundRhs: Get<u32>,
825{
826	fn eq(&self, rhs: &WeakBoundedVec<T, BoundRhs>) -> bool {
827		self.0 == rhs.0
828	}
829}
830
831impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for BoundedVec<T, BoundSelf>
832where
833	T: PartialEq,
834	BoundSelf: Get<u32>,
835	BoundRhs: Get<u32>,
836{
837	fn eq(&self, rhs: &BoundedSlice<'a, T, BoundRhs>) -> bool {
838		self.0 == rhs.0
839	}
840}
841
842impl<'a, T: PartialEq, S: Get<u32>> PartialEq<&'a [T]> for BoundedSlice<'a, T, S> {
843	fn eq(&self, other: &&'a [T]) -> bool {
844		&self.0 == other
845	}
846}
847
848impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for BoundedVec<T, S> {
849	fn eq(&self, other: &Vec<T>) -> bool {
850		&self.0 == other
851	}
852}
853
854impl<T, S: Get<u32>> Eq for BoundedVec<T, S> where T: Eq {}
855
856impl<T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
857where
858	T: PartialOrd,
859	BoundSelf: Get<u32>,
860	BoundRhs: Get<u32>,
861{
862	fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
863		self.0.partial_cmp(&other.0)
864	}
865}
866
867impl<T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
868where
869	T: PartialOrd,
870	BoundSelf: Get<u32>,
871	BoundRhs: Get<u32>,
872{
873	fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
874		self.0.partial_cmp(&other.0)
875	}
876}
877
878impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for BoundedVec<T, BoundSelf>
879where
880	T: PartialOrd,
881	BoundSelf: Get<u32>,
882	BoundRhs: Get<u32>,
883{
884	fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
885		(&*self.0).partial_cmp(other.0)
886	}
887}
888
889impl<T: Ord, Bound: Get<u32>> Ord for BoundedVec<T, Bound> {
890	fn cmp(&self, other: &Self) -> core::cmp::Ordering {
891		self.0.cmp(&other.0)
892	}
893}
894
impl<T, S> MaxEncodedLen for BoundedVec<T, S>
where
	T: MaxEncodedLen,
	S: Get<u32>,
	BoundedVec<T, S>: Encode,
{
	fn max_encoded_len() -> usize {
		// BoundedVec<T, S> encodes like Vec<T> which encodes like [T], which is a compact u32
		// plus each item in the slice:
		// See: https://docs.substrate.io/reference/scale-codec/
		// `Compact(S::get())` is the largest possible length prefix, so its encoded size is an
		// upper bound for the prefix of any valid instance.
		codec::Compact(S::get())
			.encoded_size()
			.saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
	}
}
910
911impl<I, T, Bound> TryCollect<BoundedVec<T, Bound>> for I
912where
913	I: ExactSizeIterator + Iterator<Item = T>,
914	Bound: Get<u32>,
915{
916	type Error = &'static str;
917
918	fn try_collect(self) -> Result<BoundedVec<T, Bound>, Self::Error> {
919		if self.len() > Bound::get() as usize {
920			Err("iterator length too big")
921		} else {
922			Ok(BoundedVec::<T, Bound>::unchecked_from(self.collect::<Vec<T>>()))
923		}
924	}
925}
926
927#[cfg(all(test, feature = "std"))]
928mod test {
929	use super::*;
930	use crate::{bounded_vec, ConstU32};
931	use codec::CompactLen;
932
	// A `BoundedVec` must be wire-compatible with a plain `Vec` of the same contents.
	#[test]
	fn encoding_same_as_unbounded_vec() {
		let b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
		let v: Vec<u32> = vec![0, 1, 2, 3, 4, 5];

		assert_eq!(b.encode(), v.encode());
	}
940
	// `BoundedSlice::truncate_from` keeps at most `S` leading elements: over-long, exact,
	// and under-long inputs are all covered.
	#[test]
	fn slice_truncate_from_works() {
		let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3, 4, 5]);
		assert_eq!(bounded.deref(), &[1, 2, 3, 4]);
		let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3, 4]);
		assert_eq!(bounded.deref(), &[1, 2, 3, 4]);
		let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3]);
		assert_eq!(bounded.deref(), &[1, 2, 3]);
	}
950
	// Moves elements in both directions and checks out-of-bounds calls are rejected.
	#[test]
	fn slide_works() {
		let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
		assert!(b.slide(1, 5));
		assert_eq!(*b, vec![0, 2, 3, 4, 1, 5]);
		assert!(b.slide(4, 0));
		assert_eq!(*b, vec![1, 0, 2, 3, 4, 5]);
		assert!(b.slide(0, 2));
		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
		assert!(b.slide(1, 6));
		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
		assert!(b.slide(0, 6));
		assert_eq!(*b, vec![2, 3, 4, 5, 1, 0]);
		assert!(b.slide(5, 0));
		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
		assert!(!b.slide(6, 0));
		assert!(!b.slide(7, 0));
		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);

		// A vector that is not at capacity behaves the same way.
		let mut c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
		assert!(!c.slide(1, 5));
		assert_eq!(*c, vec![0, 1, 2]);
		assert!(!c.slide(4, 0));
		assert_eq!(*c, vec![0, 1, 2]);
		assert!(!c.slide(3, 0));
		assert_eq!(*c, vec![0, 1, 2]);
		assert!(c.slide(2, 0));
		assert_eq!(*c, vec![2, 0, 1]);
	}
980
	// Sliding an element onto (or immediately after) its own position must be a no-op.
	#[test]
	fn slide_noops_work() {
		let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
		assert!(!b.slide(3, 3));
		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
		assert!(!b.slide(3, 4));
		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
	}
989
	// Covers the error, non-full, at-capacity (evicting the last element) and zero-bound cases.
	#[test]
	fn force_insert_keep_left_works() {
		let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
		assert_eq!(b.force_insert_keep_left(1, 10), Err(10));
		assert!(b.is_empty());

		assert_eq!(b.force_insert_keep_left(0, 30), Ok(None));
		assert_eq!(b.force_insert_keep_left(0, 10), Ok(None));
		assert_eq!(b.force_insert_keep_left(1, 20), Ok(None));
		assert_eq!(b.force_insert_keep_left(3, 40), Ok(None));
		assert_eq!(*b, vec![10, 20, 30, 40]);
		// at capacity.
		assert_eq!(b.force_insert_keep_left(4, 41), Err(41));
		assert_eq!(*b, vec![10, 20, 30, 40]);
		assert_eq!(b.force_insert_keep_left(3, 31), Ok(Some(40)));
		assert_eq!(*b, vec![10, 20, 30, 31]);
		assert_eq!(b.force_insert_keep_left(1, 11), Ok(Some(31)));
		assert_eq!(*b, vec![10, 11, 20, 30]);
		assert_eq!(b.force_insert_keep_left(0, 1), Ok(Some(30)));
		assert_eq!(*b, vec![1, 10, 11, 20]);

		// A zero bound rejects every insertion.
		let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
		assert!(z.is_empty());
		assert_eq!(z.force_insert_keep_left(0, 10), Err(10));
		assert!(z.is_empty());
	}
1016
	#[test]
	fn force_insert_keep_right_works() {
		// Mirror image of `force_insert_keep_left`: when full, the *left-most* element is dropped
		// and returned as `Ok(Some(dropped))`. `Err(value)` is returned when `index > len`, or
		// when inserting can't help (zero bound / insertion at position 0 of a full vector, which
		// would drop the value itself).
		let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
		assert_eq!(b.force_insert_keep_right(1, 10), Err(10));
		assert!(b.is_empty());

		assert_eq!(b.force_insert_keep_right(0, 30), Ok(None));
		assert_eq!(b.force_insert_keep_right(0, 10), Ok(None));
		assert_eq!(b.force_insert_keep_right(1, 20), Ok(None));
		assert_eq!(b.force_insert_keep_right(3, 40), Ok(None));
		assert_eq!(*b, vec![10, 20, 30, 40]);

		// at capacity.
		assert_eq!(b.force_insert_keep_right(0, 0), Err(0));
		assert_eq!(*b, vec![10, 20, 30, 40]);
		assert_eq!(b.force_insert_keep_right(1, 11), Ok(Some(10)));
		assert_eq!(*b, vec![11, 20, 30, 40]);
		assert_eq!(b.force_insert_keep_right(3, 31), Ok(Some(11)));
		assert_eq!(*b, vec![20, 30, 31, 40]);
		assert_eq!(b.force_insert_keep_right(4, 41), Ok(Some(20)));
		assert_eq!(*b, vec![30, 31, 40, 41]);

		// Out-of-range index: the value is handed back untouched.
		assert_eq!(b.force_insert_keep_right(5, 69), Err(69));
		assert_eq!(*b, vec![30, 31, 40, 41]);

		// A zero-capacity vector can never accept anything.
		let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
		assert!(z.is_empty());
		assert_eq!(z.force_insert_keep_right(0, 10), Err(10));
		assert!(z.is_empty());
	}
1047
1048	#[test]
1049	fn bound_returns_correct_value() {
1050		assert_eq!(BoundedVec::<u32, ConstU32<7>>::bound(), 7);
1051	}
1052
1053	#[test]
1054	fn try_insert_works() {
1055		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
1056		bounded.try_insert(1, 0).unwrap();
1057		assert_eq!(*bounded, vec![1, 0, 2, 3]);
1058
1059		assert!(bounded.try_insert(0, 9).is_err());
1060		assert_eq!(*bounded, vec![1, 0, 2, 3]);
1061	}
1062
	#[test]
	fn constructor_macro_works() {
		// `bounded_vec!` mirrors `vec!` but produces a `BoundedVec`.
		// With values. Use some brackets to make sure the macro doesn't expand.
		let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2), (1, 2), (1, 2)];
		assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);

		// With repetition, i.e. `bounded_vec![elem; count]`.
		let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2); 3];
		assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
	}
1073
1074	#[test]
1075	#[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
1076	fn try_inert_panics_if_oob() {
1077		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
1078		bounded.try_insert(9, 0).unwrap();
1079	}
1080
1081	#[test]
1082	fn try_push_works() {
1083		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
1084		bounded.try_push(0).unwrap();
1085		assert_eq!(*bounded, vec![1, 2, 3, 0]);
1086
1087		assert!(bounded.try_push(9).is_err());
1088	}
1089
1090	#[test]
1091	fn deref_vec_coercion_works() {
1092		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
1093		// these methods come from deref-ed vec.
1094		assert_eq!(bounded.len(), 3);
1095		assert!(bounded.iter().next().is_some());
1096		assert!(!bounded.is_empty());
1097	}
1098
1099	#[test]
1100	fn deref_slice_coercion_works() {
1101		let bounded = BoundedSlice::<u32, ConstU32<7>>::try_from(&[1, 2, 3][..]).unwrap();
1102		// these methods come from deref-ed slice.
1103		assert_eq!(bounded.len(), 3);
1104		assert!(bounded.iter().next().is_some());
1105		assert!(!bounded.is_empty());
1106	}
1107
	#[test]
	fn try_mutate_works() {
		// `try_mutate` consumes `self`, hands the inner `Vec` to the closure for arbitrary
		// mutation, and returns `Some(new)` only if the result still respects the bound.
		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
		let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
		assert_eq!(bounded.len(), 7);
		// Growing past the bound yields `None`.
		assert!(bounded.try_mutate(|v| v.push(8)).is_none());
	}
1115
1116	#[test]
1117	fn slice_indexing_works() {
1118		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
1119		assert_eq!(&bounded[0..=2], &[1, 2, 3]);
1120	}
1121
1122	#[test]
1123	fn vec_eq_works() {
1124		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
1125		assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
1126	}
1127
	#[test]
	fn too_big_vec_fail_to_decode() {
		// Decoding a SCALE-encoded `Vec` longer than the bound must fail outright rather than
		// silently truncate.
		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
		assert_eq!(
			BoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
			Err("BoundedVec exceeds its limit".into()),
		);
	}
1136
	#[test]
	fn dont_consume_more_data_than_bounded_len() {
		// When decoding bails because the length prefix exceeds the bound, only the compact
		// length prefix must have been consumed from the input; the element payload stays in the
		// buffer untouched.
		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
		let data = v.encode();
		let data_input = &mut &data[..];

		BoundedVec::<u32, ConstU32<4>>::decode(data_input).unwrap_err();
		// NOTE: `data.len()` happens to equal the encoded element count here only by coincidence
		// of sizes; the compact prefix length is what is being subtracted.
		assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
	}
1146
	#[test]
	fn eq_works() {
		// `PartialEq` only inspects the data, so equality holds across distinct bound *types* as
		// long as the contents match.
		// of same type
		let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
		let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
		assert_eq!(b1, b2);

		// of different type, but same value and bound.
		crate::parameter_types! {
			B1: u32 = 7;
			B2: u32 = 7;
		}
		let b1: BoundedVec<u32, B1> = bounded_vec![1, 2, 3];
		let b2: BoundedVec<u32, B2> = bounded_vec![1, 2, 3];
		assert_eq!(b1, b2);
	}
1163
	#[test]
	fn ord_works() {
		// `Ord` must agree with the inner `Vec`'s ordering.
		use std::cmp::Ordering;
		let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
		let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 3, 2];

		// ordering for vec is lexicographic.
		assert_eq!(b1.cmp(&b2), Ordering::Less);
		assert_eq!(b1.cmp(&b2), b1.into_inner().cmp(&b2.into_inner()));
	}
1174
	#[test]
	fn try_extend_works() {
		// `try_extend` is all-or-nothing: a failing extend leaves the vector unchanged.
		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];

		// One element at a time, up to the bound.
		assert!(b.try_extend(vec![4].into_iter()).is_ok());
		assert_eq!(*b, vec![1, 2, 3, 4]);

		assert!(b.try_extend(vec![5].into_iter()).is_ok());
		assert_eq!(*b, vec![1, 2, 3, 4, 5]);

		// Full: even one more element is rejected.
		assert!(b.try_extend(vec![6].into_iter()).is_err());
		assert_eq!(*b, vec![1, 2, 3, 4, 5]);

		// Extending with multiple elements that fit exactly.
		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
		assert!(b.try_extend(vec![4, 5].into_iter()).is_ok());
		assert_eq!(*b, vec![1, 2, 3, 4, 5]);

		// Too many elements: nothing is appended at all.
		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
		assert!(b.try_extend(vec![4, 5, 6].into_iter()).is_err());
		assert_eq!(*b, vec![1, 2, 3]);
	}
1196
1197	#[test]
1198	fn test_serializer() {
1199		let c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
1200		assert_eq!(serde_json::json!(&c).to_string(), r#"[0,1,2]"#);
1201	}
1202
	#[test]
	fn test_deserializer() {
		// A plain JSON array within the bound deserializes directly into a `BoundedVec`.
		let c: BoundedVec<u32, ConstU32<6>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();

		assert_eq!(c.len(), 3);
		assert_eq!(c[0], 0);
		assert_eq!(c[1], 1);
		assert_eq!(c[2], 2);
	}
1212
	#[test]
	fn test_deserializer_bound() {
		// An input exactly at the bound (3 elements, `ConstU32<3>`) must still succeed.
		let c: BoundedVec<u32, ConstU32<3>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();

		assert_eq!(c.len(), 3);
		assert_eq!(c[0], 0);
		assert_eq!(c[1], 1);
		assert_eq!(c[2], 2);
	}
1222
	#[test]
	fn test_deserializer_failed() {
		// Five elements do not fit `ConstU32<4>`: deserialization must fail with an
		// "out of bounds" error that points at the offending position in the input.
		let c: Result<BoundedVec<u32, ConstU32<4>>, serde_json::error::Error> = serde_json::from_str(r#"[0,1,2,3,4]"#);

		match c {
			Err(msg) => assert_eq!(msg.to_string(), "out of bounds at line 1 column 11"),
			_ => unreachable!("deserializer must raise error"),
		}
	}
1232
1233	#[test]
1234	fn bounded_vec_try_from_works() {
1235		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0]).is_ok());
1236		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1]).is_ok());
1237		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1, 2]).is_err());
1238	}
1239
1240	#[test]
1241	fn bounded_slice_try_from_works() {
1242		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0][..]).is_ok());
1243		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1][..]).is_ok());
1244		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1, 2][..]).is_err());
1245	}
1246
	#[test]
	fn can_be_collected() {
		// `TryCollect` lets any iterator be collected into a `BoundedVec`, failing at runtime if
		// the item count exceeds the bound.
		let b1: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3, 4];
		let b2: BoundedVec<u32, ConstU32<5>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
		assert_eq!(b2, vec![2, 3, 4, 5]);

		// can also be collected into a collection of length 4.
		let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
		assert_eq!(b2, vec![2, 3, 4, 5]);

		// can be mutated further into iterators that are `ExactSizeIterator`.
		let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().try_collect().unwrap();
		assert_eq!(b2, vec![5, 4, 3, 2]);

		let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
		assert_eq!(b2, vec![3, 2]);
		let b2: BoundedVec<u32, ConstU32<2>> = b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
		assert_eq!(b2, vec![3, 2]);

		let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
		assert_eq!(b2, vec![5, 4]);
		let b2: BoundedVec<u32, ConstU32<2>> = b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
		assert_eq!(b2, vec![5, 4]);

		// but these won't work: four items cannot fit a bound of 3 ...
		let b2: Result<BoundedVec<u32, ConstU32<3>>, _> = b1.iter().map(|x| x + 1).try_collect();
		assert!(b2.is_err());

		// ... and two items cannot fit a bound of 1.
		let b2: Result<BoundedVec<u32, ConstU32<1>>, _> = b1.iter().map(|x| x + 1).rev().take(2).try_collect();
		assert!(b2.is_err());
	}
1278
1279	#[test]
1280	fn bounded_vec_debug_works() {
1281		let bound = BoundedVec::<u32, ConstU32<5>>::truncate_from(vec![1, 2, 3]);
1282		assert_eq!(format!("{:?}", bound), "BoundedVec([1, 2, 3], 5)");
1283	}
1284
1285	#[test]
1286	fn bounded_slice_debug_works() {
1287		let bound = BoundedSlice::<u32, ConstU32<5>>::truncate_from(&[1, 2, 3]);
1288		assert_eq!(format!("{:?}", bound), "BoundedSlice([1, 2, 3], 5)");
1289	}
1290
1291	#[test]
1292	fn bounded_vec_sort_by_key_works() {
1293		let mut v: BoundedVec<i32, ConstU32<5>> = bounded_vec![-5, 4, 1, -3, 2];
1294		// Sort by absolute value.
1295		v.sort_by_key(|k| k.abs());
1296		assert_eq!(v, vec![1, 2, -3, 4, -5]);
1297	}
1298
1299	#[test]
1300	fn bounded_vec_truncate_from_works() {
1301		let unbound = vec![1, 2, 3, 4, 5];
1302		let bound = BoundedVec::<u32, ConstU32<3>>::truncate_from(unbound.clone());
1303		assert_eq!(bound, vec![1, 2, 3]);
1304	}
1305
1306	#[test]
1307	fn bounded_slice_truncate_from_works() {
1308		let unbound = [1, 2, 3, 4, 5];
1309		let bound = BoundedSlice::<u32, ConstU32<3>>::truncate_from(&unbound);
1310		assert_eq!(bound, &[1, 2, 3][..]);
1311	}
1312
1313	#[test]
1314	fn bounded_slice_partialeq_slice_works() {
1315		let unbound = [1, 2, 3];
1316		let bound = BoundedSlice::<u32, ConstU32<3>>::truncate_from(&unbound);
1317
1318		assert_eq!(bound, &unbound[..]);
1319		assert!(bound == &unbound[..]);
1320	}
1321
	#[test]
	fn bounded_vec_try_rotate_left_works() {
		// Unlike `Vec::rotate_left`, which panics when `mid > len`, the bounded variant returns
		// `Err` and leaves the contents untouched.
		let o = BoundedVec::<u32, ConstU32<3>>::truncate_from(vec![1, 2, 3]);
		let mut bound = o.clone();

		// Rotating by 0 or by exactly `len` is an Ok no-op.
		bound.try_rotate_left(0).unwrap();
		assert_eq!(bound, o);
		bound.try_rotate_left(3).unwrap();
		assert_eq!(bound, o);

		// Rotating past `len` fails without modifying anything.
		bound.try_rotate_left(4).unwrap_err();
		assert_eq!(bound, o);

		// Actual rotations compose back to the original.
		bound.try_rotate_left(1).unwrap();
		assert_eq!(bound, vec![2, 3, 1]);
		bound.try_rotate_left(2).unwrap();
		assert_eq!(bound, o);
	}
1340
	#[test]
	fn bounded_vec_try_rotate_right_works() {
		// Mirror of `try_rotate_left`: `Err` instead of a panic when `mid > len`.
		let o = BoundedVec::<u32, ConstU32<3>>::truncate_from(vec![1, 2, 3]);
		let mut bound = o.clone();

		// Rotating by 0 or by exactly `len` is an Ok no-op.
		bound.try_rotate_right(0).unwrap();
		assert_eq!(bound, o);
		bound.try_rotate_right(3).unwrap();
		assert_eq!(bound, o);

		// Rotating past `len` fails without modifying anything.
		bound.try_rotate_right(4).unwrap_err();
		assert_eq!(bound, o);

		// Actual rotations compose back to the original.
		bound.try_rotate_right(1).unwrap();
		assert_eq!(bound, vec![3, 1, 2]);
		bound.try_rotate_right(2).unwrap();
		assert_eq!(bound, o);
	}
1359
	// Just a test that structs containing `BoundedVec` and `BoundedSlice` can derive `Hash`. (This
	// was broken when they were deriving `Hash`.)
	#[test]
	#[cfg(feature = "std")]
	fn container_can_derive_hash() {
		// Merely compiling this derive is the test; the instance below only silences
		// dead-code warnings.
		#[derive(Hash)]
		struct Foo<'a> {
			bar: u8,
			slice: BoundedSlice<'a, usize, ConstU32<4>>,
			map: BoundedVec<String, ConstU32<16>>,
		}
		let _foo = Foo { bar: 42, slice: BoundedSlice::truncate_from(&[0, 1][..]), map: BoundedVec::default() };
	}
1373
	#[test]
	fn is_full_works() {
		// `is_full` flips to `true` exactly when `len` reaches the bound, at which point further
		// insertion fails without modifying the contents.
		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
		assert!(!bounded.is_full());
		bounded.try_insert(1, 0).unwrap();
		assert_eq!(*bounded, vec![1, 0, 2, 3]);

		assert!(bounded.is_full());
		assert!(bounded.try_insert(0, 9).is_err());
		assert_eq!(*bounded, vec![1, 0, 2, 3]);
	}
1385}