use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering;
use core::fmt;
use core::iter::repeat;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{Bound, Deref, DerefMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice;

use const_default::ConstDefault;

#[cfg(feature = "zeroize")]
use zeroize::{Zeroize, ZeroizeOnDrop};

use crate::alloc::{AllocatorDefault, Global};
use crate::boxed::Box;
use crate::capacity::{Grow, Index};
use crate::error::{StorageError, UpdateError};
use crate::storage::{insert::Inserter, Inline, RawBuffer};

use self::buffer::VecBuffer;
use self::config::{VecConfig, VecConfigAlloc, VecConfigNew, VecConfigSpawn, VecNewIn};

#[cfg(feature = "alloc")]
use crate::alloc::ConvertAlloc;

pub use self::{drain::Drain, into_iter::IntoIter, splice::Splice};

pub mod buffer;
pub mod config;

#[macro_use]
mod macros;

mod cow;
mod drain;
mod into_iter;
mod splice;

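/// A `Vec` which stores its elements in a fixed-size inline buffer, holding
/// at most `N` items without performing any heap allocation.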
pub type InlineVec<T, const N: usize> = Vec<T, Inline<N>>;

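/// A `Vec` with a thin (single-pointer) representation; the length and
/// capacity are kept within the allocated buffer itself.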
#[cfg(feature = "alloc")]
pub type ThinVec<T> = Vec<T, self::config::Thin<Global>>;

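/// A `Vec` whose allocator zeroizes the buffer contents when the memory is
/// released.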
#[cfg(feature = "zeroize")]
pub type ZeroizingVec<T> = Vec<T, crate::alloc::ZeroizingAlloc<Global>>;

#[cold]
#[inline(never)]
pub(super) fn index_panic() -> ! {
    panic!("Invalid element index");
}

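/// Resolve a generic `RangeBounds<I>` into a concrete `Range<usize>`, using
/// `length` as the endpoint for an open-ended range.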
#[inline]
fn bounds_to_range<I: Index>(range: impl RangeBounds<I>, length: I) -> Range<usize> {
    let start = match range.start_bound() {
        Bound::Unbounded => 0,
        Bound::Included(i) => i.to_usize(),
        Bound::Excluded(i) => i.to_usize() + 1,
    };
    let end = match range.end_bound() {
        Bound::Unbounded => length.to_usize(),
        Bound::Included(i) => i.to_usize() + 1,
        Bound::Excluded(i) => i.to_usize(),
    };
    Range { start, end }
}

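/// An iterator over raw element pointers which drops all remaining elements
/// when it is itself dropped. This keeps `dedup_by` and `retain_mut`
/// panic-safe: if a user callback panics, the unvisited elements are still
/// dropped exactly once.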
struct DropSlice<T> {
    ptr: *mut T,
    len: usize,
}

impl<T> Iterator for DropSlice<T> {
    type Item = *mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.len > 0 {
            let ret = self.ptr;
            unsafe {
                self.ptr = self.ptr.add(1);
            }
            self.len -= 1;
            Some(ret)
        } else {
            None
        }
    }
}

impl<T> Drop for DropSlice<T> {
    fn drop(&mut self) {
        if self.len > 0 {
            unsafe { ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.ptr, self.len)) }
        }
    }
}

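/// Collect an array into a new heap-allocated `Vec<T>`.
///
/// Example (illustrative; assumes this crate is available as `flex_alloc`):
/// ```ignore
/// let v = flex_alloc::vec::from_array([1, 2, 3]);
/// assert_eq!(v.as_slice(), &[1, 2, 3]);
/// ```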
#[cfg(feature = "alloc")]
#[inline]
pub fn from_array<T, const N: usize>(data: [T; N]) -> Vec<T> {
    let mut v = Vec::new();
    v.extend(data);
    v
}

#[inline]
pub fn from_array_in<T, C, const N: usize>(data: [T; N], alloc_in: C) -> Vec<T, C::Config>
where
    C: VecNewIn<T>,
{
    let mut v = Vec::new_in(alloc_in);
    v.extend(data);
    v
}

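/// Collect `count` clones of `elem` into a new `Vec<T>`, comparable to the
/// standard `vec![elem; count]` macro.
///
/// Example (illustrative; assumes this crate is available as `flex_alloc`):
/// ```ignore
/// let v = flex_alloc::vec::from_elem(0u8, 4);
/// assert_eq!(v.as_slice(), &[0, 0, 0, 0]);
/// ```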
#[cfg(feature = "alloc")]
#[inline]
pub fn from_elem<T: Clone>(elem: T, count: usize) -> Vec<T, Global> {
    Vec::from_iter(repeat(elem).take(count))
}

#[inline]
pub fn from_elem_in<T, C>(elem: T, count: usize, alloc_in: C) -> Vec<T, C::Config>
where
    T: Clone,
    C: VecNewIn<T>,
{
    Vec::from_iter_in(repeat(elem).take(count), alloc_in)
}

#[repr(transparent)]
pub struct Vec<T, C: VecConfig = Global> {
    buffer: C::Buffer<T>,
}

impl<T, C: VecConfigNew<T>> Vec<T, C> {
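    /// Create a new, empty vector. No storage is allocated until elements
    /// are added.
    ///
    /// Example (illustrative; assumes this crate is available as `flex_alloc`):
    /// ```ignore
    /// use flex_alloc::vec::Vec;
    ///
    /// let mut v: Vec<u32> = Vec::new();
    /// v.push(1);
    /// assert_eq!(v.as_slice(), &[1]);
    /// ```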
    pub const fn new() -> Self {
        Self {
            buffer: C::EMPTY_BUFFER,
        }
    }

    pub fn try_with_capacity(capacity: C::Index) -> Result<Self, StorageError> {
        let buffer = C::buffer_try_new(capacity, false)?;
        Ok(Self { buffer })
    }

    pub fn with_capacity(capacity: C::Index) -> Self {
        match Self::try_with_capacity(capacity) {
            Ok(res) => res,
            Err(error) => error.panic(),
        }
    }

    pub fn from_slice(data: &[T]) -> Self
    where
        T: Clone,
    {
        let Some(len) = C::Index::try_from_usize(data.len()) else {
            index_panic();
        };
        let mut vec = Self::with_capacity(len);
        vec.extend_from_slice(data);
        vec
    }

    pub fn try_from_slice(data: &[T]) -> Result<Self, StorageError>
    where
        T: Clone,
    {
        let Some(len) = C::Index::try_from_usize(data.len()) else {
            return Err(StorageError::CapacityLimit);
        };
        let mut vec = Self::try_with_capacity(len)?;
        vec.extend_from_slice(data);
        Ok(vec)
    }
}

impl<T, C: VecConfig> Vec<T, C> {
    pub fn new_in<A>(alloc_in: A) -> Self
    where
        A: VecNewIn<T, Config = C>,
    {
        match A::buffer_try_new_in(alloc_in, C::Index::ZERO, false) {
            Ok(buffer) => Self { buffer },
            Err(err) => err.panic(),
        }
    }

    pub fn try_new_in<A>(alloc_in: A) -> Result<Self, StorageError>
    where
        A: VecNewIn<T, Config = C>,
    {
        Ok(Self {
            buffer: A::buffer_try_new_in(alloc_in, C::Index::ZERO, false)?,
        })
    }

    pub fn with_capacity_in<A>(capacity: C::Index, alloc_in: A) -> Self
    where
        A: VecNewIn<T, Config = C>,
    {
        match Self::try_with_capacity_in(capacity, alloc_in) {
            Ok(res) => res,
            Err(error) => error.panic(),
        }
    }

    pub fn try_with_capacity_in<A>(capacity: C::Index, alloc_in: A) -> Result<Self, StorageError>
    where
        A: VecNewIn<T, Config = C>,
    {
        Ok(Self {
            buffer: A::buffer_try_new_in(alloc_in, capacity, false)?,
        })
    }

    pub fn from_slice_in<A>(data: &[T], alloc_in: A) -> Self
    where
        T: Clone,
        A: VecNewIn<T, Config = C>,
    {
        let Some(len) = C::Index::try_from_usize(data.len()) else {
            index_panic();
        };
        let mut vec = Self::with_capacity_in(len, alloc_in);
        vec.extend_from_slice(data);
        vec
    }

    pub fn try_from_slice_in<A>(data: &[T], alloc_in: A) -> Result<Self, StorageError>
    where
        T: Clone,
        A: VecNewIn<T, Config = C>,
    {
        let Some(len) = C::Index::try_from_usize(data.len()) else {
            return Err(StorageError::CapacityLimit);
        };
        let mut vec = Self::try_with_capacity_in(len, alloc_in)?;
        vec.extend_from_slice(data);
        Ok(vec)
    }
}

impl<T, C: VecConfig> Vec<T, C> {
    #[inline]
    fn into_inner(self) -> C::Buffer<T> {
        let me = ManuallyDrop::new(self);
        unsafe { ptr::read(&me.buffer) }
    }
}

impl<T, C: VecConfigAlloc<T>> Vec<T, C> {
    pub fn allocator(&self) -> &C::Alloc {
        C::allocator(&self.buffer)
    }

    pub fn into_boxed_slice(mut self) -> Box<[T], C::Alloc> {
        self.shrink_to_fit();
        let (data, length, capacity, alloc) = self.into_parts();
        assert_eq!(capacity, length, "length-capacity mismatch");
        let data = ptr::slice_from_raw_parts_mut(data.as_ptr(), length.to_usize());
        unsafe { Box::from_raw_in(data, alloc) }
    }

    pub fn try_into_boxed_slice(mut self) -> Result<Box<[T], C::Alloc>, UpdateError<Self>> {
        match self.try_shrink_to_fit() {
            Ok(()) => (),
            Err(e) => return Err(UpdateError::new(e, self)),
        }
        let (data, length, capacity, alloc) = self.into_parts();
        assert_eq!(capacity, length, "length-capacity mismatch");
        let data = ptr::slice_from_raw_parts_mut(data.as_ptr(), length.to_usize());
        Ok(unsafe { Box::from_raw_in(data, alloc) })
    }

    #[inline]
    pub unsafe fn from_raw_parts_in(
        data: *mut T,
        length: C::Index,
        capacity: C::Index,
        alloc: C::Alloc,
    ) -> Self {
        Self {
            buffer: C::buffer_from_parts(
                NonNull::new(data).expect("Expected non-null pointer"),
                length,
                capacity,
                alloc,
            ),
        }
    }

    #[inline]
    pub fn into_raw_parts_with_alloc(self) -> (*mut T, C::Index, C::Index, C::Alloc) {
        let (ptr, len, cap, alloc) = C::buffer_into_parts(self.into_inner());
        (ptr.as_ptr(), len, cap, alloc)
    }

    #[inline]
    pub(crate) fn into_parts(self) -> (NonNull<T>, C::Index, C::Index, C::Alloc) {
        C::buffer_into_parts(self.into_inner())
    }

    #[inline]
    pub(crate) unsafe fn from_parts(
        data: NonNull<T>,
        length: C::Index,
        capacity: C::Index,
        alloc: C::Alloc,
    ) -> Self {
        Self {
            buffer: C::buffer_from_parts(data, length, capacity, alloc),
        }
    }
}

impl<T, C> Vec<T, C>
where
    C: VecConfigAlloc<T>,
    C::Alloc: AllocatorDefault,
{
    #[inline]
    pub unsafe fn from_raw_parts(data: *mut T, length: C::Index, capacity: C::Index) -> Self {
        Self {
            buffer: C::buffer_from_parts(
                NonNull::new(data).expect("Expected non-null pointer"),
                length,
                capacity,
                C::Alloc::DEFAULT,
            ),
        }
    }

    #[inline]
    pub fn into_raw_parts(self) -> (*mut T, C::Index, C::Index) {
        let (ptr, len, cap, _alloc) = C::buffer_into_parts(self.into_inner());
        (ptr.as_ptr(), len, cap)
    }
}

impl<T, C: VecConfig> Vec<T, C> {
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.buffer.data_ptr()
    }

    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.buffer.data_ptr_mut()
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        self.buffer.as_slice()
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self.buffer.as_mut_slice()
    }

    #[inline]
    pub fn capacity(&self) -> C::Index {
        self.buffer.capacity()
    }

    #[inline]
    pub fn clear(&mut self) {
        self.truncate(C::Index::ZERO);
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == C::Index::ZERO
    }

    #[inline]
    pub fn leak<'a>(self) -> &'a mut [T]
    where
        C: 'a,
    {
        let mut me = ManuallyDrop::new(self);
        unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len().to_usize()) }
    }

    #[inline]
    pub fn len(&self) -> C::Index {
        self.buffer.length()
    }

    #[inline]
    pub unsafe fn set_len(&mut self, length: C::Index) {
        self.buffer.set_length(length)
    }

    #[inline]
    pub fn reserve(&mut self, reserve: C::Index) {
        match self.try_reserve(reserve) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
    }

    #[inline]
    pub fn try_reserve(&mut self, reserve: C::Index) -> Result<(), StorageError> {
        self._try_reserve(reserve.into(), false)
    }

    fn _try_reserve(&mut self, reserve: usize, exact: bool) -> Result<(), StorageError> {
        let buf_cap: usize = self.buffer.capacity().to_usize();
        let Some(buf_needed) = self.buffer.length().to_usize().checked_add(reserve) else {
            return Err(StorageError::CapacityLimit);
        };
        if buf_cap >= buf_needed {
            return Ok(());
        }
        let Some(mut capacity) = C::Index::try_from_usize(buf_needed) else {
            return Err(StorageError::CapacityLimit);
        };
        if !exact {
            capacity = C::Grow::next_capacity::<T, _>(self.buffer.capacity(), capacity);
        }
        self.buffer.grow_buffer(capacity, exact)?;
        Ok(())
    }

    #[inline]
    pub fn reserve_exact(&mut self, reserve: C::Index) {
        match self.try_reserve_exact(reserve) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
    }

    #[inline]
    pub fn try_reserve_exact(&mut self, reserve: C::Index) -> Result<(), StorageError> {
        self._try_reserve(reserve.into(), true)
    }

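    /// Move all elements of `other` into `self`, leaving `other` empty.
    ///
    /// Example (illustrative; assumes this crate is available as `flex_alloc`):
    /// ```ignore
    /// use flex_alloc::vec::Vec;
    ///
    /// let mut a: Vec<u32> = Vec::from_slice(&[1, 2]);
    /// let mut b: Vec<u32> = Vec::from_slice(&[3]);
    /// a.append(&mut b);
    /// assert_eq!(a.as_slice(), &[1, 2, 3]);
    /// assert!(b.is_empty());
    /// ```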
    pub fn append(&mut self, other: &mut Self) {
        if self.is_empty() {
            mem::swap(&mut self.buffer, &mut other.buffer);
        } else if !other.is_empty() {
            let cur_len = self.buffer.length().to_usize();
            let cp_len = other.len();
            self.reserve(cp_len);
            unsafe {
                ptr::copy_nonoverlapping(
                    other.buffer.data_ptr(),
                    self.buffer.data_ptr_mut().add(cur_len),
                    cp_len.to_usize(),
                );
            }
            unsafe { other.buffer.set_length(C::Index::ZERO) };
            unsafe {
                self.buffer
                    .set_length(C::Index::from_usize(cur_len + cp_len.to_usize()))
            };
        }
    }

    #[inline]
    pub fn dedup(&mut self)
    where
        T: Eq,
    {
        self.dedup_by(|a, b| a == b)
    }

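    /// Remove consecutive elements for which `cmp` returns `true`, keeping
    /// the first element of each run. A single pass advances a `head` write
    /// cursor behind the `tail` read cursor; rejected duplicates are dropped
    /// in place, and the `DropSlice` wrapper drops any unvisited elements if
    /// `cmp` panics.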
    pub fn dedup_by<F>(&mut self, mut cmp: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let orig_len = self.buffer.length().to_usize();
        if orig_len < 2 {
            return;
        }
        let mut new_len = 1;
        let mut head = self.as_mut_ptr();
        let tail_slice = DropSlice {
            ptr: unsafe { head.add(1) },
            len: orig_len - 1,
        };
        for tail in tail_slice {
            if !cmp(unsafe { &mut *tail }, unsafe { &mut *head }) {
                head = unsafe { head.add(1) };
                if head != tail {
                    unsafe { ptr::copy_nonoverlapping(tail, head, 1) };
                }
                new_len += 1;
            } else {
                unsafe {
                    ptr::drop_in_place(tail);
                }
            }
        }
        unsafe { self.buffer.set_length(C::Index::from_usize(new_len)) }
    }

    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key_f: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key_f(a) == key_f(b))
    }

    #[inline]
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, C::Buffer<T>>
    where
        R: RangeBounds<C::Index>,
    {
        let range = bounds_to_range(range, self.buffer.length());
        Drain::new(&mut self.buffer, range)
    }

    pub fn extend_from_slice(&mut self, items: &[T])
    where
        T: Clone,
    {
        match self._try_reserve(items.len(), false) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
        unsafe {
            self.extend_unchecked(items);
        }
    }

    pub fn try_extend_from_slice(&mut self, items: &[T]) -> Result<(), StorageError>
    where
        T: Clone,
    {
        self._try_reserve(items.len(), false)?;
        unsafe {
            self.extend_unchecked(items);
        }
        Ok(())
    }

    pub fn extend_from_within<R>(&mut self, range: R)
    where
        R: RangeBounds<C::Index>,
        T: Clone,
    {
        let prev_len = self.len();
        let range = bounds_to_range(range, self.buffer.length());
        match self._try_reserve(range.len(), false) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
        let (head, spare) = self.split_at_spare_mut();
        let mut insert = Inserter::new(spare);
        insert.push_slice(&head[range]);
        let added = insert.complete();
        if added > 0 {
            unsafe { self.buffer.set_length(prev_len.saturating_add(added)) };
        }
    }

    pub fn try_extend_from_within<R>(&mut self, range: R) -> Result<(), StorageError>
    where
        R: RangeBounds<C::Index>,
        T: Clone,
    {
        let prev_len = self.len();
        let range = bounds_to_range(range, self.buffer.length());
        self._try_reserve(range.len(), false)?;
        let (head, spare) = self.split_at_spare_mut();
        let mut insert = Inserter::new(spare);
        insert.push_slice(&head[range]);
        let added = insert.complete();
        if added > 0 {
            unsafe { self.buffer.set_length(prev_len.saturating_add(added)) };
        }
        Ok(())
    }

    unsafe fn extend_unchecked(&mut self, items: &[T])
    where
        T: Clone,
    {
        let prev_len = self.len();
        let mut insert = Inserter::new(self.spare_capacity_mut());
        for item in items.iter() {
            insert.push_unchecked(item.clone());
        }
        let added = insert.complete();
        if added > 0 {
            unsafe { self.buffer.set_length(prev_len.saturating_add(added)) };
        }
    }

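    /// Extend the vector from an iterator, growing the buffer as needed. Each
    /// pass fills the current spare capacity through an `Inserter`; if the
    /// iterator still produces an item once the buffer is full, capacity is
    /// reserved based on the iterator's size hint and the loop repeats. On
    /// allocation failure the pending item is returned inside the error.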
    fn try_extend(&mut self, iter: &mut impl Iterator<Item = T>) -> Result<(), UpdateError<T>> {
        loop {
            let prev_len = self.buffer.length();
            let mut insert = Inserter::new(self.spare_capacity_mut());
            insert.push_iter(iter);
            let ins_count = insert.complete();
            let new_len = prev_len.saturating_add(ins_count);
            if ins_count > 0 {
                unsafe { self.buffer.set_length(new_len) };
            }
            if new_len < self.buffer.capacity() {
                break;
            }
            if let Some(item) = iter.next() {
                let min_reserve = iter.size_hint().0.saturating_add(1);
                match self._try_reserve(min_reserve, false) {
                    Ok(_) => {
                        unsafe { self.buffer.uninit_index(new_len.to_usize()) }.write(item);
                        unsafe { self.buffer.set_length(new_len.saturating_add(1)) };
                    }
                    Err(err) => return Err(UpdateError::new(err, item)),
                }
            } else {
                break;
            }
        }
        Ok(())
    }

    pub fn from_iter_in<A, I>(iter: A, alloc_in: I) -> Self
    where
        A: IntoIterator<Item = T>,
        I: VecNewIn<T, Config = C>,
    {
        let iter = iter.into_iter();
        let (min_cap, _) = iter.size_hint();
        let Some(min_cap) = C::Index::try_from_usize(min_cap) else {
            index_panic();
        };
        let mut vec = Self::with_capacity_in(min_cap, alloc_in);
        vec.extend(iter);
        vec
    }

    pub fn insert(&mut self, index: C::Index, value: T) {
        match self.try_insert(index, value) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
    }

    pub fn try_insert(&mut self, index: C::Index, value: T) -> Result<(), UpdateError<T>> {
        let prev_len = self.buffer.length();
        if index > prev_len {
            index_panic();
        }
        let index = index.to_usize();
        let tail_count = prev_len.to_usize() - index;
        match self._try_reserve(1, false) {
            Ok(_) => (),
            Err(error) => return Err(UpdateError::new(error, value)),
        };
        unsafe {
            let head = self.buffer.data_ptr_mut().add(index);
            if tail_count > 0 {
                ptr::copy(head, head.add(1), tail_count);
            }
            head.write(value);
        }
        unsafe { self.buffer.set_length(prev_len.saturating_add(1)) };
        Ok(())
    }

    pub fn insert_slice(&mut self, index: C::Index, other: &[T])
    where
        T: Clone,
    {
        match self.try_insert_slice(index, other) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
    }

    pub fn try_insert_slice(&mut self, index: C::Index, other: &[T]) -> Result<(), StorageError>
    where
        T: Clone,
    {
        let prev_len = self.buffer.length().to_usize();
        let index = index.to_usize();
        if index > prev_len {
            index_panic();
        }
        let ins_count = other.len();
        if ins_count == 0 {
            return Ok(());
        }
        self._try_reserve(ins_count, false)?;
        let tail_count = prev_len - index;
        let head = unsafe { self.buffer.data_ptr_mut().add(index) };
        if tail_count > 0 {
            unsafe {
                self.buffer.set_length(C::Index::from_usize(index));
            }
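            // Move the tail forward by `ins_count` slots. `head` already
            // points at `index`, so the destination offset is `ins_count`.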
            unsafe { ptr::copy(head, head.add(ins_count), tail_count) };
        }
        let mut insert = Inserter::new_with_tail(
            &mut self.buffer.as_uninit_slice()[index..index + ins_count + tail_count],
            tail_count,
        );
        insert.push_slice(other);
        insert.complete();
        unsafe {
            self.buffer
                .set_length(C::Index::from_usize(prev_len + ins_count));
        }
        Ok(())
    }

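    /// Remove and return the last element, or `None` if the vector is empty.
    ///
    /// Example (illustrative; assumes this crate is available as `flex_alloc`):
    /// ```ignore
    /// use flex_alloc::vec::Vec;
    ///
    /// let mut v: Vec<u32> = Vec::from_slice(&[1, 2]);
    /// assert_eq!(v.pop(), Some(2));
    /// assert_eq!(v.pop(), Some(1));
    /// assert_eq!(v.pop(), None);
    /// ```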
    pub fn pop(&mut self) -> Option<T> {
        let mut tail = self.buffer.length().to_usize();
        if tail > 0 {
            tail -= 1;
            unsafe { self.buffer.set_length(C::Index::from_usize(tail)) };
            Some(unsafe { self.buffer.uninit_index(tail).assume_init_read() })
        } else {
            None
        }
    }

    pub fn push(&mut self, item: T) {
        match self._try_reserve(1, false) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
        unsafe {
            self.push_unchecked(item);
        }
    }

    pub fn push_within_capacity(&mut self, item: T) -> Result<(), T> {
        if self.len() < self.capacity() {
            unsafe {
                self.push_unchecked(item);
            }
            Ok(())
        } else {
            Err(item)
        }
    }

    pub fn try_push(&mut self, item: T) -> Result<(), UpdateError<T>> {
        if let Err(error) = self._try_reserve(1, false) {
            return Err(UpdateError::new(error, item));
        }
        unsafe {
            self.push_unchecked(item);
        }
        Ok(())
    }

    #[inline]
    pub unsafe fn push_unchecked(&mut self, item: T) {
        let length = self.buffer.length().to_usize();
        self.buffer.uninit_index(length).write(item);
        self.buffer.set_length(C::Index::from_usize(length + 1));
    }

    pub fn remove(&mut self, index: C::Index) -> T {
        let len = self.buffer.length().to_usize();
        let index = index.to_usize();
        if index >= len {
            index_panic();
        }
        let copy_count = len - index - 1;
        unsafe {
            let result = self.buffer.uninit_index(index).assume_init_read();
            if copy_count > 0 {
                let head = self.as_mut_ptr().add(index);
                ptr::copy(head.add(1), head, copy_count);
            }
            self.buffer.set_length(C::Index::from_usize(len - 1));
            result
        }
    }

    #[inline]
    pub fn resize(&mut self, new_len: C::Index, value: T)
    where
        T: Clone,
    {
        match self.try_resize(new_len, value) {
            Ok(_) => (),
            Err(err) => err.panic(),
        }
    }

    pub fn try_resize(&mut self, new_len: C::Index, value: T) -> Result<(), StorageError>
    where
        T: Clone,
    {
        let len = self.buffer.length();
        match new_len.cmp(&len) {
            Ordering::Greater => {
                let ins_count = new_len.to_usize() - len.to_usize();
                self._try_reserve(ins_count, false)?;
                let mut insert = Inserter::new(self.spare_capacity_mut());
                insert.push_repeat(&value, ins_count);
                insert.complete();
                unsafe { self.buffer.set_length(new_len) }
            }
            Ordering::Less => {
                self.truncate(new_len);
            }
            Ordering::Equal => {}
        }
        Ok(())
    }

    #[inline]
    pub fn resize_with<F>(&mut self, new_len: C::Index, f: F)
    where
        F: FnMut() -> T,
    {
        match self.try_resize_with(new_len, f) {
            Ok(_) => (),
            Err(err) => err.panic(),
        }
    }

    pub fn try_resize_with<F>(&mut self, new_len: C::Index, mut f: F) -> Result<(), StorageError>
    where
        F: FnMut() -> T,
    {
        let len = self.buffer.length();
        match new_len.cmp(&len) {
            Ordering::Greater => {
                let ins_count = new_len.to_usize() - len.to_usize();
                self._try_reserve(ins_count, false)?;
                let mut insert = Inserter::new(self.spare_capacity_mut());
                for _ in 0..ins_count {
                    insert.push(f());
                }
                insert.complete();
                unsafe { self.buffer.set_length(new_len) }
            }
            Ordering::Less => {
                self.truncate(new_len);
            }
            Ordering::Equal => {}
        }
        Ok(())
    }

    #[inline]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.retain_mut(|r| f(r))
    }

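    /// Retain only the elements for which `f` returns `true`. The length is
    /// temporarily set to zero while elements are filtered in place, so a
    /// panicking predicate cannot expose moved-from slots; the `DropSlice`
    /// wrapper then drops any elements the pass did not reach.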
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let orig_len = self.buffer.length().to_usize();
        if orig_len == 0 {
            return;
        }
        unsafe { self.buffer.set_length(C::Index::ZERO) };
        let mut len = 0;
        let read_slice = DropSlice {
            ptr: self.as_mut_ptr(),
            len: orig_len,
        };
        let mut tail = self.as_mut_ptr();
        for read in read_slice {
            unsafe {
                if f(&mut *read) {
                    if tail != read {
                        ptr::copy_nonoverlapping(read, tail, 1);
                    }
                    tail = tail.add(1);
                    len += 1;
                } else {
                    ptr::drop_in_place(read);
                }
            }
        }
        unsafe { self.buffer.set_length(C::Index::from_usize(len)) };
    }

    #[inline]
    pub fn shrink_to(&mut self, min_capacity: C::Index) {
        match self.try_shrink_to(min_capacity) {
            Ok(_) => (),
            Err(err) => err.panic(),
        }
    }

    pub fn try_shrink_to(&mut self, min_capacity: C::Index) -> Result<(), StorageError> {
        let len = self.buffer.length().max(min_capacity);
        if self.buffer.capacity() != len {
            self.buffer.shrink_buffer(len)?;
        }
        Ok(())
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        match self.try_shrink_to_fit() {
            Ok(_) => (),
            Err(err) => err.panic(),
        }
    }

    #[inline]
    pub fn try_shrink_to_fit(&mut self) -> Result<(), StorageError> {
        self.try_shrink_to(self.buffer.length())
    }

    #[inline]
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        let length = self.len().into();
        &mut self.buffer.as_uninit_slice()[length..]
    }

    pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
        let length = self.len().into();
        let (data, spare) = self.buffer.as_uninit_slice().split_at_mut(length);
        (
            unsafe { slice::from_raw_parts_mut(data.as_mut_ptr().cast(), length) },
            spare,
        )
    }

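    /// Split the vector at `index`, returning a new vector containing the
    /// elements from `index` onward and truncating `self` to `index`.
    ///
    /// Example (illustrative; assumes this crate is available as `flex_alloc`):
    /// ```ignore
    /// use flex_alloc::vec::Vec;
    ///
    /// let mut v: Vec<u32> = Vec::from_slice(&[1, 2, 3, 4]);
    /// let tail = v.split_off(2);
    /// assert_eq!(v.as_slice(), &[1, 2]);
    /// assert_eq!(tail.as_slice(), &[3, 4]);
    /// ```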
    pub fn split_off(&mut self, index: C::Index) -> Self
    where
        C: VecConfigSpawn<T>,
    {
        let len = self.buffer.length().to_usize();
        let index_usize = index.to_usize();
        if index_usize >= len {
            index_panic();
        }
        let move_len = C::Index::from_usize(len - index_usize);
        match C::buffer_try_spawn(&self.buffer, move_len, false) {
            Ok(mut buffer) => {
                if index_usize == 0 {
                    mem::swap(&mut buffer, &mut self.buffer);
                } else {
                    unsafe {
                        ptr::copy_nonoverlapping(
                            self.buffer.data_ptr().add(index_usize),
                            buffer.data_ptr_mut(),
                            move_len.to_usize(),
                        );
                    }
                    unsafe { buffer.set_length(move_len) };
                    unsafe { self.buffer.set_length(index) };
                }
                Self { buffer }
            }
            Err(err) => err.panic(),
        }
    }

    pub fn splice<R, I>(
        &mut self,
        range: R,
        replace_with: I,
    ) -> Splice<'_, <I as IntoIterator>::IntoIter, C::Buffer<T>, C::Grow>
    where
        R: RangeBounds<C::Index>,
        I: IntoIterator<Item = T>,
    {
        let range = bounds_to_range(range, self.buffer.length());
        Splice::new(&mut self.buffer, replace_with.into_iter(), range)
    }

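    /// Remove the element at `index` in O(1) by replacing it with the last
    /// element; the order of the remaining elements is not preserved.
    ///
    /// Example (illustrative; assumes this crate is available as `flex_alloc`):
    /// ```ignore
    /// use flex_alloc::vec::Vec;
    ///
    /// let mut v: Vec<u32> = Vec::from_slice(&[1, 2, 3, 4]);
    /// assert_eq!(v.swap_remove(0), 1);
    /// assert_eq!(v.as_slice(), &[4, 2, 3]);
    /// ```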
    pub fn swap_remove(&mut self, index: C::Index) -> T {
        let index: usize = index.to_usize();
        let length: usize = self.buffer.length().to_usize();
        if index >= length {
            index_panic();
        }
        let last: usize = length - 1;
        unsafe { self.buffer.set_length(C::Index::from_usize(last)) };
        let data = self.buffer.as_uninit_slice();
        let result = unsafe { data[index].assume_init_read() };
        if index != last {
            unsafe { data[index].write(data[last].assume_init_read()) };
        }
        result
    }

    pub fn truncate(&mut self, length: C::Index) {
        let old_len: usize = self.len().to_usize();
        let new_len = length.to_usize().min(old_len);
        let remove = old_len - new_len;
        if remove > 0 {
            unsafe { self.buffer.set_length(C::Index::from_usize(new_len)) };
            let drop_start = unsafe { self.buffer.data_ptr_mut().add(new_len) };
            let to_drop = ptr::slice_from_raw_parts_mut(drop_start, remove);
            unsafe {
                ptr::drop_in_place(to_drop);
            }
        }
    }
}

impl<T, C: VecConfig> AsRef<[T]> for Vec<T, C> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, C: VecConfig> AsMut<[T]> for Vec<T, C> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T, C: VecConfig> Borrow<[T]> for Vec<T, C> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, C: VecConfig> BorrowMut<[T]> for Vec<T, C> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: Clone, C: VecConfigSpawn<T>> Clone for Vec<T, C> {
    fn clone(&self) -> Self {
        let mut inst = Self {
            buffer: match C::buffer_try_spawn(&self.buffer, self.buffer.length(), false) {
                Ok(buf) => buf,
                Err(err) => err.panic(),
            },
        };
        inst.extend_from_slice(self);
        inst
    }

    fn clone_from(&mut self, source: &Self) {
        self.truncate(C::Index::ZERO);
        self.extend_from_slice(source);
    }
}

impl<T, C: VecConfigNew<T>> ConstDefault for Vec<T, C> {
    const DEFAULT: Self = Self::new();
}

impl<T: fmt::Debug, C: VecConfig> fmt::Debug for Vec<T, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_slice().fmt(f)
    }
}

impl<T, C: VecConfigNew<T>> Default for Vec<T, C> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T, C: VecConfig> Deref for Vec<T, C> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { slice::from_raw_parts(self.buffer.data_ptr(), self.len().into()) }
    }
}

impl<T, C: VecConfig> DerefMut for Vec<T, C> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { slice::from_raw_parts_mut(self.buffer.data_ptr_mut(), self.len().into()) }
    }
}

impl<T, C: VecConfig> Drop for Vec<T, C> {
    fn drop(&mut self) {
        let to_drop: &mut [T] = self.as_mut_slice();
        if !to_drop.is_empty() {
            unsafe {
                ptr::drop_in_place(to_drop);
            }
            unsafe { self.buffer.set_length(C::Index::ZERO) };
        }
    }
}

impl<T, C: VecConfig> Extend<T> for Vec<T, C> {
    #[inline]
    fn extend<A: IntoIterator<Item = T>>(&mut self, iter: A) {
        match self.try_extend(&mut iter.into_iter()) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
    }
}

impl<'a, T: Clone + 'a, C: VecConfig> Extend<&'a T> for Vec<T, C> {
    #[inline]
    fn extend<A: IntoIterator<Item = &'a T>>(&mut self, iter: A) {
        match self.try_extend(&mut iter.into_iter().cloned()) {
            Ok(_) => (),
            Err(error) => error.panic(),
        }
    }
}

impl<T, C: VecConfigNew<T>> FromIterator<T> for Vec<T, C> {
    #[inline]
    fn from_iter<A: IntoIterator<Item = T>>(iter: A) -> Self {
        let iter = iter.into_iter();
        let (min_cap, _) = iter.size_hint();
        let Some(min_cap) = C::Index::try_from_usize(min_cap) else {
            index_panic();
        };
        let mut vec = Self::with_capacity(min_cap);
        vec.extend(iter);
        vec
    }
}

unsafe impl<T: Send, C: VecConfig + Send> Send for Vec<T, C> {}

unsafe impl<T: Sync, C: VecConfig + Sync> Sync for Vec<T, C> {}

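// Conversions between this crate's `Vec` and the standard library's `Box` and
// `Vec`. On stable (without the `nightly` feature) only the `Global` allocator
// is supported; with the `nightly` feature the conversions are generic over
// the allocator type.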
#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C> ConvertAlloc<alloc_crate::boxed::Box<[T]>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Alloc = Global>,
{
    #[inline]
    fn convert(self) -> alloc_crate::boxed::Box<[T]> {
        self.into_boxed_slice().convert()
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C> ConvertAlloc<Vec<T, C>> for alloc_crate::boxed::Box<[T]>
where
    C: VecConfigAlloc<T, Alloc = Global>,
{
    #[inline]
    fn convert(self) -> Vec<T, C> {
        let boxed: Box<[T], C::Alloc> = self.convert();
        boxed.into()
    }
}

#[cfg(all(feature = "alloc", feature = "nightly"))]
impl<T, C> ConvertAlloc<Vec<T, C>> for alloc_crate::boxed::Box<[T], C::Alloc>
where
    C: VecConfigAlloc<T>,
{
    #[inline]
    fn convert(self) -> Vec<T, C> {
        let boxed: Box<[T], C::Alloc> = self.convert();
        boxed.into()
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C> ConvertAlloc<alloc_crate::vec::Vec<T>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    fn convert(self) -> alloc_crate::vec::Vec<T> {
        let (raw, len, cap) = self.into_raw_parts();
        unsafe { alloc_crate::vec::Vec::from_raw_parts(raw, len, cap) }
    }
}

#[cfg(all(feature = "alloc", feature = "nightly"))]
impl<T, C> ConvertAlloc<alloc_crate::vec::Vec<T, C::Alloc>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    fn convert(self) -> alloc_crate::vec::Vec<T, C::Alloc> {
        let (raw, len, cap, alloc) = self.into_raw_parts_with_alloc();
        unsafe {
            alloc_crate::vec::Vec::from_raw_parts_in(raw, len.to_usize(), cap.to_usize(), alloc)
        }
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C> ConvertAlloc<Vec<T, C>> for alloc_crate::vec::Vec<T>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    fn convert(self) -> Vec<T, C> {
        let mut vec = ManuallyDrop::new(self);
        unsafe { Vec::from_raw_parts(vec.as_mut_ptr(), vec.len(), vec.capacity()) }
    }
}

#[cfg(all(feature = "alloc", feature = "nightly"))]
impl<T, C> ConvertAlloc<Vec<T, C>> for alloc_crate::vec::Vec<T, C::Alloc>
where
    C: VecConfigAlloc<T, Index = usize>,
{
    fn convert(self) -> Vec<T, C> {
        let mut vec = ManuallyDrop::new(self);
        unsafe {
            Vec::from_raw_parts_in(
                vec.as_mut_ptr(),
                vec.len().into(),
                vec.capacity().into(),
                ptr::read(vec.allocator()),
            )
        }
    }
}

impl<T, C> From<Box<[T], C::Alloc>> for Vec<T, C>
where
    C: VecConfigAlloc<T>,
{
    #[inline]
    fn from(boxed: Box<[T], C::Alloc>) -> Self {
        let (ptr, alloc) = boxed.into_handle().into_parts();
        let len = C::Index::from_usize(ptr.len());
        unsafe { Vec::from_parts(ptr.cast(), len, len, alloc) }
    }
}

impl<T, C, const N: usize> From<Box<[T; N], C::Alloc>> for Vec<T, C>
where
    C: VecConfigAlloc<T>,
{
    #[inline]
    fn from(boxed: Box<[T; N], C::Alloc>) -> Self {
        Box::into_boxed_slice(boxed).into()
    }
}

impl<T: Clone, C: VecConfigNew<T>> From<&[T]> for Vec<T, C> {
    #[inline]
    fn from(data: &[T]) -> Self {
        Self::from_slice(data)
    }
}

impl<T: Clone, C: VecConfigNew<T>> From<&mut [T]> for Vec<T, C> {
    #[inline]
    fn from(data: &mut [T]) -> Self {
        Self::from_slice(data)
    }
}

impl<T: Clone, C: VecConfigNew<T>, const N: usize> From<&[T; N]> for Vec<T, C> {
    #[inline]
    fn from(data: &[T; N]) -> Self {
        Self::from_slice(data)
    }
}

impl<T: Clone, C: VecConfigNew<T>, const N: usize> From<&mut [T; N]> for Vec<T, C> {
    #[inline]
    fn from(data: &mut [T; N]) -> Self {
        Self::from_slice(data)
    }
}

impl<T, C: VecConfigNew<T>, const N: usize> From<[T; N]> for Vec<T, C> {
    #[inline]
    fn from(data: [T; N]) -> Self {
        Self::from_iter(data)
    }
}

impl<C: VecConfigNew<u8>> From<&str> for Vec<u8, C> {
    #[inline]
    fn from(data: &str) -> Self {
        Self::from_slice(data.as_bytes())
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C> From<alloc_crate::boxed::Box<[T]>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    #[inline]
    fn from(boxed: alloc_crate::boxed::Box<[T]>) -> Self {
        boxed.convert()
    }
}

#[cfg(all(feature = "alloc", feature = "nightly"))]
impl<T, C> From<alloc_crate::boxed::Box<[T], C::Alloc>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Index = usize>,
{
    #[inline]
    fn from(boxed: alloc_crate::boxed::Box<[T], C::Alloc>) -> Self {
        boxed.convert()
    }
}

#[cfg(feature = "alloc")]
impl<'b, T: Clone, C> From<alloc_crate::borrow::Cow<'b, [T]>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    fn from(cow: alloc_crate::borrow::Cow<'b, [T]>) -> Self {
        cow.into_owned().convert()
    }
}

#[cfg(feature = "alloc")]
impl<C> From<alloc_crate::string::String> for Vec<u8, C>
where
    C: VecConfigAlloc<u8, Alloc = Global, Index = usize>,
{
    #[inline]
    fn from(string: alloc_crate::string::String) -> Self {
        string.into_bytes().convert()
    }
}

#[cfg(feature = "alloc")]
impl<C: VecConfigNew<u8>> From<&alloc_crate::string::String> for Vec<u8, C> {
    #[inline]
    fn from(string: &alloc_crate::string::String) -> Self {
        string.as_bytes().into()
    }
}

#[cfg(feature = "alloc")]
impl<C> From<alloc_crate::ffi::CString> for Vec<u8, C>
where
    C: VecConfigAlloc<u8, Alloc = Global, Index = usize>,
{
    #[inline]
    fn from(string: alloc_crate::ffi::CString) -> Self {
        string.into_bytes().convert()
    }
}

#[cfg(feature = "alloc")]
impl<C: VecConfigNew<u8>> From<&alloc_crate::ffi::CString> for Vec<u8, C> {
    #[inline]
    fn from(string: &alloc_crate::ffi::CString) -> Self {
        string.as_bytes().into()
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C> From<alloc_crate::vec::Vec<T>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    fn from(vec: alloc_crate::vec::Vec<T>) -> Self {
        let capacity = vec.capacity();
        let length = vec.len();
        let data = unsafe { ptr::NonNull::new_unchecked(ManuallyDrop::new(vec).as_mut_ptr()) };
        unsafe { Self::from_parts(data, length, capacity, Global) }
    }
}

#[cfg(all(feature = "alloc", feature = "nightly"))]
impl<T, C> From<alloc_crate::vec::Vec<T, C::Alloc>> for Vec<T, C>
where
    C: VecConfigAlloc<T, Index = usize>,
    C::Alloc: AllocatorDefault,
{
    fn from(vec: alloc_crate::vec::Vec<T, C::Alloc>) -> Self {
        let capacity = vec.capacity();
        let length = vec.len();
        let data = unsafe { ptr::NonNull::new_unchecked(ManuallyDrop::new(vec).as_mut_ptr()) };
        unsafe { Self::from_parts(data, length, capacity, C::Alloc::DEFAULT) }
    }
}

impl<T, C: VecConfig> IntoIterator for Vec<T, C> {
    type Item = T;
    type IntoIter = IntoIter<C::Buffer<T>>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self.into_inner())
    }
}

impl<'a, T, C: VecConfig> IntoIterator for &'a Vec<T, C> {
    type Item = &'a T;
    type IntoIter = <&'a [T] as IntoIterator>::IntoIter;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}

impl<'a, T, C: VecConfig> IntoIterator for &'a mut Vec<T, C> {
    type Item = &'a mut T;
    type IntoIter = <&'a mut [T] as IntoIterator>::IntoIter;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.as_mut_slice().iter_mut()
    }
}

impl<T1, C1, T2, C2> PartialEq<Vec<T2, C2>> for Vec<T1, C1>
where
    C1: VecConfig,
    C2: VecConfig,
    T1: PartialEq<T2>,
{
    #[inline]
    fn eq(&self, other: &Vec<T2, C2>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T: Eq, C: VecConfig> Eq for Vec<T, C> {}

impl<T1, C1, T2> PartialEq<&[T2]> for Vec<T1, C1>
where
    T1: PartialEq<T2>,
    C1: VecConfig,
{
    #[inline]
    fn eq(&self, other: &&[T2]) -> bool {
        self.as_slice().eq(*other)
    }
}

impl<T1, C1, T2> PartialEq<&mut [T2]> for Vec<T1, C1>
where
    T1: PartialEq<T2>,
    C1: VecConfig,
{
    #[inline]
    fn eq(&self, other: &&mut [T2]) -> bool {
        self.as_slice().eq(*other)
    }
}

impl<T1, C1, T2> PartialEq<[T2]> for Vec<T1, C1>
where
    T1: PartialEq<T2>,
    C1: VecConfig,
{
    #[inline]
    fn eq(&self, other: &[T2]) -> bool {
        self.as_slice().eq(other)
    }
}

impl<T1, C1, T2, const N: usize> PartialEq<&[T2; N]> for Vec<T1, C1>
where
    T1: PartialEq<T2>,
    C1: VecConfig,
{
    #[inline]
    fn eq(&self, other: &&[T2; N]) -> bool {
        self.as_slice().eq(&other[..])
    }
}

impl<T1, C1, T2, const N: usize> PartialEq<&mut [T2; N]> for Vec<T1, C1>
where
    T1: PartialEq<T2>,
    C1: VecConfig,
{
    #[inline]
    fn eq(&self, other: &&mut [T2; N]) -> bool {
        self.as_slice().eq(&other[..])
    }
}

impl<T1, C1, T2, const N: usize> PartialEq<[T2; N]> for Vec<T1, C1>
where
    T1: PartialEq<T2>,
    C1: VecConfig,
{
    #[inline]
    fn eq(&self, other: &[T2; N]) -> bool {
        self.as_slice().eq(&other[..])
    }
}

impl<T1, T2, C2> PartialEq<Vec<T2, C2>> for &[T1]
where
    T2: PartialEq<T1>,
    C2: VecConfig,
{
    #[inline]
    fn eq(&self, other: &Vec<T2, C2>) -> bool {
        other.eq(self)
    }
}

impl<T1, T2, C2> PartialEq<Vec<T2, C2>> for &mut [T1]
where
    T2: PartialEq<T1>,
    C2: VecConfig,
{
    #[inline]
    fn eq(&self, other: &Vec<T2, C2>) -> bool {
        other.eq(self)
    }
}

impl<T1, T2, C2> PartialEq<Vec<T2, C2>> for [T1]
where
    T2: PartialEq<T1>,
    C2: VecConfig,
{
    #[inline]
    fn eq(&self, other: &Vec<T2, C2>) -> bool {
        other.eq(self)
    }
}

impl<T1, T2, C2, const N: usize> PartialEq<Vec<T2, C2>> for [T1; N]
where
    T2: PartialEq<T1>,
    C2: VecConfig,
{
    #[inline]
    fn eq(&self, other: &Vec<T2, C2>) -> bool {
        other.eq(self)
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<A, B, C> PartialEq<alloc_crate::vec::Vec<A>> for Vec<B, C>
where
    B: PartialEq<A>,
    C: VecConfig,
{
    #[inline]
    fn eq(&self, other: &alloc_crate::vec::Vec<A>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<A, B, C> PartialEq<Vec<B, C>> for alloc_crate::vec::Vec<A>
where
    B: PartialEq<A>,
    C: VecConfig,
{
    #[inline]
    fn eq(&self, other: &Vec<B, C>) -> bool {
        other.as_slice().eq(self.as_slice())
    }
}

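// Convert a vector into a fixed-size array, returning the vector unchanged
// if its length is not exactly `N`.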
impl<T, C: VecConfig, const N: usize> TryFrom<Vec<T, C>> for [T; N] {
    type Error = Vec<T, C>;

    fn try_from(mut vec: Vec<T, C>) -> Result<Self, Self::Error> {
        if vec.len().to_usize() != N {
            return Err(vec);
        }

        unsafe { vec.set_len(C::Index::ZERO) };

        let data = vec.as_ptr() as *const [T; N];
        Ok(unsafe { data.read() })
    }
}

#[cfg(all(feature = "alloc", not(feature = "nightly")))]
impl<T, C, const N: usize> TryFrom<Vec<T, C>> for alloc_crate::boxed::Box<[T; N]>
where
    C: VecConfigAlloc<T, Alloc = Global, Index = usize>,
{
    type Error = Vec<T, C>;

    fn try_from(vec: Vec<T, C>) -> Result<Self, Self::Error> {
        if vec.len().to_usize() != N {
            return Err(vec);
        }

        let (data, length, capacity, _alloc) = vec.into_parts();
        assert_eq!(capacity, length);
        Ok(unsafe { alloc_crate::boxed::Box::from_raw(data.as_ptr().cast()) })
    }
}

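// Writing appends to the vector. If the underlying storage cannot grow any
// further, `write` performs a partial write into the remaining spare capacity
// rather than failing.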
#[cfg(feature = "std")]
impl<C: VecConfig> std::io::Write for Vec<u8, C> {
    #[inline]
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }

    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        match self._try_reserve(buf.len(), false) {
            Ok(_) => {
                unsafe { self.extend_unchecked(buf) };
                Ok(buf.len())
            }
            Err(StorageError::CapacityLimit) => {
                let spare = self.capacity().to_usize() - self.len().to_usize();
                if spare > 0 {
                    unsafe { self.extend_unchecked(&buf[..spare]) };
                }
                Ok(spare)
            }
            Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)),
        }
    }
}

#[cfg(feature = "zeroize")]
impl<T, A: VecConfig> Zeroize for Vec<T, A> {
    fn zeroize(&mut self) {
        self.clear();
        self.spare_capacity_mut().zeroize();
    }
}

#[cfg(feature = "zeroize")]
impl<T, C: VecConfig> ZeroizeOnDrop for Vec<T, C> where C::Buffer<T>: ZeroizeOnDrop {}

#[cfg(doctest)]
fn _lifetime_check() {}