1use crate::alloc::{Allocator, Global};
8use crate::collections::{TryReserveError, TryReserveErrorKind};
9
10use std::{
11 alloc::Layout,
12 borrow::{Borrow, BorrowMut},
13 cmp,
14 fmt,
16 fmt::Debug,
17 hash::{Hash, Hasher},
18 iter::FusedIterator,
19 mem,
20 mem::ManuallyDrop,
21 mem::MaybeUninit,
22 ops::{Bound, Deref, DerefMut, Index, IndexMut, RangeBounds},
23 ptr,
24 ptr::NonNull,
25 slice,
26 slice::SliceIndex,
27};
28
/// A growable array type backed by a manually managed heap buffer,
/// parameterized over an allocator (defaults to [`Global`]).
pub struct Vec<T, A: Allocator = Global> {
    // Number of initialized elements.
    len: usize,
    // Number of elements the current allocation can hold (stays in sync with
    // the real allocation; 0 while unallocated).
    cap: usize,
    // Pointer to the buffer; dangling (but never null) while unallocated.
    nn: NonNull<T>,
    // Allocator used for all buffer management.
    alloc: A,
}
44
impl<T> Vec<T> {
    /// Creates a new, empty vector using the global allocator.
    /// Does not allocate until elements are pushed.
    #[must_use]
    pub const fn new() -> Vec<T> {
        Vec::new_in(Global)
    }
}
64
impl<T, A: Allocator> Vec<T, A> {
    /// Returns the number of elements in the vector.
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector holds no elements.
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Appends `value` to the back of the vector, growing the buffer
    /// (doubling, starting at 4) when full. Panics on allocation failure.
    pub fn push(&mut self, value: T) {
        if self.cap == self.len {
            let nc = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(nc).unwrap();
        }
        // SAFETY: the branch above guarantees cap > len, so slot `len` is in bounds.
        unsafe {
            self.set(self.len, value);
        }
        self.len += 1;
    }

    /// Appends `value` and returns a mutable reference to the pushed element.
    pub fn push_mut(&mut self, value: T) -> &mut T {
        if self.cap == self.len {
            let nc = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(nc).unwrap();
        }
        // SAFETY: capacity was just ensured; `end` points at the first spare slot.
        unsafe {
            let end = self.as_mut_ptr().add(self.len);
            ptr::write(end, value);
            self.len += 1;
            &mut *end
        }
    }

    /// Removes and returns the last element, or `None` if the vector is empty.
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            self.len -= 1;
            // SAFETY: index `len` (after the decrement) was the last initialized slot.
            unsafe { Some(self.getx(self.len)) }
        }
    }

    /// Removes and returns the last element only if `predicate` accepts it.
    pub fn pop_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option<T> {
        let last = self.last_mut()?;
        if predicate(last) { self.pop() } else { None }
    }

    /// Returns a guard giving mutable access to the last element, or `None` if empty.
    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, A>> {
        if self.is_empty() {
            None
        } else {
            Some(PeekMut { vec: self })
        }
    }

    /// Inserts `value` at `index`, shifting later elements right.
    /// Panics if `index > len` or on allocation failure.
    pub fn insert(&mut self, index: usize, value: T) {
        assert!(index <= self.len);
        if self.cap == self.len {
            let na = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(na).unwrap();
        }
        // SAFETY: capacity ensured above; the overlapping copy shifts the tail
        // one slot right before the vacated slot is overwritten.
        unsafe {
            if index < self.len {
                ptr::copy(self.ixp(index), self.ixp(index + 1), self.len - index);
            }
            self.set(index, value);
        }
        self.len += 1;
    }

    /// Like `insert`, but returns a mutable reference to the inserted element.
    pub fn insert_mut(&mut self, index: usize, value: T) -> &mut T {
        assert!(index <= self.len);
        if self.cap == self.len {
            let na = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(na).unwrap();
        }
        // SAFETY: same shift-then-write scheme as `insert`.
        unsafe {
            if index < self.len {
                ptr::copy(self.ixp(index), self.ixp(index + 1), self.len - index);
            }
            self.set(index, value);
        }
        self.len += 1;
        // SAFETY: `index` is now inside the initialized prefix.
        unsafe { &mut *self.ixp(index) }
    }

    /// Removes and returns the element at `index`, shifting later elements
    /// left. Panics if `index >= len`. O(len - index).
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        // SAFETY: `index` is in bounds; the value is read out before the tail
        // is copied over it, so no element is duplicated or leaked.
        unsafe {
            let result = self.getx(index);
            ptr::copy(self.ixp(index + 1), self.ixp(index), self.len() - index - 1);
            self.len -= 1;
            result
        }
    }

    /// Removes and returns the element at `index` in O(1) by moving the last
    /// element into its place. Does not preserve order. Panics if out of bounds.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        // SAFETY: both `index` and the old last slot are initialized; the last
        // element is moved into the vacated slot, so nothing is double-dropped.
        unsafe {
            let result = self.getx(index);
            self.len -= 1;
            if index != self.len {
                let last = self.getx(self.len);
                self.set(index, last);
            }
            result
        }
    }
}
217
impl<T, A: Allocator> Vec<T, A> {
    /// Moves every element of `other` into `self`, leaving `other` empty.
    pub fn append(&mut self, other: &mut Self) {
        self.reserve(other.len);
        // SAFETY: the two buffers are distinct allocations, and `reserve`
        // guaranteed room for `other.len` more elements.
        unsafe {
            ptr::copy_nonoverlapping(other.ixp(0), self.ixp(self.len), other.len);
        }
        self.len += other.len;
        // Ownership moved above, so `other` must forget its elements rather
        // than drop them.
        other.len = 0;
    }

    /// Appends a clone of every element in `other`.
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        for e in other {
            self.push(e.clone());
        }
    }

    /// Clones the elements in range `src` of `self` and appends them.
    /// Panics if the range is out of bounds.
    pub fn extend_from_within<R>(&mut self, src: R)
    where
        T: Clone,
        R: RangeBounds<usize>,
    {
        let (start, end) = self.get_range(src);

        // Indexing each iteration (instead of holding a slice) stays valid
        // even though `push` may reallocate the buffer.
        for i in start..end {
            let e = self[i].clone();
            self.push(e);
        }
    }

    /// Replaces the elements in `range` with the items of `replace_with`.
    /// The replacement happens when the returned `Splice` is dropped;
    /// iterating the `Splice` yields the removed elements.
    pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A>
    where
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
        A: Clone,
    {
        Splice {
            drain: self.drain(range),
            replace_with: replace_with.into_iter(),
        }
    }

    /// Drops every element, keeping the allocated capacity.
    pub fn clear(&mut self) {
        while self.len > 0 {
            self.pop();
        }
    }

    /// Shortens the vector to at most `len` elements, dropping the excess.
    /// A `len` greater than the current length is a no-op.
    pub fn truncate(&mut self, len: usize) {
        while self.len > len {
            self.pop();
        }
    }

    /// Resizes to `new_len`, filling with clones of `value` when growing.
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Resizes to `new_len`, filling with values produced by `f` when growing.
    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> T,
    {
        let mut f = f;
        if new_len > self.len {
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Splits the vector at `at`, returning a new vector holding elements
    /// `at..len`. Panics if `at > len`.
    pub fn split_off(&mut self, at: usize) -> Self
    where
        A: Clone,
    {
        assert!(at <= self.len);

        let other_len = self.len - at;
        let mut other = Vec::with_capacity_in(other_len, self.alloc.clone());

        // SAFETY: `other` has room for `other_len` elements and the buffers do
        // not overlap; shrinking `self.len` first transfers ownership of the
        // tail so nothing is dropped twice.
        unsafe {
            self.len = at;
            other.len = other_len;
            ptr::copy_nonoverlapping(self.ixp(at), other.ixp(0), other_len);
        }
        other
    }

    /// Returns the initialized elements and the uninitialized spare capacity
    /// as two adjacent mutable slices.
    pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
        let ptr = self.as_mut_ptr();
        // SAFETY: `len <= cap`, so `ptr + len` is within (or one past) the allocation.
        let spare_ptr = unsafe { ptr.add(self.len) };
        let spare_ptr = spare_ptr as *mut MaybeUninit<T>;
        let spare_len = self.cap - self.len;

        // SAFETY: the two slices are disjoint views of the same allocation.
        unsafe {
            let initialized = slice::from_raw_parts_mut(ptr, self.len);
            let spare = slice::from_raw_parts_mut(spare_ptr, spare_len);
            (initialized, spare)
        }
    }

    /// Keeps only the elements for which `f` returns `true`, in order.
    pub fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&T) -> bool,
    {
        Gap::new(self, 0).retain(f);
    }

    /// Like `retain`, but passes mutable references to the predicate.
    pub fn retain_mut<F>(&mut self, f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        Gap::new(self, 0).retain_mut(f);
    }

    /// Removes the elements in `range`, yielding them through the iterator.
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
    where
        R: RangeBounds<usize>,
    {
        Drain::new(self, range)
    }

    /// Removes and yields the elements in `range` for which `filter` is true.
    pub fn extract_if<F, R>(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, A>
    where
        F: FnMut(&mut T) -> bool,
        R: RangeBounds<usize>,
    {
        ExtractIf::new(self, filter, range)
    }

    /// Removes consecutive duplicate elements.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }

    /// Removes consecutive elements for which `same_bucket` returns `true`,
    /// keeping the first element of each run.
    pub fn dedup_by<F>(&mut self, same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        Gap::new(self, 0).dedup_by(same_bucket);
    }

    /// Removes consecutive elements that map to the same key.
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b))
    }

    /// Returns a raw pointer to slot `i`.
    ///
    /// # Safety
    /// `i` must not exceed the capacity (the slot need not be initialized).
    #[inline]
    unsafe fn ixp(&self, i: usize) -> *mut T {
        unsafe { self.nn.as_ptr().add(i) }
    }

    /// Moves the value out of slot `i` without adjusting `len`.
    ///
    /// # Safety
    /// Slot `i` must be initialized, and the caller must ensure it is not
    /// read or dropped again afterwards.
    #[inline]
    unsafe fn getx(&mut self, i: usize) -> T {
        unsafe { ptr::read(self.ixp(i)) }
    }

    /// Writes `elem` into slot `i` without reading or dropping old contents.
    ///
    /// # Safety
    /// `i` must be within capacity; any previous value in the slot is leaked.
    #[inline]
    unsafe fn set(&mut self, i: usize, elem: T) {
        unsafe {
            ptr::write(self.ixp(i), elem);
        }
    }

    /// Reallocates the buffer to hold exactly `na` elements (`na >= len`).
    fn set_capacity(&mut self, na: usize) -> Result<(), TryReserveError> {
        assert!(na >= self.len);
        if na == self.cap {
            return Ok(());
        }
        // SAFETY: `self.cap` accurately describes the current allocation.
        let result = unsafe { self.basic_set_capacity(self.cap, na) };
        if result.is_ok() {
            self.cap = na;
        }
        result
    }

    /// Grows, shrinks, allocates, or frees the backing buffer from `oa` to
    /// `na` elements. ZSTs never allocate; `na == 0` frees the buffer.
    ///
    /// # Safety
    /// `oa` must be the element capacity of the current allocation.
    unsafe fn basic_set_capacity(&mut self, oa: usize, na: usize) -> Result<(), TryReserveError> {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // Zero-sized types need no storage at all.
                return Ok(());
            }
            if na == 0 {
                // Release the allocation entirely. `set_capacity` filtered out
                // the oa == 0 case (na == cap), so a real allocation exists.
                self.alloc.deallocate(
                    NonNull::new(self.nn.as_ptr().cast::<u8>()).unwrap(),
                    Layout::array::<T>(oa).unwrap(),
                );
                self.nn = NonNull::dangling();
                return Ok(());
            }
            let new_layout = match Layout::array::<T>(na) {
                Ok(x) => x,
                Err(_e) => {
                    let kind = TryReserveErrorKind::CapacityOverflow;
                    return Err(TryReserveError { kind });
                }
            };
            let new_ptr = if oa == 0 {
                // First allocation.
                self.alloc.allocate(new_layout)
            } else {
                let old_layout = Layout::array::<T>(oa).unwrap();
                let old_ptr = self.nn.as_ptr().cast::<u8>();
                let old_ptr = NonNull::new(old_ptr).unwrap();
                if new_layout.size() > old_layout.size() {
                    self.alloc.grow(old_ptr, old_layout, new_layout)
                } else {
                    self.alloc.shrink(old_ptr, old_layout, new_layout)
                }
            };
            match new_ptr {
                Ok(p) => self.nn = NonNull::new(p.as_ptr().cast::<T>()).unwrap(),
                Err(_e) => {
                    let kind = TryReserveErrorKind::AllocError { layout: new_layout };
                    return Err(TryReserveError { kind });
                }
            }
        }
        Ok(())
    }

    /// Resolves `range` against `0..len`, panicking if it is invalid or out
    /// of bounds. Returns the half-open pair `(start, end)`.
    fn get_range<R>(&self, range: R) -> (usize, usize)
    where
        R: RangeBounds<usize>,
    {
        let start = match range.start_bound() {
            Bound::Included(x) => *x,
            Bound::Excluded(x) => {
                // Guard the +1 against overflow.
                assert!(*x < usize::MAX);
                *x + 1
            }
            Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            Bound::Included(x) => {
                assert!(*x < usize::MAX);
                *x + 1
            }
            Bound::Excluded(x) => *x,
            Bound::Unbounded => self.len,
        };
        assert!(end <= self.len);
        assert!(start <= end);
        (start, end)
    }
}
617
618impl<T, A: Allocator> Vec<T, A> {
621 #[must_use]
623 pub const fn new_in(alloc: A) -> Vec<T, A> {
624 Self {
625 len: 0,
626 cap: 0,
627 alloc,
628 nn: NonNull::dangling(),
629 }
630 }
631
632 pub fn allocator(&self) -> &A {
634 &self.alloc
635 }
636
637 pub const fn capacity(&self) -> usize {
639 if size_of::<T>() == 0 {
640 usize::MAX
641 } else {
642 self.cap
643 }
644 }
645
646 pub fn with_capacity_in(capacity: usize, alloc: A) -> Vec<T, A> {
649 let mut v = Self::new_in(alloc);
650 v.set_capacity(capacity).unwrap();
651 v
652 }
653
654 pub fn reserve(&mut self, additional: usize) {
657 let capacity = self.len + additional;
658 if capacity > self.cap {
660 self.set_capacity(capacity).unwrap();
661 }
662 }
663
664 pub fn reserve_exact(&mut self, additional: usize) {
667 let capacity = self.len + additional;
668 if capacity > self.cap {
669 self.set_capacity(capacity).unwrap();
670 }
671 }
672
673 pub fn shrink_to_fit(&mut self) {
675 let _ = self.set_capacity(self.len);
676 }
677
678 pub fn shrink_to(&mut self, capacity: usize) {
680 if self.cap > capacity {
681 let _ = self.set_capacity(cmp::max(self.len, capacity));
682 }
683 }
684}
685
impl<T> Vec<T> {
    /// Creates an empty vector with room for `capacity` elements in the
    /// global allocator. Panics on allocation failure.
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Vec<T> {
        let mut v = Vec::<T>::new();
        v.set_capacity(capacity).unwrap();
        v
    }
}
706
impl<T, A: Allocator> Vec<T, A> {
    /// Views the initialized elements as a shared slice.
    pub const fn as_slice(&self) -> &[T] {
        // SAFETY: the first `len` slots are initialized; when unallocated,
        // `nn` is dangling-but-aligned and `len` is 0, which is permitted.
        unsafe { std::slice::from_raw_parts(self.nn.as_ptr(), self.len) }
    }

    /// Views the initialized elements as a mutable slice.
    pub const fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: as for `as_slice`, plus we hold `&mut self`.
        unsafe { std::slice::from_raw_parts_mut(self.nn.as_ptr(), self.len) }
    }

    /// Returns the buffer pointer as a `NonNull`.
    pub const fn as_non_null(&mut self) -> NonNull<T> {
        self.nn
    }

    /// Returns a read-only pointer to the buffer.
    pub const fn as_ptr(&self) -> *const T {
        self.nn.as_ptr()
    }

    /// Returns a mutable pointer to the buffer.
    pub const fn as_mut_ptr(&mut self) -> *mut T {
        self.nn.as_ptr()
    }

    /// Decomposes the vector into `(pointer, length, capacity)` without
    /// dropping anything; rebuild it with `from_parts`.
    pub fn into_parts(self) -> (NonNull<T>, usize, usize) {
        let (ptr, len, capacity) = self.into_raw_parts();
        // SAFETY: the pointer originates from `nn`, which is never null.
        (unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
    }

    /// Like `into_parts`, but also hands back the allocator.
    pub fn into_parts_with_alloc(self) -> (NonNull<T>, usize, usize, A) {
        let (ptr, len, capacity, alloc) = self.into_raw_parts_with_alloc();
        // SAFETY: the pointer originates from `nn`, which is never null.
        (unsafe { NonNull::new_unchecked(ptr) }, len, capacity, alloc)
    }

    /// Decomposes into raw parts; `ManuallyDrop` prevents the destructor
    /// from freeing the buffer being handed out.
    pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
        let mut me = ManuallyDrop::new(self);
        (me.as_mut_ptr(), me.len, me.cap)
    }

    /// Raw decomposition including the allocator (moved out via `ptr::read`).
    pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) {
        let mut me = ManuallyDrop::new(self);
        // SAFETY: `me` is never dropped, so the allocator is not read twice.
        let alloc = unsafe { ptr::read(&me.alloc) };
        (me.as_mut_ptr(), me.len, me.cap, alloc)
    }

    /// Rebuilds a vector from parts produced by `into_parts_with_alloc`.
    ///
    /// # Safety
    /// `ptr` must denote an allocation of `capacity` elements made by
    /// `alloc`, with the first `length` slots initialized.
    pub unsafe fn from_parts_in(ptr: NonNull<T>, length: usize, capacity: usize, alloc: A) -> Self {
        assert!(capacity >= length);
        Self {
            nn: ptr,
            cap: capacity,
            alloc,
            len: length,
        }
    }

    /// Raw-pointer variant of `from_parts_in`.
    ///
    /// # Safety
    /// Same as `from_parts_in`; `ptr` must additionally be non-null.
    pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
        assert!(capacity >= length);
        // SAFETY: caller guarantees `ptr` is non-null.
        let nn = unsafe { NonNull::new_unchecked(ptr) };
        Self {
            nn,
            cap: capacity,
            alloc,
            len: length,
        }
    }

    /// Leaks the vector, returning a mutable slice with an unbounded
    /// lifetime. The buffer is never freed.
    pub fn leak<'a>(self) -> &'a mut [T]
    where
        A: 'a,
    {
        let mut me = ManuallyDrop::new(self);
        // SAFETY: the allocation is intentionally never released, so the
        // slice stays valid for any caller-chosen lifetime.
        unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) }
    }

    /// Returns the uninitialized tail of the buffer (`len..cap`).
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        // SAFETY: the range len..cap lies inside the allocation and is
        // exposed only as `MaybeUninit`, so no uninitialized read can occur.
        unsafe {
            slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>,
                self.cap - self.len,
            )
        }
    }

    /// Forces the length to `new_len`.
    ///
    /// # Safety
    /// The first `new_len` slots must be initialized; the assert only guards
    /// against exceeding capacity, not against uninitialized reads.
    pub unsafe fn set_len(&mut self, new_len: usize) {
        assert!(new_len <= self.cap);
        self.len = new_len;
    }

    /// Reinterprets the vector as a vector of `N`-element arrays, truncating
    /// the length (and, for sized `T`, the capacity) to multiples of `N`.
    pub fn into_chunks<const N: usize>(mut self) -> Vec<[T; N], A> {
        const {
            assert!(N != 0, "chunk size must be greater than zero");
        }

        let (len, cap) = (self.len(), self.capacity());

        // Drop any trailing partial chunk.
        let len_remainder = len % N;
        if len_remainder != 0 {
            self.truncate(len - len_remainder);
        }

        // Shrink so the allocation is an exact multiple of `[T; N]`.
        let cap_remainder = cap % N;
        if mem::size_of::<T>() != 0 && cap_remainder != 0 {
            self.shrink_to(cap - cap_remainder);
        }

        let (ptr, _, _, alloc) = self.into_raw_parts_with_alloc();

        // SAFETY: `[T; N]` has the same layout as N consecutive T, and both
        // length and capacity are now multiples of N (integer division of
        // the original values yields the same quotients).
        unsafe { Vec::from_raw_parts_in(ptr.cast(), len / N, cap / N, alloc) }
    }
}
963
impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
    /// Flattens a vector of arrays into a vector of their elements.
    pub fn into_flattened(self) -> Vec<T, A> {
        let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
        let (new_len, new_cap) = if mem::size_of::<T>() == 0 {
            // ZST capacity is unbounded; only the length matters and its
            // multiplication may genuinely overflow.
            (len.checked_mul(N).expect("vec len overflow"), usize::MAX)
        } else {
            // SAFETY: the existing allocation already holds len * N (resp.
            // cap * N) elements of T, so these products cannot overflow.
            unsafe { (len.unchecked_mul(N), cap.unchecked_mul(N)) }
        };
        // SAFETY: `[T; N]` has the same layout as N consecutive T.
        unsafe { Vec::<T, A>::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) }
    }
}
1006
1007impl<T> Vec<T> {
1008 pub fn from_fn<F>(length: usize, f: F) -> Vec<T>
1018 where
1019 F: FnMut(usize) -> T,
1020 {
1021 let mut f = f;
1022 let mut m = Vec::with_capacity(length);
1023 for i in 0..length {
1024 m.push(f(i));
1025 }
1026 m
1027 }
1028
1029 pub unsafe fn from_parts(ptr: NonNull<T>, length: usize, capacity: usize) -> Vec<T> {
1051 let mut v = Vec::new();
1052 v.len = length;
1053 v.cap = capacity;
1054 v.nn = ptr;
1055 v
1056 }
1057
1058 pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
1064 let mut v = Vec::new();
1065 v.len = length;
1066 v.cap = capacity;
1067 v.nn = unsafe { NonNull::new_unchecked(ptr) };
1068 v
1069 }
1070}
1071
1072impl<T, A: Allocator> Vec<T, A> {
1075 pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
1077 let mut v = Self::new_in(alloc);
1078 v.set_capacity(capacity)?;
1079 Ok(v)
1080 }
1081
1082 pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
1084 let capacity = self.len + additional;
1085 if capacity > self.cap {
1087 return self.set_capacity(capacity);
1088 }
1089 Ok(())
1090 }
1091
1092 pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
1094 let capacity = self.len + additional;
1095 if capacity > self.cap {
1096 return self.set_capacity(capacity);
1097 }
1098 Ok(())
1099 }
1100
1101 pub fn push_within_capacity(&mut self, value: T) -> Result<&mut T, T> {
1104 if self.cap == self.len {
1105 return Err(value);
1106 }
1107 let index = self.len;
1108 self.len += 1;
1109 unsafe {
1110 self.set(index, value);
1111 }
1112 Ok(unsafe { &mut *self.ixp(index) })
1113 }
1114
1115 pub fn try_remove(&mut self, index: usize) -> Option<T> {
1118 if index >= self.len {
1119 return None;
1120 }
1121 unsafe {
1122 let result = self.getx(index);
1123 ptr::copy(self.ixp(index + 1), self.ixp(index), self.len() - index - 1);
1124 self.len -= 1;
1125 Some(result)
1126 }
1127 }
1128}
1129
impl<T> Vec<T> {
    /// Fallible version of [`Vec::with_capacity`].
    pub fn try_with_capacity(capacity: usize) -> Result<Vec<T>, TryReserveError> {
        let mut v = Vec::<T>::new();
        v.set_capacity(capacity)?;
        Ok(v)
    }
}
1138
1139unsafe impl<T: Send, A: Allocator + Send> Send for Vec<T, A> {}
1144unsafe impl<T: Sync, A: Allocator + Send> Sync for Vec<T, A> {}
1145
impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}

/// Lexicographic comparison, delegated to the slice implementation.
impl<T, A1, A2> PartialOrd<Vec<T, A2>> for Vec<T, A1>
where
    T: PartialOrd,
    A1: Allocator,
    A2: Allocator,
{
    fn partial_cmp(&self, other: &Vec<T, A2>) -> Option<std::cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

/// Total lexicographic ordering, delegated to the slice implementation.
impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        std::cmp::Ord::cmp(&**self, &**other)
    }
}

/// Element-wise equality between vectors; the allocators may differ.
impl<T, U, A1: Allocator, A2: Allocator> PartialEq<Vec<U, A2>> for Vec<T, A1>
where
    T: PartialEq<U>,
{
    fn eq(&self, other: &Vec<U, A2>) -> bool {
        self[..] == other[..]
    }
}

/// Equality against a fixed-size array.
impl<T, U, A: Allocator, const N: usize> PartialEq<[U; N]> for Vec<T, A>
where
    T: PartialEq<U>,
{
    fn eq(&self, other: &[U; N]) -> bool {
        self[..] == other[..]
    }
}

/// Equality against a reference to a fixed-size array.
impl<T, U, A: Allocator, const N: usize> PartialEq<&[U; N]> for Vec<T, A>
where
    T: PartialEq<U>,
{
    fn eq(&self, other: &&[U; N]) -> bool {
        self[..] == other[..]
    }
}

/// Hashes like the equivalent slice, so `Vec<T>` and `[T]` hash identically.
impl<T: Hash, A: Allocator> Hash for Vec<T, A> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state)
    }
}
1198
/// Indexing delegates to the slice view, supporting all `SliceIndex` forms.
impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> {
    type Output = I::Output;
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}

impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}

/// Deref to `[T]` gives `Vec` every slice method for free.
impl<T, A: Allocator> Deref for Vec<T, A> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        // SAFETY: the first `len` slots are initialized.
        unsafe { std::slice::from_raw_parts(self.nn.as_ptr(), self.len) }
    }
}

impl<T, A: Allocator> DerefMut for Vec<T, A> {
    fn deref_mut(&mut self) -> &mut [T] {
        // SAFETY: as for `deref`, plus we hold `&mut self`.
        unsafe { std::slice::from_raw_parts_mut(self.nn.as_ptr(), self.len) }
    }
}
1224
/// Borrowing iteration: `for x in &v` yields `&T`.
impl<'a, T: 'a, A: Allocator> IntoIterator for &'a Vec<T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

/// Mutable iteration: `for x in &mut v` yields `&mut T`.
impl<'a, T: 'a, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

/// Consuming iteration: `for x in v` yields owned `T`.
impl<T, A: Allocator> IntoIterator for Vec<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}
1248
1249impl<T, A: Allocator> Extend<T> for Vec<T, A> {
1250 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1251 for e in iter {
1252 self.push(e);
1253 }
1254 }
1255}
1256
1257impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for Vec<T, A> {
1258 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
1259 for e in iter {
1260 self.push(*e);
1261 }
1262 }
1263}
1264
1265impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
1266 fn clone(&self) -> Self {
1267 let mut v = Vec::with_capacity_in(self.len, self.alloc.clone());
1268 for e in self.iter() {
1269 v.push(e.clone());
1270 }
1271 v
1272 }
1273}
1274
impl<T, A: Allocator> Drop for Vec<T, A> {
    /// Drops every element in back-to-front order, then frees the buffer.
    fn drop(&mut self) {
        while self.len != 0 {
            self.pop();
        }
        // Deallocation failure is ignored: Drop cannot propagate errors.
        let _ = self.set_capacity(0);
    }
}
1283
impl<T> Default for Vec<T> {
    /// An empty vector; equivalent to [`Vec::new`].
    fn default() -> Self {
        Self::new()
    }
}

/// Formats like a slice, e.g. `[1, 2, 3]`.
impl<T: Debug, A: Allocator> Debug for Vec<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
1295
1296impl<T: Clone> From<&[T]> for Vec<T> {
1298 fn from(s: &[T]) -> Vec<T> {
1306 let mut v = Vec::new();
1307 for e in s {
1308 v.push(e.clone());
1309 }
1310 v
1311 }
1312}
1313
1314impl<T: Clone> From<&mut [T]> for Vec<T> {
1315 fn from(s: &mut [T]) -> Vec<T> {
1323 let mut v = Vec::new();
1324 for e in s {
1325 v.push(e.clone());
1326 }
1327 v
1328 }
1329}
1330
impl<T, const N: usize> From<[T; N]> for Vec<T> {
    /// Moves the array's elements into a new vector.
    fn from(a: [T; N]) -> Vec<T> {
        Vec::from_iter(a)
    }
}
1343
1344impl<T> FromIterator<T> for Vec<T> {
1345 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
1346 let mut v = Vec::new();
1347 for e in iter {
1348 v.push(e);
1349 }
1350 v
1351 }
1352}
1353
impl<T: Clone, const N: usize> From<&[T; N]> for Vec<T> {
    /// Clones the array's elements, delegating to the slice conversion.
    fn from(s: &[T; N]) -> Vec<T> {
        Self::from(s.as_slice())
    }
}

impl<T: Clone, const N: usize> From<&mut [T; N]> for Vec<T> {
    /// Clones the array's elements, delegating to the slice conversion.
    fn from(s: &mut [T; N]) -> Vec<T> {
        Self::from(s.as_mut_slice())
    }
}
1379
/// Identity `AsRef`/`AsMut`, mirroring the standard library's `Vec`.
impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
    fn as_ref(&self) -> &Vec<T, A> {
        self
    }
}

impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
    fn as_mut(&mut self) -> &mut Vec<T, A> {
        self
    }
}

/// Slice views obtained via deref coercion.
impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}

/// `Borrow` lets `Vec` map keys be looked up by `&[T]`.
impl<T, A: Allocator> Borrow<[T]> for Vec<T, A> {
    fn borrow(&self) -> &[T] {
        &self[..]
    }
}

impl<T, A: Allocator> BorrowMut<[T]> for Vec<T, A> {
    fn borrow_mut(&mut self) -> &mut [T] {
        &mut self[..]
    }
}
1415
/// Converts a vector of exactly `N` elements into an array, returning the
/// vector unchanged in `Err` when the length does not match.
impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
    type Error = Vec<T, A>;

    fn try_from(mut vec: Vec<T, A>) -> Result<[T; N], Vec<T, A>> {
        if vec.len() != N {
            return Err(vec);
        }

        // SAFETY: zeroing the length first transfers ownership of the
        // elements to the array read below, so the Vec's Drop (which still
        // frees the buffer) will not drop them a second time.
        unsafe { vec.set_len(0) };

        // SAFETY: the buffer holds exactly N initialized elements.
        let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
        Ok(array)
    }
}
1462
/// Owning iterator over a [`Vec`]; yields elements in `start..end` and keeps
/// the (length-zeroed) vector alive so its buffer is freed on drop.
#[derive(Debug)]
pub struct IntoIter<T, A: Allocator = Global> {
    // Index of the next element to yield from the front.
    start: usize,
    // One past the last element still to be yielded (back cursor).
    end: usize,
    // Source vector; its `len` is 0 so it will not double-drop elements.
    v: Vec<T, A>,
}
1474
impl<T, A: Allocator> IntoIter<T, A> {
    /// Takes ownership of `v`, zeroing its length so that un-yielded
    /// elements are dropped by the iterator, not by the vector.
    fn new(mut v: Vec<T, A>) -> Self {
        let end = v.len;
        v.len = 0;
        Self { start: 0, end, v }
    }

    /// Views the remaining elements as a slice.
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: slots start..end are initialized and owned by the iterator.
        unsafe { slice::from_raw_parts(self.v.ixp(self.start), self.len()) }
    }

    /// Views the remaining elements as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: as above, plus we hold `&mut self`.
        unsafe { slice::from_raw_parts_mut(self.v.ixp(self.start), self.len()) }
    }

    /// Returns a reference to the underlying allocator.
    pub fn allocator(&self) -> &A {
        &self.v.alloc
    }
}
1497
impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
    /// An empty iterator backed by an empty, unallocated vector.
    fn default() -> Self {
        Vec::new_in(Default::default()).into_iter()
    }
}
1507
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    /// Clones only the remaining (not yet yielded) elements.
    fn clone(&self) -> Self {
        let mut v = Vec::new_in(self.allocator().clone());
        v.extend_from_slice(self.as_slice());
        v.into_iter()
    }
}
1521
1522impl<T, A: Allocator> Iterator for IntoIter<T, A> {
1523 type Item = T;
1524 fn next(&mut self) -> Option<T> {
1525 if self.start == self.end {
1526 None
1527 } else {
1528 let ix = self.start;
1529 self.start += 1;
1530 Some(unsafe { self.v.getx(ix) })
1531 }
1532 }
1533}
1534
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    /// Remaining element count.
    fn len(&self) -> usize {
        self.end - self.start
    }
}

impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    fn next_back(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            self.end -= 1;
            // SAFETY: slot `end` (after the decrement) is initialized and is
            // yielded exactly once.
            Some(unsafe { self.v.getx(self.end) })
        }
    }
}
1551
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

impl<T, A: Allocator> Drop for IntoIter<T, A> {
    /// Drains (and thereby drops) any remaining elements; the inner Vec's
    /// own Drop then frees the buffer.
    fn drop(&mut self) {
        while self.next().is_some() {}
    }
}
1559
// Gap-buffer helper shared by retain/dedup/drain/extract_if: elements are
// read at index `r` and compacted down to index `w` (w <= r). `len` caches
// the vector's true length while `v.len` is held at 0 for panic safety; the
// Drop impl closes the gap and restores a consistent length.
struct Gap<'a, T, A: Allocator> {
    r: usize, w: usize, len: usize, v: &'a mut Vec<T, A>,
}
1570
impl<'a, T, A: Allocator> Gap<'a, T, A> {
    /// Starts a gap at index `b`. The vector's length is zeroed so a panic
    /// mid-operation cannot expose moved-out slots; `close_gap` (run from
    /// Drop) restores it.
    fn new(v: &'a mut Vec<T, A>, b: usize) -> Self {
        let len = v.len;
        v.len = 0; Self { w: b, r: b, v, len }
    }

    /// Shifts the tail (`r..len`) down onto the write cursor and restores
    /// the vector's length, reduced by the gap width.
    fn close_gap(&mut self) {
        let n = self.len - self.r;
        let g = self.r - self.w;
        if n > 0 && g != 0 {
            // SAFETY: both ranges lie inside the allocation and hold
            // initialized elements; `ptr::copy` handles the overlap.
            unsafe {
                let dst = self.v.ixp(self.w);
                let src = self.v.ixp(self.r);
                ptr::copy(src, dst, n);
            }
        }
        self.v.len = self.len - g;
    }

    /// Moves elements `r..upto` down to the write cursor, keeping them.
    fn keep(&mut self, upto: usize) {
        // SAFETY: each slot below `upto <= len` is initialized and is moved
        // at most once.
        unsafe {
            while self.r < upto {
                let nxt = self.v.ixp(self.r);
                if self.r != self.w {
                    let to = self.v.ixp(self.w);
                    ptr::write(to, ptr::read(nxt));
                }
                self.r += 1;
                self.w += 1;
            }
        }
    }

    /// Writes `e` at the write cursor and advances it.
    ///
    /// # Safety
    /// The slot at `w` must be vacant (inside the gap).
    unsafe fn fill(&mut self, e: T) {
        unsafe {
            let to = self.v.ixp(self.w);
            ptr::write(to, e);
            self.w += 1;
        }
    }

    /// Feeds `src` into the gap. If the gap fills up, the remainder is
    /// inserted via split_off/push/append (hence the `A: Clone` bound).
    fn append<I: Iterator<Item = T>>(&mut self, src: &mut I)
    where
        A: Clone,
    {
        while self.w < self.r {
            if let Some(e) = src.next() {
                // SAFETY: w < r, so the slot is inside the gap (vacant).
                unsafe {
                    self.fill(e);
                }
            } else {
                return;
            }
        }
        // Gap exhausted: temporarily restore the true length, detach the
        // tail, push the rest of `src`, then reattach the tail.
        self.v.len = self.len; let mut tail = self.v.split_off(self.w);
        for e in src {
            self.v.push(e);
        }
        self.v.append(&mut tail);
        self.len = self.v.len; }

    /// Compacts the vector, keeping elements accepted by `f`.
    fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        // SAFETY: each element in r..len is either moved down exactly once
        // or read out exactly once (so it is dropped below).
        unsafe {
            while self.r < self.len {
                let nxt = self.v.ixp(self.r);
                if f(&mut *nxt) {
                    if self.r != self.w {
                        let to = self.v.ixp(self.w);
                        ptr::write(to, ptr::read(nxt));
                    }
                    self.r += 1;
                    self.w += 1;
                } else {
                    self.r += 1;
                    // Read the rejected element out so it is dropped here.
                    let _v = ptr::read(nxt); }
            }
        }
    }

    /// As `retain`, but the predicate may mutate the elements.
    fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        // SAFETY: identical move/drop discipline to `retain`.
        unsafe {
            while self.r < self.len {
                let nxt = self.v.ixp(self.r);
                if f(&mut *nxt) {
                    if self.r != self.w {
                        let to = self.v.ixp(self.w);
                        ptr::write(to, ptr::read(nxt));
                    }
                    self.r += 1;
                    self.w += 1;
                } else {
                    self.r += 1;
                    // Drop the rejected element.
                    let _v = ptr::read(nxt); }
            }
        }
    }

    /// Removes consecutive duplicates as judged by `same_bucket`, keeping
    /// the first element of each run (tracked by `cur`).
    fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        if self.len > 0 {
            // SAFETY: `cur` always indexes the last kept element; duplicates
            // are read out (dropped), survivors are moved down at most once.
            unsafe {
                let mut cur = 0;
                self.r = 1;
                self.w = 1;
                while self.r < self.len {
                    let cp = self.v.ixp(cur);
                    let nxt = self.v.ixp(self.r);
                    if same_bucket(&mut *nxt, &mut *cp) {
                        self.r += 1;
                        // Drop the duplicate.
                        let _v = ptr::read(nxt); } else {
                        cur = self.w;
                        if self.r != self.w {
                            let to = self.v.ixp(self.w);
                            ptr::write(to, ptr::read(nxt));
                        }
                        self.r += 1;
                        self.w += 1;
                    }
                }
            }
        }
    }

    /// Advances until `f` accepts an element (read out and returned) or
    /// `end` is reached; rejected elements are compacted down.
    fn extract_if<F>(&mut self, f: &mut F, end: usize) -> Option<T>
    where
        F: FnMut(&mut T) -> bool,
    {
        // SAFETY: same single-move/single-read discipline as `retain`.
        unsafe {
            while self.r < end {
                let nxt = self.v.ixp(self.r);
                if f(&mut *nxt) {
                    self.r += 1;
                    return Some(ptr::read(nxt));
                }
                if self.r != self.w {
                    let to = self.v.ixp(self.w);
                    ptr::write(to, ptr::read(nxt));
                }
                self.r += 1;
                self.w += 1;
            }
            None
        }
    }
}
1741
impl<'a, T, A: Allocator> Drop for Gap<'a, T, A> {
    /// Restores the vector to a consistent state even if the caller's
    /// closure panicked mid-operation.
    fn drop(&mut self) {
        self.close_gap();
    }
}
1748
/// Draining iterator returned by [`Vec::drain`]; removed elements live in
/// `gap.r..br`, and the inner `Gap` compacts the vector when this is dropped.
pub struct Drain<'a, T, A: Allocator = Global> {
    // Gap buffer tracking read/write cursors into the vector.
    gap: Gap<'a, T, A>,
    // Original end of the drained range (used to finish the gap on drop).
    end: usize,
    // Back cursor for double-ended iteration; elements gap.r..br remain.
    br: usize,
}
1760
1761impl<'a, T, A: Allocator> Drain<'a, T, A> {
1762 fn new<R: RangeBounds<usize>>(vec: &'a mut Vec<T, A>, range: R) -> Self {
1763 let (b, end) = vec.get_range(range);
1764 let gap = Gap::new(vec, b);
1765 Self { gap, end, br: end }
1766 }
1767
1768 pub fn keep_rest(mut self) {
1770 self.gap.keep(self.br);
1771 }
1772
1773 #[must_use]
1775 pub fn allocator(&self) -> &A {
1776 &self.gap.v.alloc
1777 }
1778
1779 #[must_use]
1781 pub fn as_slice(&self) -> &[T] {
1782 &self.gap.v[self.gap.r..self.br]
1783 }
1784}
1785
impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

/// Shows the elements not yet drained.
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.as_slice()).finish()
    }
}
1797
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.gap.r == self.br {
            return None;
        }
        let x = self.gap.r;
        self.gap.r += 1;
        // SAFETY: slot `x` is initialized and is yielded exactly once.
        Some(unsafe { self.gap.v.getx(x) })
    }

    /// Exact: the remaining range is `gap.r..br`.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let n = self.br - self.gap.r;
        (n, Some(n))
    }
}
1814
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    fn next_back(&mut self) -> Option<T> {
        if self.gap.r == self.br {
            return None;
        }
        self.br -= 1;
        let x = self.br;
        // SAFETY: slot `x` is initialized and is yielded exactly once.
        Some(unsafe { self.gap.v.getx(x) })
    }
}

impl<T, A: Allocator> Drop for Drain<'_, T, A> {
    /// Drops any un-yielded elements, then marks the whole range consumed so
    /// the inner `Gap` closes over elements drained from the back as well.
    fn drop(&mut self) {
        while self.next().is_some() {}
        self.gap.r = self.end;
    }
}

impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {}

impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
1836
/// Iterator returned by [`Vec::extract_if`]; yields elements matching the
/// predicate and compacts the survivors via the inner `Gap`.
pub struct ExtractIf<'a, T, F, A: Allocator = Global> {
    // Gap buffer tracking read/write cursors into the vector.
    gap: Gap<'a, T, A>,
    // End (exclusive) of the range being examined.
    end: usize,
    // Predicate deciding which elements are extracted.
    pred: F,
}
1848
1849impl<'a, T, F, A: Allocator> ExtractIf<'a, T, F, A> {
1850 pub(super) fn new<R: RangeBounds<usize>>(vec: &'a mut Vec<T, A>, pred: F, range: R) -> Self {
1851 let (b, end) = vec.get_range(range);
1852 let gap = Gap::new(vec, b);
1853 Self { gap, pred, end }
1854 }
1855
1856 pub fn allocator(&self) -> &A {
1858 &self.gap.v.alloc
1859 }
1860
1861 #[must_use]
1863 fn as_slice(&self) -> &[T] {
1864 &self.gap.v[self.gap.r..self.end]
1865 }
1866}
1867
1868impl<T: fmt::Debug, A: Allocator> fmt::Debug for ExtractIf<'_, T, A> {
1869 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1870 f.debug_tuple("ExtractIf").field(&self.as_slice()).finish()
1871 }
1872}
1873
impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;
    fn next(&mut self) -> Option<T> {
        self.gap.extract_if(&mut self.pred, self.end)
    }

    /// Lower bound 0: the predicate may reject every remaining element.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.end - self.gap.r))
    }
}
1887
/// Iterator returned by [`Vec::splice`]; yields the drained range, and on
/// drop feeds `replace_with` into the vacated gap.
#[derive(Debug)]
pub struct Splice<'a, I: Iterator + 'a, A: Allocator + Clone + 'a = Global> {
    // Drain over the range being replaced.
    drain: Drain<'a, I::Item, A>,
    // Replacement items, inserted when the splice is dropped.
    replace_with: I,
}
1900
impl<I: Iterator, A: Allocator + Clone> Drop for Splice<'_, I, A> {
    /// Finishes the splice: drops whatever the caller did not consume, then
    /// feeds the replacement items into the gap.
    fn drop(&mut self) {
        self.drain.by_ref().for_each(drop);
        self.drain.gap.append(&mut self.replace_with);
    }
}
1907
/// Iteration is delegated to the inner drain.
impl<I: Iterator, A: Allocator + Clone> Iterator for Splice<'_, I, A> {
    type Item = I::Item;

    fn next(&mut self) -> Option<Self::Item> {
        self.drain.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }
}

impl<I: Iterator, A: Allocator + Clone> DoubleEndedIterator for Splice<'_, I, A> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.drain.next_back()
    }
}

impl<I: Iterator, A: Allocator + Clone> ExactSizeIterator for Splice<'_, I, A> {}
1927
/// Guard returned by [`Vec::peek_mut`], giving mutable access to the last
/// element; it is only constructed while the vector is non-empty.
pub struct PeekMut<'a, T, A: Allocator = Global> {
    // The peeked vector; guaranteed non-empty for the guard's lifetime.
    vec: &'a mut Vec<T, A>,
}

impl<T: fmt::Debug, A: Allocator> fmt::Debug for PeekMut<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PeekMut").field(self.deref()).finish()
    }
}
1946
impl<'a, T, A: Allocator> PeekMut<'a, T, A> {
    /// Removes and returns the peeked (last) element, consuming the guard.
    pub fn pop(this: Self) -> T {
        // SAFETY: a PeekMut is only constructed for a non-empty vector, so
        // `pop` cannot return None.
        unsafe { this.vec.pop().unwrap_unchecked() }
    }
}

impl<'a, T, A: Allocator> Deref for PeekMut<'a, T, A> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        let idx = self.vec.len() - 1;
        // SAFETY: the vector is non-empty while the guard exists.
        unsafe { self.vec.get_unchecked(idx) }
    }
}

impl<'a, T, A: Allocator> DerefMut for PeekMut<'a, T, A> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let idx = self.vec.len() - 1;
        // SAFETY: the vector is non-empty while the guard exists.
        unsafe { self.vec.get_unchecked_mut(idx) }
    }
}
1972
/// Creates a [`Vec`] containing the given elements, mirroring `std::vec!`:
/// `vec![]`, `vec![elem; n]`, and `vec![a, b, c]`.
#[macro_export]
macro_rules! vec {
    () => (
        $crate::vec::Vec::new()
    );
    ($elem:expr; $n:expr) => (
        $crate::vec::from_elem($elem, $n)
    );
    ($($x:expr),+ $(,)?) => (
        // Use the $crate path for hygiene: the previous bare `Vec` resolved
        // at the call site, breaking callers that have not imported (or have
        // shadowed) `Vec`, unlike the other two arms.
        $crate::vec::Vec::from_iter([$($x),+].into_iter())
    );
}
1988
1989#[doc(hidden)]
1990pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
1991 let mut v = Vec::with_capacity(n);
1992 for _i in 0..n {
1993 v.push(elem.clone());
1994 }
1995 v
1996}
1997
// Smoke test exercising push, indexing, the three iteration modes, dedup,
// extract_if, splice, every vec! macro form, and FromIterator.
#[test]
fn test() {
    let mut v = Vec::new();
    v.push(99);
    v.push(314);
    println!("v={:?}", &v);
    assert!(v[0] == 99);
    assert!(v[1] == 314);
    assert!(v.len() == 2);
    v[1] = 316;
    assert!(v[1] == 316);
    // Shared, mutable, and consuming iteration.
    for x in &v {
        println!("x={}", x);
    }
    for x in &mut v {
        *x += 1;
        println!("x={}", x);
    }
    for x in v {
        println!("x={}", x);
    }
    let mut v = vec![199, 200, 200, 201, 201];
    println!("v={:?}", &v);
    v.dedup();
    println!("v={:?}", &v);

    let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
    let extr: Vec<_> = numbers.extract_if(3..9, |x| *x % 2 == 0).collect();

    println!("numbers={:?} extr={:?}", &numbers, &extr);

    let mut a = vec![1, 2, 0, 5]; let b = vec![3, 4];
    a.splice(2..3, b);
    println!("a={:?}", &a);
    assert_eq!(a, [1, 2, 3, 4, 5]);

    let v = vec![99; 5];
    println!("v={:?}", &v);

    let v: Vec<_> = Vec::from_iter([1, 2, 3, 4].into_iter());
    println!("v={:?}", &v);
}
2045
2046#[cfg(test)]
2047mod test;